Example usage for java.lang InterruptedException printStackTrace

List of usage examples for java.lang InterruptedException printStackTrace

Introduction

On this page you can find example usage for java.lang.InterruptedException.printStackTrace().

Prototype

public void printStackTrace() 

Document

Prints this throwable and its backtrace to the standard error stream.
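
A minimal, self-contained sketch (not taken from any of the source files below) that provokes an InterruptedException and prints its backtrace to standard error:

public class PrintStackTraceDemo {
    public static void main(String[] args) throws InterruptedException {
        Thread sleeper = new Thread(() -> {
            try {
                Thread.sleep(60_000); // long sleep; will be interrupted
            } catch (InterruptedException e) {
                // prints the exception and its backtrace to System.err
                e.printStackTrace();
            }
        });
        sleeper.start();
        sleeper.interrupt(); // makes the sleep throw InterruptedException
        sleeper.join();
    }
}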

Usage

From source file:edu.umass.cs.gnsclient.client.integrationtests.ServerConnectTest.java

/**
 * arun: Coordinated operations generally need some settling time before
 * they can be tested at "any" replica. That is, read-your-writes
 * consistency is not ensured if a read following a write happens to go to a
 * different replica. Thus, we either need to wait for a long enough
 * duration and/or retransmit upon failure.
 *
 * I have inserted waitSettle() haphazardly at places. These tests need to
 * be systematically fixed by retrying if the expected answer is not found.
 * Simply using the async client to resend the request should suffice as
 * ReconfigurableAppClientAsync is designed to automatically pick "good"
 * active replicas, i.e., it will forget crashed ones for some time; clear
 * its cache, re-query, and pick randomly upon an active replica error; and
 * pick the replica closest by distance and load otherwise.
 */
private static void waitSettle() {
    try {
        if (COORDINATION_WAIT > 0) {
            Thread.sleep(COORDINATION_WAIT);
        }
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}
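
The comment above proposes replacing the fixed wait with retries. Below is a hedged sketch of such a helper; the name retryUntilEquals and the java.util.function.Supplier parameter are illustrative placeholders and are not part of the GNS test code:

// requires: import java.util.function.Supplier;
// Illustrative only: polls readValue until it equals expected or the attempt budget runs out.
private static <T> boolean retryUntilEquals(Supplier<T> readValue, T expected, int maxAttempts,
        long delayMillis) {
    for (int attempt = 0; attempt < maxAttempts; attempt++) {
        if (expected.equals(readValue.get())) {
            return true;
        }
        try {
            Thread.sleep(delayMillis);
        } catch (InterruptedException e) {
            e.printStackTrace();
            Thread.currentThread().interrupt(); // keep the interrupt visible to callers
            return false;
        }
    }
    return false;
}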

From source file:com.cyberway.issue.crawler.frontier.RecoveryJournal.java

/**
 * Utility method for scanning a recovery journal and applying it to
 * a Frontier.
 * 
 * @param source Recover log path.
 * @param frontier Frontier reference.
 * @param retainFailures
 * @throws IOException
 * 
 * @see com.cyberway.issue.crawler.framework.Frontier#importRecoverLog(String, boolean)
 */
public static void importRecoverLog(final File source, final CrawlController controller,
        final boolean retainFailures) throws IOException {
    if (source == null) {
        throw new IllegalArgumentException("Passed source file is null.");
    }
    LOGGER.info("recovering frontier completion state from " + source);

    // first, fill alreadyIncluded with successes (and possibly failures),
    // and count the total lines
    final int lines = importCompletionInfoFromLog(source, controller, retainFailures);

    LOGGER.info("finished completion state; recovering queues from " + source);

    // now, re-add anything that was in old frontier and not already
    // registered as finished. Do this in a separate thread that signals
    // this thread once ENOUGH_TO_START_CRAWLING URIs have been queued. 
    final CountDownLatch recoveredEnough = new CountDownLatch(1);
    new Thread(new Runnable() {
        public void run() {
            importQueuesFromLog(source, controller, lines, recoveredEnough);
        }
    }, "queuesRecoveryThread").start();

    try {
        // wait until at least ENOUGH_TO_START_CRAWLING URIs queued
        recoveredEnough.await();
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
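
If silently swallowing the interrupt is undesirable, a commonly recommended variant (shown here only as a sketch, not as the code above) re-asserts the thread's interrupt status after printing the trace:

try {
    // wait until at least ENOUGH_TO_START_CRAWLING URIs queued
    recoveredEnough.await();
} catch (InterruptedException e) {
    e.printStackTrace();
    // restore the interrupt so callers of importRecoverLog can still observe it
    Thread.currentThread().interrupt();
}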

From source file:com.sec.ose.osi.sdk.protexsdk.project.ProjectAPIWrapper.java

private static String createProject(String newProjectName, AnalysisSourceLocation newAnalysisSourceLocation,
        UIResponseObserver observer) {

    if (observer == null) {
        observer = new DefaultUIResponseObserver();
    }

    if (isExistedProjectName(newProjectName) == true) {
        observer.setFailMessage("\"" + newProjectName + "\" is already existed.");
        log.debug("\"" + newProjectName + "\" is already existed.");
        return null;
    }

    String projectID = null;
    ProjectRequest pRequest = new ProjectRequest();

    PolicyCheckResult p = ProjectNamePolicy.checkProjectName(newProjectName);
    if (p.getResult() != PolicyCheckResult.PROJECT_NAME_OK) {
        observer.setFailMessage(p.getResultMsg());
        return null;
    }

    final String DESCRIPTION = "This project is created by OSI - "
            + DateUtil.getCurrentTime("[%1$tY/%1$tm/%1$te(%1$ta) %1$tl:%1$tM:%1$tS %1$tp]");
    pRequest.setName(newProjectName);
    pRequest.setDescription(DESCRIPTION);
    if (newAnalysisSourceLocation != null) {
        pRequest.setAnalysisSourceLocation(newAnalysisSourceLocation);
    }

    try {
        projectID = ProtexSDKAPIManager.getProjectAPI().createProject(pRequest, LicenseCategory.PROPRIETARY);
    } catch (SdkFault e) {
        log.warn(e);
        ErrorCode errorCode = e.getFaultInfo().getErrorCode();
        try {
            Thread.sleep(1000);
        } catch (InterruptedException ie) {
            ie.printStackTrace();
        }
        if (errorCode == ErrorCode.DUPLICATE_PROJECT_NAME) {
            String[] button = { "OK" };
            JOptionPane.showOptionDialog( // block
                    null, "The project name \"" + newProjectName + "\" is already created by other user.",
                    "Duplicated project name", JOptionPane.YES_OPTION, JOptionPane.ERROR_MESSAGE, null, button,
                    "OK");
        }
        return null;
    }

    if (projectID == null)
        return null;

    setScanIgnorePattern(projectID, ProjectNamePolicy.IGNORED_PATTERN, observer);

    observer.pushMessage("[ok]\n");
    return projectID;

}

From source file:com.ikanow.aleph2.distributed_services.utils.KafkaUtils.java

/**
 * Continually checks zookeeper for a leader on the given topic partition.  If
 * a leader is found, returns true, otherwise spins until timeout_ms and returns
 * false.
 * 
 * @param zk_client
 * @param topic
 * @param timeout_ms
 * @return
 */
private static boolean waitUntilLeaderElected(ZkUtils zk_client, String topic, long timeout_ms) {
    long timeout_time_ms = System.currentTimeMillis() + timeout_ms;
    Option<Object> leader = zk_client.getLeaderForPartition(topic, 0);
    while (System.currentTimeMillis() < timeout_time_ms && leader.isEmpty()) {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            e.printStackTrace();
            return false;
        }
    }
    if (System.currentTimeMillis() > timeout_time_ms) {
        logger.debug("TIMED OUT BEFORE LEADER ELECTION");
        return false;
    }
    return true;
}
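
The spin-with-timeout pattern above can be factored into a generic helper. The sketch below is illustrative only and substitutes java.util.function.BooleanSupplier for the ZkUtils/Option types:

import java.util.function.BooleanSupplier;

public final class WaitUtil {

    /** Polls condition every pollMillis until it returns true or timeoutMillis elapses. */
    public static boolean waitUntil(BooleanSupplier condition, long timeoutMillis, long pollMillis) {
        final long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return true;
            }
            try {
                Thread.sleep(pollMillis);
            } catch (InterruptedException e) {
                e.printStackTrace();
                Thread.currentThread().interrupt();
                return false;
            }
        }
        return condition.getAsBoolean(); // one last check at the deadline
    }
}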

From source file:FormatStorage1.MergeFileUtil.java

public static void run(String inputdir, String outputdir, Configuration conf) throws IOException {
    JobConf job = new JobConf(conf);
    job.setJobName("MergeFileUtil");
    job.setJarByClass(MergeFileUtil.class);
    FileSystem fs = null;
    fs = FileSystem.get(job);
    if (fs.exists(new Path(outputdir))) {
        throw new IOException("outputdir: " + outputdir + " exist!!!");
    }

    FileStatus[] fss = fs.listStatus(new Path(inputdir));

    if (fss == null || fss.length <= 0) {
        throw new IOException("no input files");
    }

    IFormatDataFile ifdf = new IFormatDataFile(job);
    ifdf.open(fss[0].getPath().toString());
    job.set("ifdf.head.info", ifdf.fileInfo().head().toStr());
    ifdf.close();

    long wholesize = 0;
    for (FileStatus status : fss) {
        wholesize += status.getLen();
    }

    job.setNumReduceTasks(0);

    FileInputFormat.setInputPaths(job, inputdir);
    FileOutputFormat.setOutputPath(job, new Path(outputdir));

    job.setOutputKeyClass(LongWritable.class);
    job.setOutputValueClass(IRecord.class);

    job.setMapperClass(MergeMap.class);

    job.setInputFormat(CombineFormatStorageFileInputFormat.class);
    job.setOutputFormat(MergeIFormatOutputFormat.class);

    JobClient jc = new JobClient(job);
    RunningJob rjob = jc.submitJob(job);
    try {

        String lastReport = "";
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss,SSS");
        long reportTime = System.currentTimeMillis();
        long maxReportInterval = 3 * 1000;

        while (!rjob.isComplete()) {
            Thread.sleep(1000);

            int mapProgress = Math.round(rjob.mapProgress() * 100);
            int reduceProgress = Math.round(rjob.reduceProgress() * 100);

            String report = " map = " + mapProgress + "%,  reduce = " + reduceProgress + "%";

            if (!report.equals(lastReport) || System.currentTimeMillis() >= reportTime + maxReportInterval) {

                String output = dateFormat.format(Calendar.getInstance().getTime()) + report;
                System.err.println(output);
                lastReport = report;
                reportTime = System.currentTimeMillis();
            }
        }
        LOG.info(rjob.getJobState());

    } catch (IOException e1) {
        e1.printStackTrace();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}

From source file:com.taobao.android.builder.tasks.awo.utils.AwoInstaller.java

private static void waitUntilConnected(AndroidDebugBridge adb) {
    int trials = 10;
    final int connectionWaitTime = 50;
    while (trials > 0) {
        try {
            Thread.sleep(connectionWaitTime);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        if (adb.isConnected()) {
            break;
        }
        trials--;
    }
}

From source file:io.insideout.stanbol.enhancer.nlp.freeling.TestFreelingAnalysis.java

@AfterClass
public static final void cleanUp() {
    freeling.close();
    Assert.assertTrue(freeling.isClosed());
    Assert.assertTrue(freeling.getSupportedLanguages().isEmpty());
    Assert.assertFalse(freeling.isLanguageIdentificationSupported());
    try {
        Object o = new Object();
        synchronized (o) {
            o.wait(10000);
        }
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:FormatStorage1.MergeFileUtil.java

public static void runold(String inputdir, String outputdir, Configuration conf) throws IOException {
    JobConf job = new JobConf(conf);
    job.setJobName("MergeFileUtil");
    job.setJarByClass(MergeFileUtil.class);
    FileSystem fs = null;
    fs = FileSystem.get(job);
    if (fs.exists(new Path(outputdir))) {
        throw new IOException("outputdir: " + outputdir + " exist!!!");
    }

    FileStatus[] fss = fs.listStatus(new Path(inputdir));

    if (fss == null || fss.length <= 0) {
        throw new IOException("no input files");
    }

    for (FileStatus status : fss) {
        if (status.isDir()) {
            throw new IOException("!!!input dir contains directory:\t" + status.getPath().toString());
        }
    }

    IFormatDataFile ifdf = new IFormatDataFile(job);
    ifdf.open(fss[0].getPath().toString());
    job.set("ifdf.head.info", ifdf.fileInfo().head().toStr());
    ifdf.close();

    long wholesize = 0;
    for (FileStatus status : fss) {
        wholesize += status.getLen();
    }

    long fl = 512 * 1024 * 1024;
    int reduces = (int) (wholesize / fl + 1);
    job.setNumReduceTasks(reduces);

    FileInputFormat.setInputPaths(job, inputdir);
    FileOutputFormat.setOutputPath(job, new Path(outputdir));

    job.setOutputKeyClass(LongWritable.class);
    job.setOutputValueClass(IRecord.class);

    job.setMapperClass(MergeMap.class);
    job.setReducerClass(MergeReduce.class);

    job.setInputFormat(MergeIFormatInputFormat.class);
    job.setOutputFormat(MergeIFormatOutputFormat.class);

    JobClient jc = new JobClient(job);
    RunningJob rjob = jc.submitJob(job);
    try {

        String lastReport = "";
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss,SSS");
        long reportTime = System.currentTimeMillis();
        long maxReportInterval = 3 * 1000;

        while (!rjob.isComplete()) {
            Thread.sleep(1000);

            int mapProgress = Math.round(rjob.mapProgress() * 100);
            int reduceProgress = Math.round(rjob.reduceProgress() * 100);

            String report = " map = " + mapProgress + "%,  reduce = " + reduceProgress + "%";

            if (!report.equals(lastReport) || System.currentTimeMillis() >= reportTime + maxReportInterval) {

                String output = dateFormat.format(Calendar.getInstance().getTime()) + report;
                System.err.println(output);
                lastReport = report;
                reportTime = System.currentTimeMillis();
            }
        }
        LOG.info(rjob.getJobState());

    } catch (IOException e1) {
        e1.printStackTrace();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}

From source file:com.amazonaws.services.kinesis.aggregators.datastore.DynamoUtils.java

public static UpdateItemResult updateWithRetries(AmazonDynamoDB dynamoClient, UpdateItemRequest req)
        throws Exception {
    final double initialBackoff = 2D;
    final int updateRetries = 10;
    final double backoffRatio = 1.2;

    double backoff = initialBackoff;

    UpdateItemResult res = null;

    for (int i = 0; i < updateRetries; i++) {
        try {
            res = dynamoClient.updateItem(req);
            break;
        } catch (ProvisionedThroughputExceededException ptee) {
            LOG.warn(String.format("Exceeded Provisioned Througput - Backing off for %s", backoff));
            try {
                Thread.sleep(new Double(backoff).longValue());
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            // geometric backoff: the delay is multiplied by backoffRatio on each retry
            backoff = backoff * backoffRatio;
        } catch (ConditionalCheckFailedException ccfe) {
            // silently rethrow these exceptions as they are part of the
            // conditional update logic for MIN/MAX calculations
            throw ccfe;
        } catch (Exception e) {
            LOG.warn(e);
            throw e;
        }
    }

    if (res == null) {
        throw new Exception(String.format("Unable to write after %s retries", updateRetries));
    } else {
        return res;
    }
}
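
Because the delay is multiplied by backoffRatio on every retry, it grows geometrically (2, then 2.4, then 2.88, and so on) rather than linearly. A hedged sketch of the sleep-and-grow step as a standalone helper that surfaces the interrupt instead of only printing it (illustrative, not part of the aggregator code above):

// Illustrative helper: sleeps for the current backoff and returns the next, larger backoff value.
private static double backoffSleep(double backoffMillis, double backoffRatio) throws InterruptedException {
    try {
        Thread.sleep((long) backoffMillis);
    } catch (InterruptedException e) {
        e.printStackTrace();
        throw e; // let the retry loop decide how to handle the interrupt
    }
    return backoffMillis * backoffRatio;
}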

From source file:com.linkedin.helix.mock.storage.DummyProcess.java

static void sleep(long transDelay) {
    try {
        if (transDelay > 0) {
            Thread.sleep(transDelay);
        }
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}