Example usage for java.util.concurrent TimeoutException TimeoutException

Introduction

This page collects example usages of the java.util.concurrent TimeoutException(String message) constructor, drawn from open-source projects.

Prototype

public TimeoutException(String message) 

Document

Constructs a TimeoutException with the specified detail message.
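
Before looking at the project code, here is a minimal, self-contained sketch of the pattern most of the examples below share: compute a deadline, poll, and throw a TimeoutException whose detail message states what was being waited for and for how long. The class and method names (TimeoutExample, awaitCondition) are illustrative only and are not taken from any of the projects listed under Usage.

import java.util.concurrent.Callable;
import java.util.concurrent.TimeoutException;

public class TimeoutExample {

    // Polls the condition until it returns true, or throws a TimeoutException
    // with a descriptive detail message once the deadline has passed.
    static void awaitCondition(Callable<Boolean> condition, long timeoutMillis) throws Exception {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (!condition.call()) {
            if (System.currentTimeMillis() > deadline) {
                throw new TimeoutException("Condition not met within " + timeoutMillis + " ms");
            }
            Thread.sleep(100);
        }
    }

    public static void main(String[] args) throws Exception {
        long start = System.currentTimeMillis();
        // Example condition: becomes true after roughly half a second.
        awaitCondition(() -> System.currentTimeMillis() - start > 500, 2000);
        System.out.println("Condition met before the timeout");
    }
}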

Usage

From source file:org.jclouds.http.httpnio.pool.NioHttpCommandConnectionPool.java

public void connectionTimeout(NHttpConnection conn) {
    String message = String.format("Timeout on : %s - timeout %d", getTarget(), conn.getSocketTimeout());
    logger.warn(message);
    resubmitIfRequestIsReplayable(conn, new TimeoutException(message));
}

From source file:pl.umk.mat.zawodyweb.compiler.classes.LanguageUVA.java

private void checkResults(int id, long maxTime, TestInput input, TestOutput result)
        throws InterruptedException, TimeoutException, HttpException, IOException {

    int limitRise = 50;
    int limitOnPage = 50;

    Random random = new Random();

    Thread.sleep(7000 + (Math.abs(random.nextInt()) % 3000));

    long start_time = System.currentTimeMillis();
    do {
        if (System.currentTimeMillis() - start_time > maxTime * 1000L) {
            logger.info(String.format("%.1f minutes without answer. Destroy!", maxTime / 60.));
            throw new TimeoutException("Too slow to answer.. destroy");
        }

        logger.info("Checking answer on UVa-ACM");
        List<Map<String, String>> results = getResults(limitOnPage);

        String sid = String.valueOf(id);
        Map<String, String> map = null;
        for (Map<String, String> m : results) {
            if (sid.equals(m.get("id"))) {
                map = m;
                break;
            }
        }

        if (map != null) {
            String status = map.get("status");
            String time = map.get("time");
            result.setPoints(0);
            if ("".equals(status) || "Received".equals(status) || "Running".equals(status)
                    || "Sent to judge".equals(status) || "In judge queue".equals(status)
                    || "Compiling".equals(status) || "Linking".equals(status)) {
                Thread.sleep(7000);
            } else if ("Accepted".equals(status)) {
                result.setStatus(ResultsStatusEnum.ACC.getCode());
                result.setPoints(input.getMaxPoints());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Compilation error".equals(status)) {
                result.setStatus(ResultsStatusEnum.CE.getCode());
                result.setNotes(getCompilationError(id));
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Presentation error".equals(status)) {
                result.setStatus(ResultsStatusEnum.ACC.getCode());
                result.setPoints(input.getMaxPoints());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Wrong answer".equals(status)) {
                result.setStatus(ResultsStatusEnum.WA.getCode());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Time limit exceeded".equals(status)) {
                result.setStatus(ResultsStatusEnum.TLE.getCode());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Memory limit exceeded".equals(status)) {
                result.setStatus(ResultsStatusEnum.MLE.getCode());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Runtime error".equals(status)) {
                result.setStatus(ResultsStatusEnum.RE.getCode());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else {
                result.setStatus(ResultsStatusEnum.UNKNOWN.getCode());
                result.setNotes("Unknown status: \"" + status + "\"");
                logger.info("Unknown status: \"" + status + "\"");
                break;
            }
        } else {
            limitOnPage += limitRise;
        }
        Thread.sleep(3000 + (Math.abs(random.nextInt()) % 3000));
    } while (true);
}

From source file:com.streamsets.datacollector.cluster.ClusterProviderImpl.java

@Override
public ClusterPipelineStatus getStatus(SystemProcessFactory systemProcessFactory, File sparkManager,
        File tempDir, String appId, PipelineConfiguration pipelineConfiguration)
        throws TimeoutException, IOException {

    Map<String, String> environment = new HashMap<>();
    environment.put(CLUSTER_TYPE, CLUSTER_TYPE_YARN);
    addKerberosConfiguration(environment);
    ImmutableList.Builder<String> args = ImmutableList.builder();
    args.add(sparkManager.getAbsolutePath());
    args.add("status");
    args.add(appId);
    ExecutionMode executionMode = PipelineBeanCreator.get().getExecutionMode(pipelineConfiguration,
            new ArrayList<Issue>());
    if (executionMode == ExecutionMode.CLUSTER_MESOS_STREAMING) {
        addMesosArgs(pipelineConfiguration, environment, args);
    }
    SystemProcess process = systemProcessFactory.create(ClusterProviderImpl.class.getSimpleName(), tempDir,
            args.build());
    try {
        process.start(environment);
        if (!process.waitFor(30, TimeUnit.SECONDS)) {
            logOutput(appId, process);
            throw new TimeoutException(errorString("YARN status command for {} timed out.", appId));
        }
        if (process.exitValue() != 0) {
            logOutput(appId, process);
            throw new IllegalStateException(
                    errorString("Status command for {} failed with exit code {}.", appId, process.exitValue()));
        }
        logOutput(appId, process);
        String status;
        if (executionMode == ExecutionMode.CLUSTER_MESOS_STREAMING) {
            status = mesosStatusParser.parseStatus(process.getAllOutput());
        } else {
            status = yarnStatusParser.parseStatus(process.getAllOutput());
        }
        return ClusterPipelineStatus.valueOf(status);
    } finally {
        process.cleanup();
    }
}

From source file:org.spring.springxdcloudInstaller.MainApp.java

static RunningInstance blockNodeInstanceRunning(EC2Client client, RunningInstance instance)
        throws TimeoutException {
    // create utilities that wait for the instance to finish
    RetryablePredicate<RunningInstance> runningTester = new RetryablePredicate<RunningInstance>(
            new InstanceStateRunning(client), 180, 5, TimeUnit.SECONDS);

    System.out.printf("%d: %s awaiting instance to run %n", System.currentTimeMillis(), instance.getId());
    if (!runningTester.apply(instance))
        throw new TimeoutException("timeout waiting for instance to run: " + instance.getId());

    instance = findInstanceById(client, instance.getId());

    RetryablePredicate<HostAndPort> socketTester = new RetryablePredicate<HostAndPort>(
            new InetSocketAddressConnect(), 300, 1, TimeUnit.SECONDS);
    System.out.printf("%d: %s awaiting ssh service to start%n", System.currentTimeMillis(),
            instance.getIpAddress());
    if (!socketTester.apply(HostAndPort.fromParts(instance.getIpAddress(), 22)))
        throw new TimeoutException("timeout waiting for ssh to start: " + instance.getIpAddress());

    System.out.printf("%d: %s ssh service started%n", System.currentTimeMillis(), instance.getIpAddress());

    System.out.printf("%d: %s http service started%n", System.currentTimeMillis(), instance.getIpAddress());
    System.out.printf("instance %s ready%n", instance.getId());
    System.out.printf("ip address: %s%n", instance.getIpAddress());
    System.out.printf("dns name: %s%n", instance.getDnsName());
    return instance;
}

From source file:pl.umk.mat.zawodyweb.compiler.classes.LanguageLA.java

private void checkResults(int id, long maxTime, TestInput input, TestOutput result)
        throws InterruptedException, TimeoutException, HttpException, IOException {

    int limitRise = 50;
    int limitOnPage = 50;

    Random random = new Random();

    Thread.sleep(7000 + (Math.abs(random.nextInt()) % 3000));

    long start_time = System.currentTimeMillis();
    do {
        if (System.currentTimeMillis() - start_time > maxTime * 1000L) {
            logger.info(String.format("%.1f minutes without answer. Destroy!", maxTime / 60.));
            throw new TimeoutException("Too slow to answer.. destroy");
        }

        logger.info("Checking answer on LA-ACM");
        List<Map<String, String>> results = getResults(limitOnPage);

        String sid = String.valueOf(id);
        Map<String, String> map = null;
        for (Map<String, String> m : results) {
            if (sid.equals(m.get("id"))) {
                map = m;
                break;
            }
        }

        if (map != null) {
            String status = map.get("status");
            String time = map.get("time");
            result.setPoints(0);
            if ("".equals(status) || "Received".equals(status) || "Running".equals(status)
                    || "Sent to judge".equals(status) || "In judge queue".equals(status)
                    || "Compiling".equals(status) || "Linking".equals(status)) {
                Thread.sleep(7000);
            } else if ("Accepted".equals(status)) {
                result.setStatus(ResultsStatusEnum.ACC.getCode());
                result.setPoints(input.getMaxPoints());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Compilation error".equals(status)) {
                result.setStatus(ResultsStatusEnum.CE.getCode());
                result.setNotes(getCompilationError(id));
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Presentation error".equals(status)) {
                result.setStatus(ResultsStatusEnum.ACC.getCode());
                result.setPoints(input.getMaxPoints());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Wrong answer".equals(status)) {
                result.setStatus(ResultsStatusEnum.WA.getCode());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Time limit exceeded".equals(status)) {
                result.setStatus(ResultsStatusEnum.TLE.getCode());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Memory limit exceeded".equals(status)) {
                result.setStatus(ResultsStatusEnum.MLE.getCode());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else if ("Runtime error".equals(status)) {
                result.setStatus(ResultsStatusEnum.RE.getCode());
                result.setRuntime(Integer.parseInt(time));
                break;
            } else {
                result.setStatus(ResultsStatusEnum.UNKNOWN.getCode());
                result.setNotes("Unknown status: \"" + status + "\"");
                logger.info("Unknown status: \"" + status + "\"");
                break;
            }
        } else {
            limitOnPage += limitRise;
        }
        Thread.sleep(3000 + (Math.abs(random.nextInt()) % 3000));
    } while (true);
}

From source file:no.ntnu.osnap.com.Protocol.java

/**
 * Sends a String to the remote device. How the String is handled is defined by the
 * application on the remote device; typically the text is printed on a display.
 * @param text the String to send to the remote device
 * @param blocking TRUE if the method should block until a response or timeout occurs;
 *                FALSE if the method should return immediately and send the String asynchronously
 * @throws TimeoutException if the remote device takes too long to acknowledge the String
 */
public final void print(String text, boolean blocking) throws TimeoutException {
    ProtocolInstruction newInstruction = new ProtocolInstruction(OpCode.TEXT, (byte) 0, text.getBytes());
    if (!blocking) {
        queueInstruction(newInstruction);
    } else {
        // Blocking method
        lock();

        waitingForAck = OpCode.TEXT;
        tempAckProcessor = OpCode.TEXT;

        try {
            sendBytes(newInstruction.getInstructionBytes());
        } catch (IOException ex) {
            //Log.e(getClass().getName(), "Send byte failure: " + ex);
        }
        release();

        long time = System.currentTimeMillis();
        while (waitingForAck != null) {
            if (System.currentTimeMillis() - time > TIMEOUT) {
                waitingForAck = null;
                throw new TimeoutException("Timeout");
            }
            try {
                Thread.sleep(10);
            } catch (InterruptedException ex) {
            }
        }

        tempAckProcessor = null;

        ackProcessingComplete();
    }
}

From source file:org.cloudifysource.rest.util.ApplicationInstallerRunnable.java

private void installServices(final File appDir, final String applicationName, final String authGroups,
        final boolean async, final Cloud cloud, final File cloudOverrides) throws IOException {
    logger.info("Installing services for application: " + applicationName + ". Async install: " + async
            + ". Number of services: " + this.services.size());
    for (final Service service : services) {
        logger.info("Installing service: " + service.getName() + " for application: " + applicationName);
        service.getCustomProperties().put("usmJarPath", Environment.getHomeDirectory() + "/lib/platform/usm");

        final Properties contextProperties = createServiceContextProperties(service, applicationName, async,
                cloud);

        final String serviceName = service.getName();
        final String absolutePUName = ServiceUtils.getAbsolutePUName(applicationName, serviceName);
        final File serviceDirectory = new File(appDir, serviceName);

        // scan for service cloud configuration file

        final File serviceCloudConfiguration = new File(serviceDirectory,
                CloudifyConstants.SERVICE_CLOUD_CONFIGURATION_FILE_NAME);
        byte[] serviceCloudConfigurationContents = null;
        if (serviceCloudConfiguration.exists()) {
            serviceCloudConfigurationContents = FileUtils.readFileToByteArray(serviceCloudConfiguration);
            FileUtils.forceDelete(serviceCloudConfiguration);
        }

        boolean found = false;

        try {
            // this will actually create an empty props file.
            final FileAppender appender = new FileAppender("finalPropsFile.properties");
            final LinkedHashMap<File, String> filesToAppend = new LinkedHashMap<File, String>();

            // first add the application properties file. least important overrides.
            // lookup application properties file
            final File applicationPropertiesFile = DSLReader
                    .findDefaultDSLFileIfExists(DSLUtils.APPLICATION_PROPERTIES_FILE_NAME, appDir);
            filesToAppend.put(applicationPropertiesFile, "Application Properties File");
            // add the service properties file, second level overrides.
            // lookup service properties file
            final String propertiesFileName = DSLUtils.getPropertiesFileName(serviceDirectory,
                    DSLUtils.SERVICE_DSL_FILE_NAME_SUFFIX);
            final File servicePropertiesFile = new File(serviceDirectory, propertiesFileName);
            filesToAppend.put(servicePropertiesFile, "Service Properties File");
            // lookup overrides file
            File actualOverridesFile = overridesFile;
            if (actualOverridesFile == null) {
                // when using the CLI, the application overrides file is inside the directory
                actualOverridesFile = DSLReader
                        .findDefaultDSLFileIfExists(DSLUtils.APPLICATION_OVERRIDES_FILE_NAME, appDir);
            }
            // add the overrides file given in the command or via REST, most important overrides.
            filesToAppend.put(actualOverridesFile, "Overrides Properties File");
            /*
             * name the merged properties file as the original properties file. this will allow all properties to be
             * available by anyone who parses the default properties file. (like Lifecycle scripts)
             */
            appender.appendAll(servicePropertiesFile, filesToAppend);

            // Pack the folder and name it absolutePuName
            final File packedFile = Packager.pack(service, serviceDirectory, absolutePUName, null);
            result.getApplicationFile().delete();
            packedFile.deleteOnExit();
            // Deployment will be done using the service's absolute PU name.
            logger.info("Deploying PU: " + absolutePUName + ". File: " + packedFile + ". Properties: "
                    + contextProperties);
            final String templateName = service.getCompute() == null ? null
                    : service.getCompute().getTemplate();
            controller.deployElasticProcessingUnit(absolutePUName, applicationName, authGroups, serviceName,
                    packedFile, contextProperties, templateName, true, 0, TimeUnit.SECONDS,
                    serviceCloudConfigurationContents, selfHealing, null /* service overrides file */,
                    cloudOverrides);
            try {
                FileUtils.deleteDirectory(packedFile.getParentFile());
            } catch (final IOException ioe) {
                // sometimes this delete fails. Not sure why. Maybe deploy
                // is async?
                logger.warning("Failed to delete temporary directory: " + packedFile.getParentFile());
            }

            if (!async) {
                logger.info("Waiting for instance of service: " + serviceName + " of application: "
                        + applicationName);
                final boolean instanceFound = controller.waitForServiceInstance(applicationName, serviceName,
                        SERVICE_INSTANCE_STARTUP_TIMEOUT_MINUTES, TimeUnit.MINUTES);
                if (!instanceFound) {
                    throw new TimeoutException("Service " + serviceName + " of application " + applicationName
                            + " was installed, but no instance of the service has started after "
                            + SERVICE_INSTANCE_STARTUP_TIMEOUT_MINUTES + " minutes.");
                }
                logger.info("Found instance of: " + serviceName);
            }

            found = true;
            logger.fine("service " + service + " deployed.");
        } catch (final Exception e) {
            logger.log(Level.SEVERE, "Failed to install service: " + serviceName + " of application: "
                    + applicationName + ". Application installation will halt. "
                    + "Some services may already have started, and should be shutdown manually. Error was: "
                    + e.getMessage(), e);
            this.controller.handleDeploymentException(e, this.pollingTaskId);
            return;
        }

        if (!found) {
            logger.severe("Failed to find an instance of service: " + serviceName
                    + " while installing application " + applicationName
                    + ". Application installation will stop. Some services may have been installed!");
            return;
        }

    }
}

From source file:org.apache.hadoop.hdfs.server.balancer.TestBalancer.java

/**
 * Wait until heartbeat gives expected results, within CAPACITY_ALLOWED_VARIANCE,
 * summed over all nodes.  Times out after TIMEOUT msec.
 * @throws IOException - if getStats() fails
 * @throws TimeoutException - if the expected values are not reached within TIMEOUT msec
 */
static void waitForHeartBeat(long expectedUsedSpace, long expectedTotalSpace, ClientProtocol client,
        MiniDFSCluster cluster) throws IOException, TimeoutException {
    long timeout = TIMEOUT;
    long failtime = (timeout <= 0L) ? Long.MAX_VALUE : Time.now() + timeout;

    while (true) {
        long[] status = client.getStats();
        double totalSpaceVariance = Math.abs((double) status[0] - expectedTotalSpace) / expectedTotalSpace;
        double usedSpaceVariance = Math.abs((double) status[1] - expectedUsedSpace) / expectedUsedSpace;
        if (totalSpaceVariance < CAPACITY_ALLOWED_VARIANCE && usedSpaceVariance < CAPACITY_ALLOWED_VARIANCE)
            break; //done

        if (Time.now() > failtime) {
            throw new TimeoutException("Cluster failed to reached expected values of " + "totalSpace (current: "
                    + status[0] + ", expected: " + expectedTotalSpace + "), or usedSpace (current: " + status[1]
                    + ", expected: " + expectedUsedSpace + "), in more than " + timeout + " msec.");
        }
        try {
            Thread.sleep(100L);
        } catch (InterruptedException ignored) {
        }
    }
}

From source file:info.batey.kafka.unit.KafkaUnit.java

private <T> List<T> readMessages(String topicName, final MessageExtractor<T> messageExtractor)
        throws TimeoutException {
    ExecutorService singleThread = Executors.newSingleThreadExecutor();
    Properties consumerProperties = new Properties();
    consumerProperties.put("zookeeper.connect", zookeeperString);
    consumerProperties.put("group.id", "10");
    consumerProperties.put("socket.timeout.ms", "500");
    consumerProperties.put("consumer.id", "test");
    consumerProperties.put("auto.offset.reset", "smallest");
    consumerProperties.put("consumer.timeout.ms", "500");
    ConsumerConnector javaConsumerConnector = Consumer
            .createJavaConsumerConnector(new ConsumerConfig(consumerProperties));
    StringDecoder stringDecoder = new StringDecoder(new VerifiableProperties(new Properties()));
    Map<String, Integer> topicMap = new HashMap<>();
    topicMap.put(topicName, 1);
    Map<String, List<KafkaStream<String, String>>> events = javaConsumerConnector.createMessageStreams(topicMap,
            stringDecoder, stringDecoder);
    List<KafkaStream<String, String>> events1 = events.get(topicName);
    final KafkaStream<String, String> kafkaStreams = events1.get(0);

    Future<List<T>> submit = singleThread.submit(new Callable<List<T>>() {
        public List<T> call() throws Exception {
            List<T> messages = new ArrayList<>();
            try {
                for (MessageAndMetadata<String, String> kafkaStream : kafkaStreams) {
                    T message = messageExtractor.extract(kafkaStream);
                    LOGGER.info("Received message: {}", kafkaStream.message());
                    messages.add(message);
                }
            } catch (ConsumerTimeoutException e) {
                // always gets thrown when reaching the end of the stream
            }
            return messages;
        }
    });

    List<T> receivedMessages;

    try {
        receivedMessages = submit.get(3, TimeUnit.SECONDS);

    } catch (InterruptedException | ExecutionException | TimeoutException e) {
        throw new TimeoutException("Timed out waiting for messages");

    } finally {
        singleThread.shutdown();
        javaConsumerConnector.shutdown();
    }

    return receivedMessages;
}

From source file:org.apache.hadoop.hdfs.StripedFileTestUtil.java

/**
 * Wait for all the internalBlocks of the blockGroups of the given file to be
 * reported.
 */
public static void waitBlockGroupsReported(DistributedFileSystem fs, String src, int numDeadDNs)
        throws Exception {
    boolean success;
    final int ATTEMPTS = 40;
    int count = 0;

    do {
        success = true;
        count++;
        LocatedBlocks lbs = fs.getClient().getLocatedBlocks(src, 0);
        for (LocatedBlock lb : lbs.getLocatedBlocks()) {
            short expected = (short) (getRealTotalBlockNum((int) lb.getBlockSize()) - numDeadDNs);
            int reported = lb.getLocations().length;
            if (reported < expected) {
                success = false;
                LOG.info("blockGroup " + lb.getBlock() + " of file " + src + " has reported internalBlocks "
                        + reported + " (desired " + expected + "); locations "
                        + Joiner.on(' ').join(lb.getLocations()));
                Thread.sleep(1000);
                break;
            }
        }
        if (success) {
            LOG.info("All blockGroups of file " + src + " verified to have all internalBlocks.");
        }
    } while (!success && count < ATTEMPTS);

    if (count == ATTEMPTS) {
        throw new TimeoutException("Timed out waiting for " + src + " to have all the internalBlocks");
    }
}