Example usage for java.util.concurrent ExecutorService execute

Introduction

On this page you can find usage examples for java.util.concurrent ExecutorService execute.

Prototype

void execute(Runnable command);

Document

Executes the given command at some time in the future.
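
Before the project examples below, here is a minimal sketch of the basic pattern, assuming only the standard library (the class name ExecuteExample is just for illustration): a Runnable is handed to execute(), which schedules it for asynchronous execution without returning a Future, and the pool is then shut down and awaited.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ExecuteExample {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newFixedThreadPool(2);

        // execute() schedules each Runnable to run at some time in the future;
        // unlike submit(), it does not return a Future.
        executor.execute(() -> System.out.println("task 1 on " + Thread.currentThread().getName()));
        executor.execute(() -> System.out.println("task 2 on " + Thread.currentThread().getName()));

        // Stop accepting new tasks, then wait for the submitted ones to finish.
        executor.shutdown();
        executor.awaitTermination(10, TimeUnit.SECONDS);
    }
}

Most of the examples below follow this same shape, differing mainly in how the pool is sized and how completion is awaited (awaitTermination, a CountDownLatch, or polling).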

Usage

From source file:edu.lternet.pasta.portal.HarvesterServlet.java

/**
 * The doPost method of the servlet. <br>
 *
 * This method is called when a form is submitted with its method attribute set to post.
 * 
 * @param request
 *          the request sent by the client to the server
 * @param response
 *          the response sent by the server to the client
 * @throws ServletException
 *           if an error occurred
 * @throws IOException
 *           if an error occurred
 */
public void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    HttpSession httpSession = request.getSession();
    ServletContext servletContext = httpSession.getServletContext();
    ArrayList<String> documentURLs = null;
    File emlFile = null;
    String emlTextArea = null;
    Harvester harvester = null;
    String harvestId = null;
    String harvestListURL = null;
    String harvestReportId = null;
    boolean isDesktopUpload = false;
    boolean isEvaluate = false;
    String uid = (String) httpSession.getAttribute("uid");
    String urlTextArea = null;
    String warningMessage = "";

    try {
        if (uid == null) {
            throw new PastaAuthenticationException(LOGIN_WARNING);
        } else {
            /*
             * The "metadataSource" request parameter can have a value of
             * "emlText", "emlFile", "urlList", "harvestList", or
             * "desktopHarvester". It is set as a hidden input field in 
             * each of the harvester forms.
             */
            String metadataSource = request.getParameter("metadataSource");

            /*
             * "mode" can have a value of "evaluate" or "upgrade". It is set
             * as the value of the submit button in each of the harvester
             * forms.
             */
            String mode = request.getParameter("submit");
            if ((mode != null) && (mode.equalsIgnoreCase("evaluate"))) {
                isEvaluate = true;
            }

            if ((metadataSource != null) && (!metadataSource.equals("desktopHarvester"))) {
                harvestId = generateHarvestId();
                if (isEvaluate) {
                    harvestReportId = uid + "-evaluate-" + harvestId;
                } else {
                    harvestReportId = uid + "-upload-" + harvestId;
                }
            }

            if (metadataSource != null) {
                if (metadataSource.equals("emlText")) {
                    emlTextArea = request.getParameter("emlTextArea");
                    if (emlTextArea == null || emlTextArea.trim().isEmpty()) {
                        warningMessage = "<p class=\"warning\">Please enter the text of an EML document into the text area.</p>";
                    }
                } else if (metadataSource.equals("emlFile")) {
                    Collection<Part> parts = request.getParts();
                    for (Part part : parts) {
                        if (part.getContentType() != null) {
                            // save EML file to disk
                            emlFile = processUploadedFile(part);
                        } else {
                            /*
                             * Parse the request parameters.
                             */
                            String fieldName = part.getName();
                            String fieldValue = request.getParameter(fieldName);
                            if (fieldName != null && fieldValue != null) {
                                if (fieldName.equals("submit") && fieldValue.equalsIgnoreCase("evaluate")) {
                                    isEvaluate = true;
                                } else if (fieldName.equals("desktopUpload")
                                        && fieldValue.equalsIgnoreCase("1")) {
                                    isDesktopUpload = true;
                                }
                            }
                        }
                    }
                } else if (metadataSource.equals("urlList")) {
                    urlTextArea = request.getParameter("urlTextArea");
                    if (urlTextArea == null || urlTextArea.trim().isEmpty()) {
                        warningMessage = "<p class=\"warning\">Please enter one or more EML document URLs into the text area.</p>";
                    } else {
                        documentURLs = parseDocumentURLsFromTextArea(urlTextArea);
                        warningMessage = CHECK_BACK_LATER;
                    }
                } else if (metadataSource.equals("harvestList")) {
                    harvestListURL = request.getParameter("harvestListURL");
                    if (harvestListURL == null || harvestListURL.trim().isEmpty()) {
                        warningMessage = "<p class=\"warning\">Please enter the URL to a Metacat Harvest List.</p>";
                    } else {
                        documentURLs = parseDocumentURLsFromHarvestList(harvestListURL);
                        warningMessage = CHECK_BACK_LATER;
                    }
                }
                /*
                 * If the metadata source is "desktopHarvester", we already have the
                 * EML file stored in a session attribute. Now we need to retrieve
                 * the data files from the browser's form fields and write the
                 * data files to a URL accessible location.
                 */
                else if (metadataSource.equals("desktopHarvester")) {
                    emlFile = (File) httpSession.getAttribute("emlFile");
                    ArrayList<Entity> entityList = parseEntityList(emlFile);
                    harvestReportId = (String) httpSession.getAttribute("harvestReportId");
                    String dataPath = servletContext.getRealPath(DESKTOP_DATA_DIR);
                    String harvestPath = String.format("%s/%s", dataPath, harvestReportId);

                    Collection<Part> parts = request.getParts();
                    String objectName = null;
                    Part filePart = null;

                    for (Part part : parts) {
                        if (part.getContentType() != null) {
                            // save data file to disk
                            //processDataFile(part, harvestPath);
                            filePart = part;
                        } else {
                            /*
                             * Parse the request parameters.
                             */
                            String fieldName = part.getName();
                            String fieldValue = request.getParameter(fieldName);
                            if (fieldName != null && fieldValue != null) {
                                if (fieldName.equals("submit") && fieldValue.equalsIgnoreCase("evaluate")) {
                                    isEvaluate = true;
                                } else if (fieldName.startsWith("object-name-")) {
                                    objectName = fieldValue;
                                }
                            }
                        }

                        if (filePart != null && objectName != null) {
                            processDataFile(filePart, harvestPath, objectName);
                            objectName = null;
                            filePart = null;
                        }

                    }

                    emlFile = transformDesktopEML(harvestPath, emlFile, harvestReportId, entityList);
                }
            } else {
                throw new IllegalStateException("No value specified for request parameter 'metadataSource'");
            }

            if (harvester == null) {
                harvester = new Harvester(harvesterPath, harvestReportId, uid, isEvaluate);
            }

            if (emlTextArea != null) {
                harvester.processSingleDocument(emlTextArea);
            } else if (emlFile != null) {
                if (isDesktopUpload) {
                    ArrayList<Entity> entityList = parseEntityList(emlFile);
                    httpSession.setAttribute("entityList", entityList);
                    httpSession.setAttribute("emlFile", emlFile);
                    httpSession.setAttribute("harvestReportId", harvestReportId);
                    httpSession.setAttribute("isEvaluate", Boolean.valueOf(isEvaluate));
                } else {
                    harvester.processSingleDocument(emlFile);
                }
            } else if (documentURLs != null) {
                harvester.setDocumentURLs(documentURLs);
                ExecutorService executorService = Executors.newCachedThreadPool();
                executorService.execute(harvester);
                executorService.shutdown();
            }
        }
    } catch (Exception e) {
        handleDataPortalError(logger, e);
    }

    request.setAttribute("message", warningMessage);

    /*
     * If we have a new reportId, and either there is no warning message or
     * it's the "Check back later" message, set the harvestReportID session
     * attribute to the new reportId value.
     */
    if (harvestReportId != null && harvestReportId.length() > 0
            && (warningMessage.length() == 0 || warningMessage.equals(CHECK_BACK_LATER))) {
        httpSession.setAttribute("harvestReportID", harvestReportId);
    }

    if (isDesktopUpload) {
        RequestDispatcher requestDispatcher = request.getRequestDispatcher("./desktopHarvester.jsp");
        requestDispatcher.forward(request, response);
    } else if (warningMessage.length() == 0) {
        response.sendRedirect("./harvestReport.jsp");
    } else {
        RequestDispatcher requestDispatcher = request.getRequestDispatcher("./harvester.jsp");
        requestDispatcher.forward(request, response);
    }

}

From source file:kmi.taa.core.PredicateObjectRetriever.java

public SortedMap<Integer, String> retrieveAll(Map<Integer, String> originalLines, String proxy) {
    SortedMap<Integer, String> results = Collections.synchronizedSortedMap(new TreeMap<Integer, String>());
    ExecutorService pool = Executors.newFixedThreadPool(50);

    int howManyslinks = originalLines.size();

    for (Integer id : originalLines.keySet()) {
        String line = originalLines.get(id);
        String[] str = line.split("\t");
        String candidateUrl = str[2];
        pool.execute(new Retriever(id, candidateUrl, proxy, results));
    }
    pool.shutdown();

    int count = 0;
    int previousResultSize = 0;
    while (results.size() < howManyslinks && count < 100) {
        try {
            Thread.sleep(1000);
            count += 1;
            if (results.size() != previousResultSize) {
                previousResultSize = results.size();
                count = 0;
            }
            System.out.println("Already retrieved " + results.size() + " triples ...");
        } catch (InterruptedException e) {

        }
    }

    System.out.println("All slinks are queried");
    return results;
}

From source file:org.kurento.test.grid.GridHandler.java

public synchronized void filterValidNodes() {
    if (!nodeListFiltered) {
        log.debug("Node availables in the node list: {}", nodeList.size());
        int nodeListSize = nodeList.size();
        ExecutorService executor = Executors.newFixedThreadPool(nodeListSize);
        final CountDownLatch latch = new CountDownLatch(nodeListSize);

        for (final String nodeCandidate : nodeList) {
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    if (!nodeIsValid(nodeCandidate)) {
                        nodeList.remove(nodeCandidate);
                    }
                    latch.countDown();
                }
            });
        }

        try {
            latch.await();
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
        nodeListFiltered = true;
        log.debug("Node availables in the node list after filtering: {}", nodeList.size());
    }
}

From source file:es.urjc.etsii.code.UserSession.java

private void addFakeClients(UserSession presenterSession, JsonObject jsonMessage,
        final WebRtcEndpoint inputWebRtcEndpoint) {

    final String sessionNumber = jsonMessage.get("sessionNumber").getAsString();
    final int fakeClients = jsonMessage.getAsJsonPrimitive("fakeClients").getAsInt();
    final int timeBetweenClients = jsonMessage.getAsJsonPrimitive("timeBetweenClients").getAsInt();
    final boolean removeFakeClients = jsonMessage.getAsJsonPrimitive("removeFakeClients").getAsBoolean();
    final int playTime = jsonMessage.getAsJsonPrimitive("playTime").getAsInt();
    final String processing = jsonMessage.get("processing").getAsString();
    final int fakeClientsPerInstance = jsonMessage.getAsJsonPrimitive("fakeClientsPerInstance").getAsInt();

    new Thread(new Runnable() {
        @Override
        public void run() {
            log.info("[Session number {} - WS session {}] Adding {} fake clients (rate {} ms) ", sessionNumber,
                    wsSession.getId(), fakeClients, timeBetweenClients);

            final CountDownLatch latch = new CountDownLatch(fakeClients);
            ExecutorService executor = Executors.newFixedThreadPool(fakeClients);
            for (int i = 0; i < fakeClients; i++) {
                waitMs(timeBetweenClients);
                final int j = i + 1;
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            addFakeClient(j, processing, inputWebRtcEndpoint, fakeClientsPerInstance);
                        } finally {
                            latch.countDown();
                        }
                    }
                });
            }

            try {
                latch.await();
            } catch (InterruptedException e) {
                log.warn("Exception waiting thread pool to be finished", e);
            }
            executor.shutdown();

            if (removeFakeClients) {
                log.info(
                        "[Session number {} - WS session {}] Waiting {} seconds with all fake clients connected",
                        sessionNumber, wsSession.getId(), playTime);

                for (List<MediaElement> list : mediaElementsInFakeMediaPipelineMap.values()) {
                    waitMs(playTime * 1000);
                    for (int i = 0; i < list.size() / 3; i++) {
                        if (i != 0) {
                            waitMs(timeBetweenClients);
                        }
                        log.info("[Session number {} - WS session {}] Releasing fake viewer {}", sessionNumber,
                                wsSession.getId(), i);
                        for (int j = 0; j < 3; j++) {
                            MediaElement mediaElement = list.get(3 * i + j);
                            if (mediaElement != null) {
                                log.debug("[Session number {} - WS session {}] Releasing {}", sessionNumber,
                                        wsSession.getId(), mediaElement);
                                mediaElement.release();
                                mediaElement = null;
                            }
                        }
                    }
                }
                mediaElementsInFakeMediaPipelineMap.clear();
                releaseFakeMediaPipeline();
            }
        }
    }).start();
}

From source file:org.wso2.carbon.bps.perf.rest.RestClientTest.java

public void execute() throws Exception {

    serverURL = config.getProperty("serverURL");
    ActivitiRestClient pretestClient = new ActivitiRestClient(serverURL, 1);
    JSONObject processDefs = pretestClient.getProcessDefinitions();
    try {
        JSONArray defs = processDefs.getJSONArray("data");
        for (int defNumber = 0; defNumber < defs.length(); defNumber++) {
            JSONObject def = defs.getJSONObject(defNumber);
            String pid = def.getString("id");
            String pkey = def.getString("key");
            processKeytoId.put(pkey, pid);
        }
    } catch (JSONException e) {
        log.error("Failed to get process definitions from the server: " + serverURL
                + ". Process definitions response: " + processDefs.toString());
    }

    instanceCount = Integer.parseInt(config.getProperty("instances"));

    List<Integer> threadNumbers = new ArrayList<>();
    String threadsProp = config.getProperty("threads");
    String[] threadParts = threadsProp.split(",");
    for (String threadPart : threadParts) {
        int threadCount = Integer.parseInt(threadPart.trim());
        threadNumbers.add(threadCount);
    }

    sleepTime = Integer.parseInt(config.getProperty("sleep"));
    outPath = config.getProperty("results");
    File outFolder = new File(outPath);
    if (!outFolder.exists()) {
        log.info("Results folder " + outFolder.getAbsolutePath() + " does not exist. Creating a new folder...");
        outFolder.mkdirs();
    }
    File testReportFile = new File(outFolder, "summary.csv");
    StringBuffer summaryBuffer = new StringBuffer();
    summaryBuffer.append("Server URL," + serverURL + "\n");
    summaryBuffer.append("Number of process instances," + instanceCount + "\n");
    summaryBuffer.append("Number of threads," + Arrays.toString(threadNumbers.toArray()) + "\n\n\n");
    log.info("Test configuration - \n" + summaryBuffer.toString());
    summaryBuffer.append("Process ID,Threads,Total time,TPS,Average execution time\n\n");
    FileUtils.write(testReportFile, summaryBuffer.toString());

    List<ProcessConfig> processConfigs = new ArrayList<>();
    String processRef = "process";
    Set<String> processPropsNames = config.stringPropertyNames();
    for (String processPropName : processPropsNames) {
        if (processPropName.startsWith(processRef)) {
            String processProp = config.getProperty(processPropName);
            ProcessConfig processConfig = new ProcessConfig(processProp, processKeytoId);
            processConfigs.add(processConfig);
            log.info("Test configuration created for the process " + processConfig.toString());
        }
    }

    boolean testFailures = false;
    long allTestsStartTime = System.currentTimeMillis();
    int numTotalTests = processConfigs.size() * threadNumbers.size();
    int numCompletedTests = 0;

    List<String> completedProcessNames = new ArrayList<>();
    log.info("Starting performance tests...");
    for (ProcessConfig processConfig : processConfigs) {
        log.info("Starting tests for process " + processConfig.getId());

        for (int numTreads : threadNumbers) {
            log.info("Starting test for process " + processConfig.getId() + " with " + numTreads
                    + " threads...");
            ActivitiRestClient client = new ActivitiRestClient(serverURL, numTreads);

            List<RestProcessExecutor> processExecutors = new ArrayList<>(instanceCount);
            ExecutorService executorService = Executors.newFixedThreadPool(numTreads);

            long stime = System.currentTimeMillis();
            for (int i = 0; i < instanceCount; i++) {
                RestProcessExecutor processExecutor = new RestProcessExecutor(null, processConfig.getId(),
                        processConfig.getStartupVariables(), client, i);
                processExecutors.add(processExecutor);
                executorService.execute(processExecutor);
            }

            executorService.shutdown();
            try {
                executorService.awaitTermination(1, TimeUnit.HOURS);
            } catch (InterruptedException e) {
                String msg = "Error occurred while waiting for executors to terminate.";
                log.error(msg, e);
            }
            long etime = System.currentTimeMillis();

            StringBuffer buf = new StringBuffer();
            double totalDuration = 0;
            buf.append("Instance durations for process: " + processConfig.getId() + "\n");
            for (RestProcessExecutor processExecutor : processExecutors) {
                testFailures = processExecutor.isFailed();
                if (testFailures) {
                    break;
                }

                buf.append(processExecutor.getExternalDuration() + "\n");
                totalDuration += processExecutor.getExternalDuration();
            }

            if (!testFailures) {
                double externalTPS = (double) instanceCount * 1000 / (double) (etime - stime);
                externalTPS = UMath.round(externalTPS, 3);

                double avgExeTime = totalDuration / instanceCount;
                avgExeTime = UMath.round(avgExeTime, 3);

                log.info("Test for process " + processConfig.getId() + " with " + numTreads
                        + " threads completed with duration: " + (etime - stime) + " ms | TPS: " + externalTPS
                        + " | Average execution time: " + avgExeTime);
                String processRecord = processConfig.getId() + "," + numTreads + "," + (etime - stime) + ","
                        + externalTPS + "," + avgExeTime + "\n";
                FileWriter fileWriter = new FileWriter(testReportFile, true);
                fileWriter.write(processRecord);
                fileWriter.close();

                buf.append("\n\nTPS," + externalTPS + "\n\n");
                buf.append("\n\nAverage execution time," + avgExeTime + " ms\n\n");

                File processReportFile = new File(outFolder, processConfig.getId() + ".csv");
                FileUtils.write(processReportFile, buf.toString());
                client.close();

                numCompletedTests++;
                double testingTime = System.currentTimeMillis() - allTestsStartTime;
                double testingTimeMinutes = UMath.round(testingTime / (1000 * 60), 2);
                double testingTimeHours = UMath.round(testingTime / (1000 * 60 * 60), 2);

                double remainingTime = (testingTime / numCompletedTests) * (numTotalTests - numCompletedTests);
                double remainingTimeMinutes = UMath.round(remainingTime / (1000 * 60), 2);
                double remainingTimeHours = UMath.round(remainingTime / (1000 * 60 * 60), 2);
                log.info("Completed test for process " + processConfig.getId() + " with " + numTreads
                        + " threads.");
                log.info(numCompletedTests + " out of " + numTotalTests + " completed in " + testingTimeMinutes
                        + " minutes (" + testingTimeHours + " hours). Estimated remaining time: "
                        + remainingTimeMinutes + " minutes (" + remainingTimeHours + " hours)");

                //                    client.undeploy();
                //                    client.deploy();

                completedProcessNames.add("Process: " + processConfig.getId() + " | Threads: " + numTreads);
                log.info("Waiting " + sleepTime + " ms before the next test");
                Thread.sleep(sleepTime);
            } else {
                log.error("Test for process " + processConfig.getId() + " with " + numTreads
                        + " failed. See client and server logs for more information.");
                break; // terminate tests for this process with other threads
            }
        }

        if (!testFailures) {
            log.info("Completed tests for process " + processConfig.getId());
        } else {
            log.error("At least one test for the process " + processConfig.getId()
                    + " has failed. Test suite will be terminated.");
            StringBuffer retryMessage = new StringBuffer();
            retryMessage.append("Below tests were completed successfully:\n");
            for (String completedProcessName : completedProcessNames) {
                retryMessage.append(completedProcessName + "\n");
            }
            log.info(retryMessage.toString());
            break; // terminate tests for other processes
        }
    }
}

From source file:com.wavemaker.tools.apidocs.tools.spring.SpringSwaggerParserTest.java

@Test
public void testMultiThread3() throws InterruptedException {
    ExecutorService service = Executors.newFixedThreadPool(4);
    List<Class<?>> controllerClasses = new ArrayList<>();
    controllerClasses.add(VacationController.class);
    controllerClasses.add(com.wavemaker.tools.apidocs.tools.spring.controller2.VacationController.class);
    final Pattern namePattern = Pattern.compile("(\\w)*.(\\w*)$");
    for (int i = 0; i < 5; i++) {
        for (final Class<?> controllerClass : controllerClasses) {
            final int finalI = i;
            service.execute(new Runnable() {
                public void run() {
                    Swagger swagger;
                    try {
                        swagger = runForSingleClass(controllerClass);
                    } catch (SwaggerParserException e) {
                        throw new RuntimeException("Exception while parsing class:" + controllerClass.getName(),
                                e);
                    }
                    Assert.assertNotNull(swagger);
                    assertEquals(1, swagger.getTags().size());
                    assertEquals(controllerClass.getName(), swagger.getTags().get(0).getFullyQualifiedName());
                    try {
                        String name = controllerClass.getName();
                        Matcher nameMatcher = namePattern.matcher(name);
                        if (nameMatcher.find()) {
                            name = nameMatcher.group(0);
                        }
                        name = name.replace('.', '_');

                        writeToFile(swagger, "mul_package_class_" + name + "_" + finalI + "" + ".json");
                    } catch (IOException e) {
                        throw new RuntimeException("Error while writing to file", e);
                    }
                }
            });
        }
    }

    service.shutdown();
    service.awaitTermination(10, TimeUnit.SECONDS);
}

From source file:ubic.gemma.apps.Blat.java

/**
 * @param querySequenceFile
 * @param outputPath
 * @return processed results.
 */
private Collection<BlatResult> jniGfClientCall(final File querySequenceFile, final String outputPath,
        final int portToUse) throws IOException {
    try {
        log.debug("Starting blat run");

        FutureTask<Boolean> blatThread = new FutureTask<Boolean>(new Callable<Boolean>() {
            @Override
            public Boolean call() {
                GfClientCall(host, Integer.toString(portToUse), seqDir, querySequenceFile.getPath(),
                        outputPath);
                return true;
            }
        });

        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.execute(blatThread);
        executor.shutdown();

        // wait...
        StopWatch overallWatch = new StopWatch();
        overallWatch.start();

        while (!blatThread.isDone()) {
            try {
                Thread.sleep(BLAT_UPDATE_INTERVAL_MS);
            } catch (InterruptedException ie) {
                throw new RuntimeException(ie);
            }

            synchronized (outputPath) {
                File outputFile = new File(outputPath);
                Long size = outputFile.length();
                NumberFormat nf = new DecimalFormat();
                nf.setMaximumFractionDigits(2);
                String minutes = TimeUtil.getMinutesElapsed(overallWatch);
                log.info("BLAT output so far: " + nf.format(size / 1024.0) + " kb (" + minutes
                        + " minutes elapsed)");
            }

        }

        overallWatch.stop();
        String minutes = TimeUtil.getMinutesElapsed(overallWatch);
        log.info("Blat took a total of " + minutes + " minutes");

    } catch (UnsatisfiedLinkError e) {
        log.error(e, e);
        log.info("Falling back on exec()");
        this.execGfClient(querySequenceFile, outputPath, portToUse);
    }
    return this.processPsl(outputPath, null);
}

From source file:org.commonjava.util.partyline.ReadLockOnDerivativeDontPreventMainFileReadTest.java

/**
 * Test that verifies that concurrent read locks on different files will not affect each other's reading process.
 * This sets up a script of events for multiple files, where:
 * <ol>
 *     <li>Multiple reads happen simultaneously, with read locks on distinct files</li>
 *     <li>Reading processes for different files are isolated</li>
 * </ol>
 * @throws Exception
 */
@BMRules(rules = {
        // wait for first openInputStream call to exit
        @BMRule(name = "second openInputStream", targetClass = "JoinableFileManager", targetMethod = "openInputStream", targetLocation = "ENTRY", binding = "name:String = $1.getName()", condition = "name.equals(\"bar-1.pom\")", action = "debug(\">>>wait for service enter first openInputStream.\");"
                + "waitFor(\"first openInputStream\");" + "debug(\"<<<proceed with second openInputStream.\")"),

        // setup the trigger to signal second openInputStream when the first openInputStream exits
        @BMRule(name = "first openInputStream", targetClass = "JoinableFileManager", targetMethod = "openInputStream", targetLocation = "ENTRY", binding = "name:String = $1.getName()", condition = "name.equals(\"bar-1.pom.sha1\")", action = "debug(\"<<<signalling second openInputStream.\"); "
                + "signalWake(\"first openInputStream\", true);"
                + "debug(\"<<<signalled second openInputStream.\")") })
@Test
@BMUnitConfig(debug = true)
public void run() throws Exception {
    final ExecutorService execs = Executors.newFixedThreadPool(2);
    final CountDownLatch latch = new CountDownLatch(2);
    final JoinableFileManager manager = new JoinableFileManager();

    final String main = "main";
    final String derivative = "derivative";

    final File d = temp.newFolder();
    final File mFile = new File(d, "org/foo/bar/1/bar-1.pom");
    final File dFile = new File(d, "org/foo/bar/1/bar-1.pom.sha1");

    FileUtils.write(mFile, main);
    FileUtils.write(dFile, derivative);

    Map<String, String> returning = new HashMap<String, String>();

    for (int i = 0; i < 2; i++) {
        final int k = i;
        execs.execute(() -> {
            File file = null;
            String name = "";

            switch (k) {
            case 0:
                file = mFile;
                name = main;
                break;
            case 1:
                file = dFile;
                name = derivative;
                break;
            }
            Thread.currentThread().setName(name);
            try (InputStream s = manager.openInputStream(file)) {
                returning.put(name, IOUtils.toString(s));
                s.close();
            } catch (final Exception e) {
                e.printStackTrace();
                fail("Failed to open inputStream: " + e.getMessage());
            } finally {
                latch.countDown();
            }
        });
        Thread.sleep(1000); // make the fragile BMRule always work
    }

    latch.await();

    // note reporting main null error
    final String mainStream = returning.get(main);
    assertThat(mainStream, equalTo(main));
}

From source file:org.elasticsearch.xpack.watcher.common.http.HttpClientTests.java

public void testThatHttpClientFailsOnNonHttpResponse() throws Exception {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    AtomicReference<Exception> hasExceptionHappened = new AtomicReference();
    try (ServerSocket serverSocket = new MockServerSocket(0, 50, InetAddress.getByName("localhost"))) {
        executor.execute(() -> {
            try (Socket socket = serverSocket.accept()) {
                BufferedReader in = new BufferedReader(
                        new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
                in.readLine();
                socket.getOutputStream().write("This is not a HTTP response".getBytes(StandardCharsets.UTF_8));
                socket.getOutputStream().flush();
            } catch (Exception e) {
                hasExceptionHappened.set(e);
                logger.error((Supplier<?>) () -> new ParameterizedMessage("Error in writing non HTTP response"),
                        e);
            }
        });
        HttpRequest request = HttpRequest.builder("localhost", serverSocket.getLocalPort()).path("/").build();
        expectThrows(ClientProtocolException.class, () -> httpClient.execute(request));
        assertThat("A server side exception occured, but shouldn't", hasExceptionHappened.get(),
                is(nullValue()));
    } finally {
        terminate(executor);
    }
}

From source file:org.springframework.integration.ip.tcp.connection.TcpNioConnectionTests.java

@Test
public void testByteArrayBlocksForZeroRead() throws Exception {
    SocketChannel socketChannel = mock(SocketChannel.class);
    Socket socket = mock(Socket.class);
    when(socketChannel.socket()).thenReturn(socket);
    TcpNioConnection connection = new TcpNioConnection(socketChannel, false, false, null, null);
    final TcpNioConnection.ChannelInputStream stream = (ChannelInputStream) new DirectFieldAccessor(connection)
            .getPropertyValue("channelInputStream");
    final CountDownLatch latch = new CountDownLatch(1);
    final byte[] out = new byte[4];
    ExecutorService exec = Executors.newSingleThreadExecutor();
    exec.execute(new Runnable() {

        @Override
        public void run() {
            try {
                stream.read(out);
            } catch (IOException e) {
                e.printStackTrace();
            }
            latch.countDown();
        }
    });
    Thread.sleep(1000);
    assertEquals(0x00, out[0]);
    stream.write(ByteBuffer.wrap("foo".getBytes()));
    assertTrue(latch.await(10, TimeUnit.SECONDS));
    assertEquals("foo\u0000", new String(out));
}