Example usage for java.util Collections synchronizedList

List of usage examples for java.util Collections synchronizedList

Introduction

On this page you can find example usage for java.util.Collections.synchronizedList.

Prototype

public static <T> List<T> synchronizedList(List<T> list) 

Document

Returns a synchronized (thread-safe) list backed by the specified list.
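
Before looking at the project examples below, a minimal, self-contained sketch of the basic contract may help: individual calls such as add or size on the wrapper are thread-safe, but iteration is not atomic, so the Javadoc requires callers to synchronize on the returned list while iterating. The class and variable names here are illustrative only.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SynchronizedListSketch {
    public static void main(String[] args) {
        // Wrap a plain ArrayList; each individual call on "names" is now thread-safe.
        List<String> names = Collections.synchronizedList(new ArrayList<String>());
        names.add("alice");
        names.add("bob");

        // Iteration is not atomic: hold the list's own monitor for the whole traversal,
        // otherwise another thread could modify the list mid-iteration.
        synchronized (names) {
            for (String name : names) {
                System.out.println(name);
            }
        }
    }
}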

Usage

From source file:org.mule.modules.jmsbatchmessaging.JmsBatchMessagingConnector.java

public List<String> getListOfMessages(Map<String, List<String>> map, String queue) {
    List<String> messages = map.get(queue);
    if (messages == null) {
        messages = Collections.synchronizedList(new ArrayList<String>());
        map.put(queue, messages);
    }
    return messages;
}
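
On Java 8 and later, the same get-or-create logic can be expressed with Map.computeIfAbsent; a minimal sketch reusing the names from the example above (note that the enclosing map itself would still need to be a concurrent map, such as ConcurrentHashMap, for concurrent callers):

public List<String> getListOfMessages(Map<String, List<String>> map, String queue) {
    // Creates and stores the synchronized list only when the queue key is missing.
    return map.computeIfAbsent(queue, k -> Collections.synchronizedList(new ArrayList<String>()));
}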

From source file:com.romeikat.datamessie.core.sync.service.template.withIdAndVersion.EntityWithIdAndVersionSynchronizer.java

private List<Long> loadLhsIds(final List<Long> rhsIds) {
    final List<Long> lhsIds = Collections.synchronizedList(Lists.newArrayListWithExpectedSize(rhsIds.size()));
    final List<List<Long>> rhsIdsBatches = Lists.partition(rhsIds, batchSizeEntities);
    new ParallelProcessing<List<Long>>(sessionFactorySyncSource, rhsIdsBatches, parallelismFactor) {
        @Override
        public void doProcessing(final HibernateSessionProvider lhsSessionProvider,
                final List<Long> rhsIdsBatch) {
            final Collection<Long> lhsIdsBatch = dao.getIds(lhsSessionProvider.getStatelessSession(),
                    rhsIdsBatch);
            lhsIds.addAll(lhsIdsBatch);
        }
    };
    return lhsIds;
}

From source file:org.apache.flume.channel.recoverable.memory.TestRecoverableMemoryChannel.java

@Test
public void testThreaded() throws IOException, InterruptedException {
    int numThreads = 10;
    final CountDownLatch producerStopLatch = new CountDownLatch(numThreads);
    // due to limited capacity we must wait for consumers to start to put
    final CountDownLatch consumerStartLatch = new CountDownLatch(numThreads);
    final CountDownLatch consumerStopLatch = new CountDownLatch(numThreads);
    final List<Exception> errors = Collections.synchronizedList(new ArrayList<Exception>());
    final List<String> expected = Collections.synchronizedList(new ArrayList<String>());
    final List<String> actual = Collections.synchronizedList(new ArrayList<String>());
    for (int i = 0; i < numThreads; i++) {
        final int id = i;
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    consumerStartLatch.await();
                    if (id % 2 == 0) {
                        expected.addAll(putEvents(channel, Integer.toString(id), 1, 5));
                    } else {
                        expected.addAll(putEvents(channel, Integer.toString(id), 5, 5));
                    }
                    logger.info("Completed some puts " + expected.size());
                } catch (Exception e) {
                    logger.error("Error doing puts", e);
                    errors.add(e);
                } finally {
                    producerStopLatch.countDown();
                }
            }
        };
        t.setDaemon(true);
        t.start();
    }
    for (int i = 0; i < numThreads; i++) {
        final int id = i;
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    consumerStartLatch.countDown();
                    consumerStartLatch.await();
                    while (!producerStopLatch.await(1, TimeUnit.SECONDS) || expected.size() > actual.size()) {
                        if (id % 2 == 0) {
                            actual.addAll(takeEvents(channel, 1, Integer.MAX_VALUE));
                        } else {
                            actual.addAll(takeEvents(channel, 5, Integer.MAX_VALUE));
                        }
                    }
                    if (actual.isEmpty()) {
                        logger.error("Found nothing!");
                    } else {
                        logger.info("Completed some takes " + actual.size());
                    }
                } catch (Exception e) {
                    logger.error("Error doing takes", e);
                    errors.add(e);
                } finally {
                    consumerStopLatch.countDown();
                }
            }
        };
        t.setDaemon(true);
        t.start();
    }
    Assert.assertTrue("Timed out waiting for producers", producerStopLatch.await(30, TimeUnit.SECONDS));
    Assert.assertTrue("Timed out waiting for consumer", consumerStopLatch.await(30, TimeUnit.SECONDS));
    Assert.assertEquals(Collections.EMPTY_LIST, errors);
    Collections.sort(expected);
    Collections.sort(actual);
    Assert.assertEquals(expected, actual);
}
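
The test above also shows a common pattern: worker threads record any failures into a shared synchronized list, and the main thread asserts that the list is empty once all workers have finished. A stripped-down sketch of that pattern using an ExecutorService (the task body is a placeholder):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ErrorCollectingSketch {
    public static void main(String[] args) throws InterruptedException {
        final List<Exception> errors = Collections.synchronizedList(new ArrayList<Exception>());
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            pool.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        // placeholder for real work that might throw
                    } catch (Exception e) {
                        errors.add(e); // safe to call from any worker thread
                    }
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(30, TimeUnit.SECONDS);
        System.out.println("Errors: " + errors);
    }
}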

From source file:fr.efl.chaine.xslt.GauloisPipe.java

/**
 * Launch the pipe.
 *
 * @throws fr.efl.chaine.xslt.InvalidSyntaxException If config's syntax is incorrect
 * @throws java.io.FileNotFoundException If a file is not found...
 * @throws net.sf.saxon.s9api.SaxonApiException If a SaxonApi problem occurs
 * @throws java.net.URISyntaxException Because MVN forces to have comments...
 */
@SuppressWarnings("ThrowFromFinallyBlock")
public void launch() throws InvalidSyntaxException, FileNotFoundException, SaxonApiException,
        URISyntaxException, IOException {
    initDebugDirectory();
    Runtime.getRuntime().addShutdownHook(new Thread(new ErrorCollector(errors)));
    long start = System.currentTimeMillis();
    errors = Collections.synchronizedList(new ArrayList<Exception>());
    documentCache = new DocumentCache(config.getMaxDocumentCacheSize());
    if (this.messageListenerclass != null) {
        try {
            this.messageListener = this.messageListenerclass.newInstance();
        } catch (InstantiationException | IllegalAccessException ex) {
            System.err.println("[WARN] Fail to instanciate " + this.messageListenerclass.getName());
            ex.printStackTrace(System.err);
        }
    }
    boolean retCode = true;
    try {
        Configuration saxonConfig = configurationFactory.getConfiguration();
        LOGGER.debug("configuration is a " + saxonConfig.getClass().getName());
        // this is now done in constructor
        // saxonConfig.setURIResolver(buildUriResolver(saxonConfig.getURIResolver()));
        processor = new Processor(saxonConfig);
        xsltCompiler = processor.newXsltCompiler();
        builder = processor.newDocumentBuilder();

        List<CfgFile> sourceFiles = config.getSources().getFiles();
        LOGGER.info("[" + instanceName + "] works on {} files", sourceFiles.size());

        if (config.getPipe().getTraceOutput() != null) {
            traceListener = buildTraceListener(config.getPipe().getTraceOutput());
        }

        if (config.getPipe().getNbThreads() > 1) {
            if (config.hasFilesOverMultiThreadLimit()) {
                List<ParametrableFile> files = new ArrayList<>(sourceFiles.size());
                for (CfgFile f : config.getSources()
                        .getFilesOverLimit(config.getPipe().getMultithreadMaxSourceSize())) {
                    files.add(resolveInputFile(f));
                }
                if (!files.isEmpty()) {
                    LOGGER.info("[" + instanceName + "] Running mono-thread for {} huge files", files.size());
                    retCode = executesPipeOnMultiThread(config.getPipe(), files, 1,
                            config.getSources().getListener());
                }
            }
            List<ParametrableFile> files = new ArrayList<>(sourceFiles.size());
            for (CfgFile f : config.getSources()
                    .getFilesUnderLimit(config.getPipe().getMultithreadMaxSourceSize())) {
                files.add(resolveInputFile(f));
            }
            if (!files.isEmpty() || config.getSources().getListener() != null) {
                LOGGER.info("[" + instanceName + "] Running multi-thread for {} regular-size files",
                        files.size());
                retCode = executesPipeOnMultiThread(config.getPipe(), files, config.getPipe().getNbThreads(),
                        config.getSources().getListener());
            }
        } else {
            List<ParametrableFile> files = new ArrayList<>(sourceFiles.size());
            for (CfgFile f : sourceFiles) {
                files.add(resolveInputFile(f));
            }
            LOGGER.info("[" + instanceName + "] Running mono-thread on all {} files", files.size());
            retCode = executesPipeOnMultiThread(config.getPipe(), files, 1, config.getSources().getListener());
        }

    } catch (Throwable e) {
        LOGGER.warn("[" + instanceName + "] " + e.getMessage(), e);
        // exit with non-zero error codes
        throw e;
    } finally {
        if (!retCode) {
            throw new SaxonApiException("An error occurs. See previous logs.");
        }
    }
    try {
        if (config.getSources().getListener() == null) {
            long duration = System.currentTimeMillis() - start;
            Duration duree = DatatypeFactory.newInstance().newDuration(duration);
            LOGGER.info("[" + instanceName + "] Process terminated: " + duree.toString());
        }
    } catch (Exception ex) {
        LOGGER.info("[" + instanceName + "] Process terminated.");
    }
}

From source file:org.apache.falcon.service.BacklogMetricEmitterService.java

private void addToBacklog(Entity entity) {
    if (entity.getEntityType() != EntityType.PROCESS) {
        return;
    }
    Process process = (Process) entity;
    if (process.getSla() == null) {
        return;
    }
    entityBacklogs.putIfAbsent(entity, Collections.synchronizedList(new ArrayList<MetricInfo>()));
}

From source file:com.gargoylesoftware.htmlunit.javascript.background.JavaScriptJobManagerTest.java

/**
 * Test for bug 1728883 that makes sure closing a window prevents a
 * recursive setTimeout from continuing forever.
 *
 * @throws Exception if the test fails
 */
@Test
public void interruptAllWithRecursiveSetTimeout() throws Exception {
    final String content = "<html>\n" + "<head>\n" + "  <title>test</title>\n" + "  <script>\n"
            + "    var threadID;\n" + "    function test() {\n" + "      alert('ping');\n"
            + "      threadID = setTimeout(test, 5);\n" + "    }\n" + "  </script>\n" + "</head>\n"
            + "<body onload='test()'>\n" + "</body>\n" + "</html>";

    final List<String> collectedAlerts = Collections.synchronizedList(new ArrayList<String>());
    final HtmlPage page = loadPage(content, collectedAlerts);
    final JavaScriptJobManager jobManager = page.getEnclosingWindow().getJobManager();
    assertNotNull(jobManager);

    // Not perfect, but 100 chances to start should be enough for a loaded system
    Thread.sleep(500);

    Assert.assertFalse("At least one alert should have fired by now", collectedAlerts.isEmpty());
    ((TopLevelWindow) page.getEnclosingWindow()).close();

    // 100 chances to stop
    jobManager.waitForJobs(500);

    final int finalValue = collectedAlerts.size();

    // 100 chances to fail
    jobManager.waitForJobs(500);

    Assert.assertEquals("No new alerts should have happened", finalValue, collectedAlerts.size());
}

From source file:op.care.med.inventory.PnlInventory.java

private void initPanel() {
    cpMap = Collections.synchronizedMap(new HashMap<String, CollapsiblePane>());
    cpListener = Collections.synchronizedMap(new HashMap<String, CollapsiblePaneAdapter>());
    lstInventories = Collections.synchronizedList(new ArrayList<MedInventory>());

    mapKey2ClosedToggleButton = Collections.synchronizedMap(new HashMap<String, JToggleButton>());
    color1 = SYSConst.yellow1;
    color2 = SYSConst.greyscale;

    //        linemap = Collections.synchronizedMap(new HashMap<MedStockTransaction, JPanel>());
    prepareSearchArea();
}

From source file:org.apache.syncope.core.persistence.dao.impl.SubjectSearchDAOImpl.java

@Override
public <T extends AbstractSubject> boolean matches(final T subject, final SearchCond searchCondition,
        final SubjectType type) {

    List<Object> parameters = Collections.synchronizedList(new ArrayList<Object>());

    // 1. get the query string from the search condition
    SearchSupport svs = new SearchSupport(type);
    StringBuilder queryString = getQuery(searchCondition, parameters, type, svs);

    boolean matches;
    if (queryString.length() == 0) {
        // Could be empty: got into a role search with a single membership condition ...
        matches = false;
    } else {
        // 2. take into account the passed user
        queryString.insert(0, "SELECT u.subject_id FROM (");
        queryString.append(") u WHERE subject_id=?").append(setParameter(parameters, subject.getId()));

        // 3. prepare the search query
        Query query = entityManager.createNativeQuery(queryString.toString());

        // 4. populate the search query with parameter values
        fillWithParameters(query, parameters);

        // 5. executes query
        matches = !query.getResultList().isEmpty();
    }

    return matches;
}

From source file:org.apache.tajo.worker.Task.java

public void initPlan() throws IOException {
    plan = LogicalNodeDeserializer.deserialize(queryContext, request.getPlan());
    LogicalNode[] scanNode = PlannerUtil.findAllNodes(plan, NodeType.SCAN);
    if (scanNode != null) {
        for (LogicalNode node : scanNode) {
            ScanNode scan = (ScanNode) node;
            descs.put(scan.getCanonicalName(), scan.getTableDesc());
        }
    }

    LogicalNode[] partitionScanNode = PlannerUtil.findAllNodes(plan, NodeType.PARTITIONS_SCAN);
    if (partitionScanNode != null) {
        for (LogicalNode node : partitionScanNode) {
            PartitionedTableScanNode scan = (PartitionedTableScanNode) node;
            descs.put(scan.getCanonicalName(), scan.getTableDesc());
        }
    }

    interQuery = request.getProto().getInterQuery();
    if (interQuery) {
        context.setInterQuery();
        this.shuffleType = context.getDataChannel().getShuffleType();

        if (shuffleType == ShuffleType.RANGE_SHUFFLE) {
            SortNode sortNode = PlannerUtil.findTopNode(plan, NodeType.SORT);
            this.finalSchema = PlannerUtil.sortSpecsToSchema(sortNode.getSortKeys());
            this.sortComp = new BaseTupleComparator(finalSchema, sortNode.getSortKeys());
        }
    } else {
        Path outFilePath = ((FileStorageManager) StorageManager.getFileStorageManager(systemConf))
                .getAppenderFilePath(taskId, queryContext.getStagingDir());
        LOG.info("Output File Path: " + outFilePath);
        context.setOutputPath(outFilePath);
    }

    this.localChunks = Collections.synchronizedList(new ArrayList<FileChunk>());
    LOG.info("==================================");
    LOG.info("* Stage " + request.getId() + " is initialized");
    LOG.info("* InterQuery: " + interQuery + (interQuery ? ", Use " + this.shuffleType + " shuffle" : "")
            + ", Fragments (num: " + request.getFragments().size() + ")" + ", Fetches (total:"
            + request.getFetches().size() + ") :");

    if (LOG.isDebugEnabled()) {
        for (FetchImpl f : request.getFetches()) {
            LOG.debug("Table Id: " + f.getName() + ", Simple URIs: " + f.getSimpleURIs());
        }
    }
    LOG.info("* Local task dir: " + taskDir);
    if (LOG.isDebugEnabled()) {
        LOG.debug("* plan:\n");
        LOG.debug(plan.toString());
    }
    LOG.info("==================================");
}

From source file:org.apache.tajo.worker.LegacyTaskImpl.java

public void initPlan() throws IOException {
    plan = LogicalNodeDeserializer.deserialize(queryContext, context.getEvalContext(), request.getPlan());
    LogicalNode[] scanNode = PlannerUtil.findAllNodes(plan, NodeType.SCAN);
    if (scanNode != null) {
        for (LogicalNode node : scanNode) {
            ScanNode scan = (ScanNode) node;
            descs.put(scan.getCanonicalName(), scan.getTableDesc());
        }
    }

    LogicalNode[] partitionScanNode = PlannerUtil.findAllNodes(plan, NodeType.PARTITIONS_SCAN);
    if (partitionScanNode != null) {
        for (LogicalNode node : partitionScanNode) {
            PartitionedTableScanNode scan = (PartitionedTableScanNode) node;
            descs.put(scan.getCanonicalName(), scan.getTableDesc());
        }
    }

    interQuery = request.getProto().getInterQuery();
    if (interQuery) {
        context.setInterQuery();
        this.shuffleType = context.getDataChannel().getShuffleType();

        if (shuffleType == ShuffleType.RANGE_SHUFFLE) {
            SortNode sortNode = PlannerUtil.findTopNode(plan, NodeType.SORT);
            this.finalSchema = PlannerUtil.sortSpecsToSchema(sortNode.getSortKeys());
            this.sortComp = new BaseTupleComparator(finalSchema, sortNode.getSortKeys());
        }
    } else {
        Path outFilePath = ((FileTablespace) TablespaceManager.get(queryContext.getStagingDir().toUri()).get())
                .getAppenderFilePath(getId(), queryContext.getStagingDir());
        LOG.info("Output File Path: " + outFilePath);
        context.setOutputPath(outFilePath);
    }

    this.localChunks = Collections.synchronizedList(new ArrayList<FileChunk>());
    LOG.info("==================================");
    LOG.info("* Stage " + request.getId() + " is initialized");
    LOG.info("* InterQuery: " + interQuery + (interQuery ? ", Use " + this.shuffleType + " shuffle" : "")
            + ", Fragments (num: " + request.getFragments().size() + ")" + ", Fetches (total:"
            + request.getFetches().size() + ") :");

    if (LOG.isDebugEnabled()) {
        for (FetchImpl f : request.getFetches()) {
            LOG.debug("Table Id: " + f.getName() + ", Simple URIs: " + f.getSimpleURIs());
        }
    }
    LOG.info("* Local task dir: " + taskDir);
    if (LOG.isDebugEnabled()) {
        LOG.debug("* plan:\n");
        LOG.debug(plan.toString());
    }
    LOG.info("==================================");
}