Example usage for java.util Collections synchronizedMap

Introduction

This page collects usage examples for java.util.Collections.synchronizedMap.

Prototype

public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m) 

Document

Returns a synchronized (thread-safe) map backed by the specified map.
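
Before the usage examples, a minimal sketch of the general pattern (class and variable names are illustrative): every individual call on the returned view is synchronized on the wrapper itself, but iterating over its collection views is not atomic and must be guarded manually, as the Javadoc warns.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedMapExample {
    public static void main(String[] args) {
        // Each individual call (put, get, remove, ...) on the wrapper is thread-safe.
        Map<String, Integer> counts = Collections.synchronizedMap(new HashMap<String, Integer>());
        counts.put("a", 1);
        counts.put("b", 2);

        // Iteration is NOT atomic: synchronize on the map for the whole traversal.
        synchronized (counts) {
            for (Map.Entry<String, Integer> entry : counts.entrySet()) {
                System.out.println(entry.getKey() + " = " + entry.getValue());
            }
        }
    }
}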

Usage

From source file:org.kuali.rice.kns.service.impl.SessionDocumentServiceImpl.java
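Wraps a bounded LRU cache (KualiLRUMap, capped at maxCacheSize entries) in a synchronized view during bean initialization, so the document cache can be accessed safely from multiple threads.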

@Override
@SuppressWarnings("unchecked")
public void afterPropertiesSet() throws Exception {
    cachedObjects = Collections.synchronizedMap(new KualiLRUMap(maxCacheSize));
}

From source file:annis.gui.resultview.ResultViewPanel.java
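The constructor synchronizes both the resolver cache (synchronizedMap over a HashMap) and the list of result panels (synchronizedList over a LinkedList) before building the Vaadin result view.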

public ResultViewPanel(SearchUI ui, PluginSystem ps, InstanceConfig instanceConfig,
        PagedResultQuery initialQuery) {
    this.sui = ui;
    this.tokenAnnoVisible = new TreeMap<>();
    this.ps = ps;
    this.controller = ui.getQueryController();
    this.selectedSegmentationLayer = ui.getQueryState().getBaseText().getValue();
    this.initialQuery = initialQuery;

    cacheResolver = Collections
            .synchronizedMap(new HashMap<HashSet<SingleResolverRequest>, List<ResolverEntry>>());

    resultPanelList = Collections.synchronizedList(new LinkedList<SingleResultPanel>());

    resultLayout = new CssLayout();
    resultLayout.addStyleName("result-view-css");
    Panel resultPanel = new Panel(resultLayout);
    resultPanel.setSizeFull();
    resultPanel.addStyleName(ChameleonTheme.PANEL_BORDERLESS);
    resultPanel.addStyleName("result-view-panel");

    this.instanceConfig = instanceConfig;

    setSizeFull();
    setMargin(false);

    MenuBar mbResult = new MenuBar();
    mbResult.setWidth("100%");
    mbResult.addStyleName("menu-hover");
    addComponent(mbResult);

    miSegmentation = mbResult.addItem("Base text", null);
    miTokAnnos = mbResult.addItem("Token Annotations", null);

    addComponent(resultPanel);

    setExpandRatio(mbResult, 0.0f);
    setExpandRatio(resultPanel, 1.0f);

    paging = new PagingComponent();

    addComponent(paging, 1);

    setComponentAlignment(paging, Alignment.TOP_CENTER);
    setExpandRatio(paging, 0.0f);
}

From source file:org.wings.externalizer.AbstractExternalizeManager.java
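Synchronizes the reverse lookup table from externalized resources to their identifier strings.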

public AbstractExternalizeManager() {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Externalizer scope using prefix" + prefix + "expires in " + FINAL_EXPIRES + " seconds ");
    }

    reverseExternalized = Collections.synchronizedMap(new HashMap<ExternalizedResource, String>());
    setPrefix(StringUtil.toShortestAlphaNumericString(PREFIX_TIMESLICE, 2));
}

From source file:ubc.pavlab.gotrack.beans.Cache.java
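Replaces the application-level data cache with a synchronized view at the start of @PostConstruct initialization; the remainder of the method loads species, ontology, and aggregate data through DAOs and logs memory usage along the way.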

@PostConstruct
public void init() {
    // Initialization based on managed properties can be done here, if necessary.
    log.info("Cache init");

    applicationLevelDataCache = Collections.synchronizedMap(applicationLevelDataCache);

    log.info("Used Memory: "
            + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1000000 + " MB");

    // Obtain SpeciesDAO.
    SpeciesDAO speciesDAO = daoFactoryBean.getGotrack().getSpeciesDAO();
    log.info("SpeciesDAO successfully obtained: " + speciesDAO);

    speciesList = speciesDAO.list();

    log.info("Species List successfully obtained: " + speciesList);

    // Obtain CacheDAO.
    CacheDAO cacheDAO = daoFactoryBean.getGotrack().getCacheDAO();
    log.info("CacheDAO successfully obtained: " + cacheDAO);

    evidenceCodeCategories = cacheDAO.getEvidenceCategories();

    // System.gc();
    log.info("Used Memory: "
            + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1000000 + " MB");

    goSetSizes = cacheDAO.getGOSizes();

    log.info("GO Set sizes successfully obtained");

    // System.gc();
    log.info("Used Memory: "
            + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1000000 + " MB");

    List<Integer> eds = new ArrayList<Integer>(cacheDAO.getGOEditions());
    // List<List<Integer>> edPartitions = Lists.partition( eds, 20 );
    // int cnt = 0;
    // for ( List<Integer> list : edPartitions ) {
    // Map<Integer, Set<Relationship>> tmp = cacheDAO.getOntologies( list );
    // log.info( "GO Ontologies Retrieved: " + tmp.size() );
    // for ( Entry<Integer, Set<Relationship>> relsEntry : tmp.entrySet() ) {
    // ontologies.put( relsEntry.getKey(), new GeneOntology( relsEntry.getValue() ) );
    // cnt++;
    // relsEntry.getValue().clear();
    //
    // }
    // // System.gc();
    // log.info( "GO Ontologies Loaded: " + cnt + "/" + eds.size() );
    // log.info( "Used Memory: " + ( Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory() )
    // / 1000000 + " MB" );
    //
    // }

    ontologies = cacheDAO.getOntologies();

    System.gc();
    log.info("GO Ontologies Loaded: " + ontologies.keySet().size() + "/" + eds.size());
    log.info("Used Memory: "
            + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1000000 + " MB");

    aggregates = cacheDAO.getAggregates();

    log.info("Aggregates successfully obtained");

    log.info("Used Memory: "
            + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1000000 + " MB");

    // Create speciesAverage view
    for (Species species : speciesList) {
        Integer speciesId = species.getId();
        Map<Edition, StatsEntry> a = aggregates.get(speciesId);
        HashMap<Edition, Double> sa = new HashMap<Edition, Double>();
        if (a != null) {
            for (Entry<Edition, StatsEntry> editionEntry : a.entrySet()) {
                sa.put(editionEntry.getKey(), editionEntry.getValue().getAverageDirects());
            }
        }
        speciesAverage.put(speciesId, sa);
    }

    log.info("speciesAverages successfully computed");

    log.info("Used Memory: "
            + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1000000 + " MB");

    genePopulations = cacheDAO.getPopulations();

    log.info("gene populations successfully obtained");

    log.info("Used Memory: "
            + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1000000 + " MB");

    // currentEditions = cacheDAO.getCurrentEditions();
    // log.debug( "Current Editions Size: " + currentEditions.size() );

    allEditions = cacheDAO.getAllEditions();
    log.debug("All Editions Size: " + allEditions.size());

    for (Integer species : allEditions.keySet()) {
        List<Edition> l = allEditions.get(species);
        Collections.sort(l);
        Edition ed = Iterables.getLast(l, null);
        log.debug("Current edition for species_id (" + species + "): " + ed);
        currentEditions.put(species, ed);
    }

    // log.info( "Loading accession to geneSymbol cache..." );
    // for ( Species species : speciesList ) {
    // Integer speciesId = species.getId();
    // Edition currEd = currentEditions.get( speciesId );
    //
    // if ( currEd == null ) continue;
    // log.debug( species.getCommonName() + ": " + currEd.toString() );
    // // get current accessions
    // Map<String, Accession> currAccMap = cacheDAO.getAccessions( speciesId, currEd.getEdition() );
    //
    // // Create Map of current genes
    // Map<String, Gene> currentGenes = new HashMap<>();
    //
    // for ( Accession acc : currAccMap.values() ) {
    // String symbol = acc.getSymbol();
    // Gene gene = currentGenes.get( symbol.toUpperCase() );
    // if ( gene == null ) {
    // gene = new Gene( symbol );
    // currentGenes.put( symbol.toUpperCase(), gene );
    // }
    // gene.getAccessions().add( acc );
    // gene.getSynonyms().addAll( acc.getSynonyms() );
    //
    // }
    //
    // speciesToCurrentGenes.put( speciesId, currentGenes );
    //
    // log.info( "Done loading accession to geneSymbol for species (" + speciesId + "), size: "
    // + currAccMap.size() + " unique symbols: " + currentGenes.size() );
    // }
    // log.info( "Done loading accession to geneSymbol cache..." );

    speciesToCurrentGenes = cacheDAO.getCurrentGenes();
    log.info("Done loading current genes...");

    for (Species species : speciesList) {
        if (speciesToCurrentGenes.keySet().contains(species.getId())) {
            log.info("Current gene size for species (" + species + "): "
                    + speciesToCurrentGenes.get(species.getId()).size());
        }
    }

    log.info("Used Memory: "
            + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / 1000000 + " MB");

    log.info("Cache Completed");

}

From source file:eu.itesla_project.modules.RunSecurityAnalysisTool.java
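Collects per-case security analysis results in a synchronized TreeMap (and contingency ids in a synchronized LinkedHashSet), so each imported case can record its results safely; the sorted map also keeps the CSV output ordered by case name.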

@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    String caseFormat = line.getOptionValue("case-format");
    Path caseDir = Paths.get(line.getOptionValue("case-dir"));
    String caseBaseName = line.getOptionValue("case-basename");
    Path outputCsvFile = Paths.get(line.getOptionValue("output-csv-file"));
    boolean detailed = line.hasOption("detailed");

    ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
            .newInstance().create();
    LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();

    try (ComputationManager computationManager = new LocalComputationManager()) {

        Importer importer = Importers.getImporter(caseFormat, computationManager);
        if (importer == null) {
            throw new RuntimeException("Format " + caseFormat + " not supported");
        }

        Map<String, Map<String, List<LimitViolation>>> statusPerContingencyPerCase = Collections
                .synchronizedMap(new TreeMap<>());

        Set<String> contingencyIds = Collections.synchronizedSet(new LinkedHashSet<>());

        if (caseBaseName != null) {
            System.out.println("loading case " + caseBaseName + " ...");

            // load the network
            Network network = importer.import_(new GenericReadOnlyDataSource(caseDir, caseBaseName),
                    new Properties());

            List<Contingency> contingencies = contingencyDb.getContingencies(network);
            contingencyIds.addAll(contingencies.stream().map(Contingency::getId).collect(Collectors.toList()));

            StaticSecurityAnalysis securityAnalysis = new StaticSecurityAnalysis(network, loadFlowFactory,
                    computationManager);

            statusPerContingencyPerCase.put(caseBaseName, securityAnalysis.run(contingencies));
        } else {
            Importers.importAll(caseDir, importer, true, network -> {
                try {
                    List<Contingency> contingencies = contingencyDb.getContingencies(network);
                    contingencyIds.addAll(
                            contingencies.stream().map(Contingency::getId).collect(Collectors.toList()));

                    StaticSecurityAnalysis securityAnalysis = new StaticSecurityAnalysis(network,
                            loadFlowFactory, computationManager);

                    statusPerContingencyPerCase.put(network.getId(), securityAnalysis.run(contingencies));
                } catch (Exception e) {
                    LOGGER.error(e.toString(), e);
                }
            }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + " ..."));
        }

        try (BufferedWriter writer = Files.newBufferedWriter(outputCsvFile, StandardCharsets.UTF_8)) {
            writer.write("base case");
            for (String contingencyId : contingencyIds) {
                writer.write(CSV_SEPARATOR);
                writer.write(contingencyId);
            }
            writer.newLine();

            for (Map.Entry<String, Map<String, List<LimitViolation>>> e : statusPerContingencyPerCase
                    .entrySet()) {
                String baseCaseName = e.getKey();
                Map<String, List<LimitViolation>> statusPerContingency = e.getValue();
                writer.write(baseCaseName);
                for (String contingencyId : contingencyIds) {
                    List<LimitViolation> violations = statusPerContingency.get(contingencyId);
                    writer.write(CSV_SEPARATOR);
                    writer.write(toString(violations, detailed));
                }
                writer.newLine();
            }
        }
    }
}

From source file:org.kuali.kra.rules.ErrorReporter.java
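Note the anonymous HashMap subclass: get() delegates to remove(), so reading the soft errors for a key also consumes them, and the synchronized wrapper makes that read-and-clear atomic with respect to other calls on the map.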

private Map<String, Collection<SoftError>> initializeSoftErrorMap() {
    Map<String, Collection<SoftError>> softErrorMap = Collections
            .synchronizedMap(new HashMap<String, Collection<SoftError>>() {
                private static final long serialVersionUID = 709850431504932842L;

                @Override
                public Collection<SoftError> get(Object key) {
                    return super.remove(key);
                }

            });
    GlobalVariables.getUserSession().addObject(KeyConstants.SOFT_ERRORS_KEY, softErrorMap);
    return softErrorMap;
}

From source file:com.offbynull.portmapper.upnpigd.UpnpIgdDiscovery.java
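The channel-to-address bind map is populated on the calling thread but read inside the UdpCommunicatorListener callback, so it is wrapped with synchronizedMap (as is the result set with synchronizedSet).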

private static Set<UpnpIgdDevice> scanForDevices(InetSocketAddress multicastSocketAddress,
        Set<InetAddress> localAddresses, String searchQuery) throws IOException, InterruptedException {

    final Set<UpnpIgdDevice> ret = Collections.synchronizedSet(new HashSet<UpnpIgdDevice>());
    final Map<Channel, InetAddress> bindMap = Collections.synchronizedMap(new HashMap<Channel, InetAddress>());

    UdpCommunicatorListener listener = new UdpCommunicatorListener() {

        @Override
        public void incomingPacket(InetSocketAddress sourceAddress, DatagramChannel channel,
                ByteBuffer packet) {
            byte[] inPacket = ByteBufferUtils.copyContentsToArray(packet);

            String inStr;
            try {
                inStr = new String(inPacket, 0, inPacket.length, "US-ASCII");
            } catch (UnsupportedEncodingException uee) {
                return;
            }

            Matcher matcher;

            URI url;
            if ((matcher = LOCATION_PATTERN.matcher(inStr)).find()) {
                String urlStr = matcher.group(1);
                try {
                    url = new URI(urlStr);
                } catch (URISyntaxException urise) {
                    return;
                }
            } else {
                return;
            }

            String name = null;
            if ((matcher = SERVER_PATTERN.matcher(inStr)).find()) {
                name = matcher.group(1);
            }

            InetAddress localAddress = bindMap.get(channel);

            UpnpIgdDevice device = new UpnpIgdDevice(localAddress, sourceAddress.getAddress(), name, url);
            ret.add(device);
        }
    };

    UdpCommunicator comm = null;
    try {
        List<DatagramChannel> channels = new ArrayList<>();

        for (InetAddress localAddr : localAddresses) {
            DatagramChannel channel = DatagramChannel.open();
            channel.configureBlocking(false);
            channel.bind(new InetSocketAddress(localAddr, 0));
            channels.add(channel);

            bindMap.put(channel, localAddr);
        }

        comm = new UdpCommunicator(channels);
        comm.startAsync().awaitRunning();
        comm.addListener(listener);

        ByteBuffer searchQueryBuffer = ByteBuffer.wrap(searchQuery.getBytes("US-ASCII")).asReadOnlyBuffer();
        for (int i = 0; i < 3; i++) {
            for (DatagramChannel channel : channels) {
                comm.send(channel, multicastSocketAddress, searchQueryBuffer.asReadOnlyBuffer());
            }

            Thread.sleep(TimeUnit.SECONDS.toMillis(MAX_WAIT + 1));
        }

        return new HashSet<>(ret);
    } finally {
        if (comm != null) {
            try {
                comm.stopAsync().awaitTerminated(); // this stop should handle closing all the datagram channels
            } catch (IllegalStateException ise) { // NOPMD
                // do nothing
            }
        }
    }
}

From source file:eu.itesla_project.security.RunSecurityAnalysisTool.java
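A variant of the previous tool that resolves its factories through ComponentDefaultConfig and a ContingenciesProvider; the synchronized TreeMap plays the same role of collecting per-case results.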

@Override
public void run(CommandLine line) throws Exception {
    ComponentDefaultConfig config = new ComponentDefaultConfig();
    String caseFormat = line.getOptionValue("case-format");
    Path caseDir = Paths.get(line.getOptionValue("case-dir"));
    String caseBaseName = line.getOptionValue("case-basename");
    Path outputCsvFile = Paths.get(line.getOptionValue("output-csv-file"));
    boolean detailed = line.hasOption("detailed");

    ContingenciesProvider contingencyProvider = config.findFactoryImplClass(ContingenciesProviderFactory.class)
            .newInstance().create();
    LoadFlowFactory loadFlowFactory = config.findFactoryImplClass(LoadFlowFactory.class).newInstance();

    try (ComputationManager computationManager = new LocalComputationManager()) {

        Importer importer = Importers.getImporter(caseFormat, computationManager);
        if (importer == null) {
            throw new RuntimeException("Format " + caseFormat + " not supported");
        }

        Map<String, Map<String, List<LimitViolation>>> statusPerContingencyPerCase = Collections
                .synchronizedMap(new TreeMap<>());

        Set<String> contingencyIds = Collections.synchronizedSet(new LinkedHashSet<>());

        if (caseBaseName != null) {
            System.out.println("loading case " + caseBaseName + " ...");

            // load the network
            Network network = importer.import_(new GenericReadOnlyDataSource(caseDir, caseBaseName),
                    new Properties());

            List<Contingency> contingencies = contingencyProvider.getContingencies(network);
            contingencyIds.addAll(contingencies.stream().map(Contingency::getId).collect(Collectors.toList()));

            StaticSecurityAnalysis securityAnalysis = new StaticSecurityAnalysis(network, loadFlowFactory,
                    computationManager);

            statusPerContingencyPerCase.put(caseBaseName, securityAnalysis.run(contingencies));
        } else {
            Importers.importAll(caseDir, importer, true, network -> {
                try {
                    List<Contingency> contingencies = contingencyProvider.getContingencies(network);
                    contingencyIds.addAll(
                            contingencies.stream().map(Contingency::getId).collect(Collectors.toList()));

                    StaticSecurityAnalysis securityAnalysis = new StaticSecurityAnalysis(network,
                            loadFlowFactory, computationManager);

                    statusPerContingencyPerCase.put(network.getId(), securityAnalysis.run(contingencies));
                } catch (Exception e) {
                    LOGGER.error(e.toString(), e);
                }
            }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + " ..."));
        }

        try (BufferedWriter writer = Files.newBufferedWriter(outputCsvFile, StandardCharsets.UTF_8)) {
            writer.write("base case");
            for (String contingencyId : contingencyIds) {
                writer.write(CSV_SEPARATOR);
                writer.write(contingencyId);
            }
            writer.newLine();

            for (Map.Entry<String, Map<String, List<LimitViolation>>> e : statusPerContingencyPerCase
                    .entrySet()) {
                String baseCaseName = e.getKey();
                Map<String, List<LimitViolation>> statusPerContingency = e.getValue();
                writer.write(baseCaseName);
                for (String contingencyId : contingencyIds) {
                    List<LimitViolation> violations = statusPerContingency.get(contingencyId);
                    writer.write(CSV_SEPARATOR);
                    writer.write(toString(violations, detailed));
                }
                writer.newLine();
            }
        }
    }
}

From source file:esg.node.core.AbstractDataNodeComponent.java
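Initializes the component's listener collections in the base-class constructor, including a synchronized map of ESGQueueListener instances keyed by String.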

public AbstractDataNodeComponent(String name) {
    this.myName = name;
    this.esgListeners = Collections.synchronizedList(new ArrayList<ESGListener>());
    this.esgQueueListenersMap = Collections.synchronizedMap(new HashMap<String, ESGQueueListener>());
    this.eventQueue = new ESGQueue(this);
}

From source file:org.apache.jackrabbit.core.query.lucene.CachingIndexReader.java
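Sizes the synchronized LRUMap used for document-number-to-id lookups at roughly 1% of the index's maxDoc(), with a floor of 10 entries.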

/**
 * Creates a new <code>CachingIndexReader</code> based on
 * <code>delegatee</code>/*from www.  j a  v a2 s  .  co  m*/
 *
 * @param delegatee the base <code>IndexReader</code>.
 * @param cache     a document number cache, or <code>null</code> if not
 *                  available to this reader.
 * @param initCache if the parent caches should be initialized
 *                  when this index reader is constructed.
 * @throws IOException if an error occurs while reading from the index.
 */
@SuppressWarnings("unchecked")
CachingIndexReader(IndexReader delegatee, DocNumberCache cache, boolean initCache) throws IOException {
    super(delegatee);
    this.cache = cache;
    this.inSegmentParents = new int[delegatee.maxDoc()];
    Arrays.fill(this.inSegmentParents, -1);
    this.shareableNodes = initShareableNodes(delegatee);
    this.cacheInitializer = new CacheInitializer(delegatee);
    if (initCache) {
        cacheInitializer.run();
    }
    // limit cache to 1% of maxDoc(), but at least 10.
    this.docNumber2id = Collections.synchronizedMap(new LRUMap(Math.max(10, delegatee.maxDoc() / 100)));
    this.termDocsCache = new TermDocsCache(delegatee, FieldNames.PROPERTIES);
}