List of usage examples for java.util.Collections.synchronizedMap
public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m)
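Before the project examples below, a minimal sketch of the basic pattern: synchronizedMap wraps any Map in a thread-safe view whose individual calls are synchronized, but iteration over the returned map is not atomic and must be synchronized manually on the wrapper, as the Javadoc requires. The class and variable names here are illustrative only.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedMapExample {

    public static void main(String[] args) {
        // Wrap an ordinary HashMap; every single call on the view is synchronized on the wrapper.
        Map<String, Integer> counts = Collections.synchronizedMap(new HashMap<String, Integer>());
        counts.put("a", 1);
        counts.put("b", 2);

        // Iteration is NOT atomic: the caller must hold the wrapper's lock while iterating.
        synchronized (counts) {
            for (Map.Entry<String, Integer> entry : counts.entrySet()) {
                System.out.println(entry.getKey() + " = " + entry.getValue());
            }
        }
    }
}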
From source file:de.mrapp.android.util.multithreading.AbstractDataBinder.java
/**
 * Creates a new data binder, which uses a specific executor service and cache. Caching is
 * enabled by default.
 *
 * @param context
 *         The context, which should be used by the data binder, as an instance of the class
 *         {@link Context}. The context may not be null
 * @param threadPool
 *         The executor service, which should be used to manage asynchronous tasks, as an
 *         instance of the type {@link ExecutorService}. The executor service may not be null
 * @param cache
 *         The LRU cache, which should be used to cache already loaded data, as an instance of
 *         the class LruCache. The cache may not be null
 */
public AbstractDataBinder(@NonNull final Context context, @NonNull final ExecutorService threadPool,
        @NonNull final LruCache<KeyType, DataType> cache) {
    ensureNotNull(context, "The context may not be null");
    ensureNotNull(threadPool, "The executor service may not be null");
    ensureNotNull(cache, "The cache may not be null");
    this.context = context;
    this.logger = new Logger(LogLevel.INFO);
    this.listeners = new LinkedHashSet<>();
    this.cache = cache;
    this.views = Collections.synchronizedMap(new WeakHashMap<ViewType, KeyType>());
    this.threadPool = threadPool;
    this.cancelLock = new Object();
    this.canceled = false;
    this.useCache = true;
}
From source file:com.netscape.ca.CertificateAuthority.java
public Map<Object, Long> getNonces(HttpServletRequest request, String name) {

    // Create a new session or use an existing one.
    HttpSession session = request.getSession(true);
    if (session == null) {
        throw new PKIException("Unable to create session.");
    }

    // Lock the session to prevent concurrent access.
    // http://yet-another-dev.blogspot.com/2009/08/synchronizing-httpsession.html
    Object lock = request.getSession().getId().intern();
    synchronized (lock) {

        // Find the existing storage in the session.
        @SuppressWarnings("unchecked")
        Map<Object, Long> nonces = (Map<Object, Long>) session.getAttribute("nonces-" + name);

        if (nonces == null) {
            // If not present, create a new storage.
            nonces = Collections.synchronizedMap(new Nonces(mMaxNonces));

            // Put the storage in the session.
            session.setAttribute("nonces-" + name, nonces);
        }

        return nonces;
    }
}
From source file:org.apache.axiom.om.util.StAXUtils.java
/**
 * @return XMLInputFactory for the current classloader
 */
private static XMLInputFactory getXMLInputFactory_perClassLoader(StAXParserConfiguration configuration) {

    ClassLoader cl = getContextClassLoader();
    XMLInputFactory factory;
    if (cl == null) {
        factory = getXMLInputFactory_singleton(configuration);
    } else {
        // Check the cache
        if (configuration == null) {
            configuration = StAXParserConfiguration.DEFAULT;
        }
        Map map = (Map) inputFactoryPerCLMap.get(configuration);
        if (map == null) {
            map = Collections.synchronizedMap(new WeakHashMap());
            inputFactoryPerCLMap.put(configuration, map);
            factory = null;
        } else {
            factory = (XMLInputFactory) map.get(cl);
        }

        // If not found in the cache map, create a new factory
        if (factory == null) {

            if (log.isDebugEnabled()) {
                log.debug("About to create XMLInputFactory implementation with " + "classloader=" + cl);
                log.debug("The classloader for javax.xml.stream.XMLInputFactory is: "
                        + XMLInputFactory.class.getClassLoader());
            }
            try {
                factory = newXMLInputFactory(null, configuration);
            } catch (ClassCastException cce) {
                if (log.isDebugEnabled()) {
                    log.debug("Failed creation of XMLInputFactory implementation with " + "classloader=" + cl);
                    log.debug("Exception is=" + cce);
                    log.debug("Attempting with classloader: " + XMLInputFactory.class.getClassLoader());
                }
                factory = newXMLInputFactory(XMLInputFactory.class.getClassLoader(), configuration);
            }

            if (factory != null) {
                // Cache the new factory
                map.put(cl, factory);

                if (log.isDebugEnabled()) {
                    log.debug("Created XMLInputFactory = " + factory.getClass() + " with classloader=" + cl);
                    log.debug("Configuration = " + configuration);
                    log.debug("Size of XMLInputFactory map for this configuration = " + map.size());
                    log.debug("Configurations for which factories have been cached = "
                            + inputFactoryPerCLMap.keySet());
                }
            } else {
                factory = getXMLInputFactory_singleton(configuration);
            }
        }
    }
    return factory;
}
From source file:org.geoserver.catalog.ResourcePool.java
/**
 * Get Connect params.
 *
 * <p>
 * This is used to smooth any relative path kind of issues for any file
 * URLs or directory. This code should be expanded to deal with any other
 * context-sensitive issues dataStores tend to have.
 * </p>
 *
 * @return DOCUMENT ME!
 *
 * @task REVISIT: cache these?
 */
public static Map getParams(Map m, String baseDir) {
    Map params = Collections.synchronizedMap(new HashMap(m));

    for (Iterator i = params.entrySet().iterator(); i.hasNext();) {
        Map.Entry entry = (Map.Entry) i.next();
        String key = (String) entry.getKey();
        Object value = entry.getValue();

        //TODO: this code is a pretty big hack, using the name to
        // determine if the key is a url, could be named something else
        // and still be a url
        if ((key != null) && key.matches(".* *url") && value instanceof String) {
            String path = (String) value;

            if (path.startsWith("file:")) {
                File fixedPath = GeoserverDataDirectory.findDataFile(path);
                entry.setValue(DataUtilities.fileToURL(fixedPath).toExternalForm());
            }
        } else if (value instanceof URL && ((URL) value).getProtocol().equals("file")) {
            File fixedPath = GeoserverDataDirectory.findDataFile(((URL) value).toString());
            entry.setValue(DataUtilities.fileToURL(fixedPath));
        } else if ((key != null) && key.equals("directory") && value instanceof String) {
            String path = (String) value;
            //if a url is used for a directory (for example property store), convert it to path
            if (path.startsWith("file:")) {
                File fixedPath = GeoserverDataDirectory.findDataFile((String) value);
                entry.setValue(fixedPath.toString());
            }
        }
    }
    return params;
}
From source file:org.hyperledger.fabric.sdk.Channel.java
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();

    toString = "Channel{id: " + config.getNextID() + ", name: " + name + "}";
    initialized = false;
    lastChaincodeUpgradeEventBlock = 0;
    shutdown = false;
    msps = new HashMap<>();
    txListeners = new LinkedHashMap<>();
    channelEventQue = new ChannelEventQue();
    blockListeners = new LinkedHashMap<>();

    peerEndpointMap = Collections.synchronizedMap(new HashMap<>());
    setSDPeerAddition(new SDOPeerDefaultAddition(getServiceDiscoveryProperties()));
    // sdOrdererAddition = DEFAULT_ORDERER_ADDITION;
    endorsementSelector = ServiceDiscovery.DEFAULT_ENDORSEMENT_SELECTION;
    chainCodeListeners = new LinkedHashMap<>();

    for (Peer peer : peers) {
        peerEndpointMap.put(peer.getEndpoint(), peer);
    }

    ordererEndpointMap = Collections.synchronizedMap(new HashMap<>());
    for (Orderer orderer : orderers) {
        ordererEndpointMap.put(orderer.getEndpoint(), orderer);
    }

    for (EventHub eventHub : getEventHubs()) {
        eventHub.setEventQue(channelEventQue);
    }
}
From source file:HSqlPrimerDesign.java
@SuppressWarnings("Duplicates") public static void locations(Connection connection) throws ClassNotFoundException, SQLException, InstantiationException, IllegalAccessException, IOException { long time = System.nanoTime(); String base = new File("").getAbsolutePath(); DpalLoad.main(new String[0]); Dpal_Inst = DpalLoad.INSTANCE_WIN64; System.out.println(Dpal_Inst); Connection db = connection;//from www . j ava2 s .c o m db.setAutoCommit(false); Statement stat = db.createStatement(); PrintWriter log = new PrintWriter(new File("javalog.log")); stat.execute("SET FILES LOG FALSE;"); PreparedStatement st = db.prepareStatement("INSERT INTO Primerdb.MatchedPrimers(" + "Primer, PrimerMatch, Comp,FragAVG,FragVAR,H2SD,L2SD, Cluster, Strain)" + "Values(?,?,?,?,?,?,?,?,?)"); ResultSet call = stat.executeQuery("Select * From Primerdb.Phages;"); List<String[]> phages = new ArrayList<>(); while (call.next()) { String[] r = new String[3]; r[0] = call.getString("Strain"); r[1] = call.getString("Cluster"); r[2] = call.getString("Name"); phages.add(r); // if(strain.equals("-myco")) { // if (r[2].equals("xkcd")) { // strain = r[0]; // } // }else if(strain.equals("-arthro")){ // if (r[2].equals("ArV1")) { // strain = r[0]; // } // } } call.close(); Set<String> strains = phages.stream().map(y -> y[0]).collect(Collectors.toSet()); for (String x : strains) { Set<String> clust = phages.stream().filter(y -> y[0].equals(x)).map(y -> y[1]) .collect(Collectors.toSet()); String[] clusters = clust.toArray(new String[clust.size()]); // String z ="A1"; for (String z : clusters) { System.out.println("Starting:" + z); List<Primer> primers = new ArrayList<>(); Set<Matches> matched = new HashSet<>(); Set<String> clustphage = phages.stream().filter(a -> a[0].equals(x) && a[1].equals(z)) .map(a -> a[2]).collect(Collectors.toSet()); String[] clustphages = clustphage.toArray(new String[clustphage.size()]); if (clustphages.length > 1) { try { ResultSet resultSet = stat .executeQuery("Select * from primerdb.primers" + " where Strain ='" + x + "' and Cluster ='" + z + "' and UniqueP = true" + " and Hairpin = false"); while (resultSet.next()) { Primer primer = new Primer(resultSet.getString("Sequence")); primer.setTm(resultSet.getDouble("Tm")); primers.add(primer); } resultSet.close(); } catch (SQLException e) { e.printStackTrace(); System.out.println("Error occurred at " + x + " " + z); } System.out.println(primers.size()); Set<Primer> primerlist2 = primers.stream().collect(Collectors.toSet()); Primer[] primers2 = primerlist2.toArray(new Primer[primerlist2.size()]); Map<String, Map<CharSequence, List<Integer>>> locations = Collections .synchronizedMap(new HashMap<>()); clustphage.stream().forEach(phage -> { String[] seqs = Fasta.parse(base + "/Fastas/" + phage + ".fasta"); String sequence = seqs[0] + seqs[1]; Map<String, List<Integer>> seqInd = new HashMap<>(); for (int i = 0; i <= sequence.length() - 10; i++) { String sub = sequence.substring(i, i + 10); if (seqInd.containsKey(sub)) { seqInd.get(sub).add(i); } else { List<Integer> list = new ArrayList<>(); list.add(i); seqInd.put(sub, list); } } Map<CharSequence, List<Integer>> alllocs = new HashMap<>(); for (Primer primer : primers2) { List<Integer> locs = new ArrayList<>(); String sequence1 = primer.getSequence(); String frag = sequence1.substring(0, 10); List<Integer> integers = seqInd.get(frag); if (integers != null) { for (Integer i : integers) { if ((sequence1.length() + i) < sequence.length() && sequence.substring(i, sequence1.length() + i).equals(sequence1)) { locs.add(i); } 
} } alllocs.put(sequence1, locs); } locations.put(phage, alllocs); }); System.out.println("locations found"); System.out.println((System.nanoTime() - time) / Math.pow(10, 9) / 60.0); final int[] k = new int[] { 0 }; primerlist2.parallelStream().forEach(a -> { int matches = 0; int i = 0; while (primers2[i] != a) { i++; } for (int j = i + 1; j < primers2.length; j++) { double[] frags = new double[clustphages.length]; int phageCounter = 0; Primer b = primers2[j]; boolean match = true; if (matches > 0) { break; } if (Math.abs(a.getTm() - b.getTm()) > 5.0 || a.getSequence().equals(b.getSequence())) { continue; } for (String phage : clustphages) { List<Integer> loc1 = locations.get(phage).get(a.getSequence()); List<Integer> loc2 = locations.get(phage).get(b.getSequence()); // if(loc1.size()==0){ // System.out.println(phage+" "+a.getSequence()); // } if (loc1.size() == 0 || loc2.size() == 0) { // if (loc1.size()!=1||loc2.size()!=1){ match = false; break; } boolean found = false; int fragCount = 0; int l1 = loc1.get(0); int l2 = loc2.get(0); int count1 = 0; int count2 = 0; int frag = Math.abs(l1 - l2); while (!found) { if (frag >= 500 && frag <= 2000) { fragCount++; if (++count1 < loc1.size()) l1 = loc1.get(count1); else if (++count2 < loc2.size()) l2 = loc2.get(count2); } else if (l1 < l2 && frag < 500) { count2++; } else if (l1 > l2 && frag < 500) { count1++; } else if (l1 > l2 && frag > 2000) { count2++; } else if (l1 < l2 && frag > 2000) { count1++; } else { break; } if (count1 < loc1.size() && count2 < loc2.size()) { l1 = loc1.get(count1); l2 = loc2.get(count2); frag = Math.abs(l1 - l2); } else { if (fragCount == 1) { found = true; frags[phageCounter++] = frag + 0.0; } else { break; } } } if (!found) { match = false; break; } } if (match) { matches++; matched.add(new Matches(a, b, frags)); } } // k[0]++; // System.out.println(k[0]); }); System.out.println((System.nanoTime() - time) / Math.pow(10, 9) / 60.0); System.out.println("Primers matched"); int c = 0; int i = 0; try { for (Matches primerkey : matched) { c++; String primer1 = primerkey.one.getSequence(); String primer2 = primerkey.two.getSequence(); st.setString(1, primer1); st.setString(2, primer2); st.setDouble(3, complementarity(primer1, primer2, Dpal_Inst)); st.setDouble(4, primerkey.stats.getMean()); st.setDouble(5, primerkey.stats.getVariance()); st.setDouble(6, primerkey.stats.getMean() + 2 * primerkey.stats.getStandardDeviation()); st.setDouble(7, primerkey.stats.getMean() - 2 * primerkey.stats.getStandardDeviation()); st.setString(8, z); st.setString(9, x); st.addBatch(); i++; if (i == 1000) { i = 0; st.executeBatch(); db.commit(); } } if (i > 0) { st.executeBatch(); db.commit(); } } catch (SQLException e) { e.printStackTrace(); System.out.println("Error occurred at " + x + " " + z); } System.out.println(c); } log.println(z); log.flush(); System.gc(); } } stat.execute("SET FILES LOG TRUE;"); st.close(); stat.close(); System.out.println("Matches Submitted"); }
From source file:org.jasig.portal.groups.filesystem.FileSystemGroupStore.java
/**
 *
 */
protected void initialize(GroupServiceConfiguration cfg) {
    cache = Collections.synchronizedMap(new HashMap());
    goodSeparator = File.separatorChar;
    badSeparator = (goodSeparator == FORWARD_SLASH) ? BACK_SLASH : FORWARD_SLASH;
    defaultEntityType = org.jasig.portal.security.IPerson.class;

    GroupServiceConfiguration config = cfg;
    if (config == null) {
        try {
            config = GroupServiceConfiguration.getConfiguration();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
    String sep = config.getNodeSeparator();
    if (sep != null) {
        String period = String.valueOf(PERIOD);
        useSubstitutePeriod = sep.equals(period);
    }
}
From source file:net.sf.ehcache.store.DiskStore.java
private Map swapSpoolReference() {
    Map copyOfSpool = null;
    synchronized (spoolLock) {
        // Copy the reference of the old spool, not the contents. Avoid potential spike in memory usage
        copyOfSpool = spool;
        // use a new map making the reference swap above SAFE
        spool = Collections.synchronizedMap(new HashMap());
    }
    return copyOfSpool;
}
From source file:org.apereo.portal.groups.filesystem.FileSystemGroupStore.java
/**
 *
 */
protected void initialize(GroupServiceConfiguration cfg) {
    cache = Collections.synchronizedMap(new HashMap());
    goodSeparator = File.separatorChar;
    badSeparator = (goodSeparator == FORWARD_SLASH) ? BACK_SLASH : FORWARD_SLASH;
    defaultEntityType = IPerson.class;

    GroupServiceConfiguration config = cfg;
    if (config == null) {
        try {
            config = GroupServiceConfiguration.getConfiguration();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
    String sep = config.getNodeSeparator();
    if (sep != null) {
        String period = String.valueOf(PERIOD);
        useSubstitutePeriod = sep.equals(period);
    }
}
From source file:net.minecraftforge.fml.client.FMLClientHandler.java
public void setupServerList() {
    extraServerListData = Collections.synchronizedMap(Maps.<ServerStatusResponse, JsonObject>newHashMap());
    serverDataTag = Collections.synchronizedMap(Maps.<ServerData, ExtendedServerListData>newHashMap());
}