List of usage examples for java.util.Collections.synchronizedMap
public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m)
From source file: org.pentaho.platform.engine.services.runtime.RuntimeContext.java
@SuppressWarnings({ "unchecked" }) protected static Map getComponentClassMap() { if (RuntimeContext.componentClassMap == null) { RuntimeContext.componentClassMap = Collections .synchronizedMap(RuntimeContext.createComponentClassMap()); }//w ww.jav a 2 s .com return RuntimeContext.componentClassMap; }
From source file: org.dancres.blitz.jini.lockmgr.DistributedLockManager.java
/** * Set the channel state. We do nothing here. *///from w w w .j a v a 2 s. c o m public void setState(byte[] state) { System.out.println("SetState:" + id); if (state == null) { System.out.println("Not found state"); return; } try { ByteArrayInputStream myStream = new ByteArrayInputStream(state); ObjectInputStream myOIS = new ObjectInputStream(myStream); preparedLocks = (Map) myOIS.readObject(); preparedReleases = (Map) myOIS.readObject(); heldLocks = (Map) myOIS.readObject(); preparedNews = (Map) myOIS.readObject(); preparedDeletes = (Map) myOIS.readObject(); heldResources = Collections.synchronizedMap((Map) myOIS.readObject()); myOIS.close(); System.out.println("Got " + preparedLocks.size() + " prepared locks"); System.out.println("Got " + preparedReleases.size() + " prepared releases"); System.out.println("Got " + heldLocks.size() + " held locks"); System.out.println("Got " + preparedNews.size() + " prepared news"); System.out.println("Got " + preparedDeletes.size() + " prepared deletes"); System.out.println("Got " + heldResources.size() + " held resources"); } catch (Exception anE) { System.err.println("Failed to perform setState"); anE.printStackTrace(System.err); } }
From source file: org.apache.axiom.om.util.StAXUtils.java
/** * @return XMLOutputFactory for the current classloader *///from www. j a v a2s .c o m private static XMLOutputFactory getXMLOutputFactory_perClassLoader(StAXWriterConfiguration configuration) { ClassLoader cl = getContextClassLoader(); XMLOutputFactory factory; if (cl == null) { factory = getXMLOutputFactory_singleton(configuration); } else { if (configuration == null) { configuration = StAXWriterConfiguration.DEFAULT; } Map map = (Map) outputFactoryPerCLMap.get(configuration); if (map == null) { map = Collections.synchronizedMap(new WeakHashMap()); outputFactoryPerCLMap.put(configuration, map); factory = null; } else { factory = (XMLOutputFactory) map.get(cl); } if (factory == null) { if (log.isDebugEnabled()) { log.debug("About to create XMLOutputFactory implementation with " + "classloader=" + cl); log.debug("The classloader for javax.xml.stream.XMLOutputFactory is: " + XMLOutputFactory.class.getClassLoader()); } try { factory = newXMLOutputFactory(null, configuration); } catch (ClassCastException cce) { if (log.isDebugEnabled()) { log.debug("Failed creation of XMLOutputFactory implementation with " + "classloader=" + cl); log.debug("Exception is=" + cce); log.debug("Attempting with classloader: " + XMLOutputFactory.class.getClassLoader()); } factory = newXMLOutputFactory(XMLOutputFactory.class.getClassLoader(), configuration); } if (factory != null) { map.put(cl, factory); if (log.isDebugEnabled()) { log.debug("Created XMLOutputFactory = " + factory.getClass() + " for classloader=" + cl); log.debug("Configuration = " + configuration); log.debug("Size of XMLOutFactory map for this configuration = " + map.size()); log.debug("Configurations for which factories have been cached = " + outputFactoryPerCLMap.keySet()); } } else { factory = getXMLOutputFactory_singleton(configuration); } } } return factory; }
From source file: net.sf.morph.transform.transformers.BaseTransformer.java
/**
 * {@inheritDoc}
 *
 * <p>The copy starts with a fresh, empty, synchronized call cache rather than
 * sharing the original's cache.</p>
 *
 * @see java.lang.Object#clone()
 */
protected Object clone() throws CloneNotSupportedException {
    BaseTransformer copy = (BaseTransformer) super.clone();
    copy.transformableCallCache = Collections.synchronizedMap(new HashMap());
    return copy;
}
From source file: org.apache.distributedlog.BKLogHandler.java
/**
 * Reads log segment metadata for the supplied segment names and completes
 * {@code readResult} with the sorted, versioned segment list.
 *
 * <p>Flow visible here: diff the filtered incoming names against the local
 * segment cache; if nothing was added, update the cache and complete
 * immediately. Otherwise fetch each added segment's metadata (from
 * {@code metadataCache} when present, else asynchronously from
 * {@code metadataStore}); each completed fetch calls
 * {@code completeReadLogSegmentsFromStore}, which presumably counts down
 * {@code numChildren} and finishes the future when all fetches are in --
 * TODO confirm against that helper. {@code removedSegments} and
 * {@code addedSegments} are synchronized because the async callbacks may run
 * on other threads.</p>
 *
 * @param logSegmentNames versioned list of segment names read from the store
 * @param comparator      ordering applied to the resulting segment list
 * @param segmentFilter   filter applied to the incoming segment names
 * @param readResult      future completed with the versioned segment list,
 *                        or exceptionally on the first failure
 */
protected void readLogSegmentsFromStore(final Versioned<List<String>> logSegmentNames,
        final Comparator<LogSegmentMetadata> comparator, final LogSegmentFilter segmentFilter,
        final CompletableFuture<Versioned<List<LogSegmentMetadata>>> readResult) {
    Set<String> segmentsReceived = new HashSet<String>();
    segmentsReceived.addAll(segmentFilter.filter(logSegmentNames.getValue()));
    Set<String> segmentsAdded;
    // Shared with async callbacks below, hence the synchronized wrappers.
    final Set<String> removedSegments = Collections.synchronizedSet(new HashSet<String>());
    final Map<String, LogSegmentMetadata> addedSegments = Collections
            .synchronizedMap(new HashMap<String, LogSegmentMetadata>());
    Pair<Set<String>, Set<String>> segmentChanges = logSegmentCache.diff(segmentsReceived);
    segmentsAdded = segmentChanges.getLeft();
    removedSegments.addAll(segmentChanges.getRight());
    if (segmentsAdded.isEmpty()) {
        // Nothing new: removals only, so complete synchronously.
        if (LOG.isTraceEnabled()) {
            LOG.trace("No segments added for {}.", getFullyQualifiedName());
        }
        // update the cache before #getCachedLogSegments to return
        updateLogSegmentCache(removedSegments, addedSegments);
        List<LogSegmentMetadata> segmentList;
        try {
            segmentList = getCachedLogSegments(comparator);
        } catch (UnexpectedException e) {
            readResult.completeExceptionally(e);
            return;
        }
        readResult.complete(new Versioned<List<LogSegmentMetadata>>(segmentList, logSegmentNames.getVersion()));
        return;
    }
    // One countdown per added segment; numFailures gates fail-fast completion.
    final AtomicInteger numChildren = new AtomicInteger(segmentsAdded.size());
    final AtomicInteger numFailures = new AtomicInteger(0);
    for (final String segment : segmentsAdded) {
        String logSegmentPath = logMetadata.getLogSegmentPath(segment);
        LogSegmentMetadata cachedSegment = metadataCache.get(logSegmentPath);
        if (null != cachedSegment) {
            // Cache hit: count this segment down without a store round-trip.
            addedSegments.put(segment, cachedSegment);
            completeReadLogSegmentsFromStore(removedSegments, addedSegments, comparator, readResult,
                    logSegmentNames.getVersion(), numChildren, numFailures);
            continue;
        }
        metadataStore.getLogSegment(logSegmentPath).whenComplete(new FutureEventListener<LogSegmentMetadata>() {
            @Override
            public void onSuccess(LogSegmentMetadata result) {
                addedSegments.put(segment, result);
                complete();
            }

            @Override
            public void onFailure(Throwable cause) {
                // LogSegmentNotFoundException exception is possible in two cases
                // 1. A log segment was deleted by truncation between the call to getChildren and read
                // attempt on the znode corresponding to the segment
                // 2. In progress segment has been completed => inprogress ZNode does not exist
                if (cause instanceof LogSegmentNotFoundException) {
                    removedSegments.add(segment);
                    complete();
                } else {
                    // fail fast
                    if (1 == numFailures.incrementAndGet()) {
                        readResult.completeExceptionally(cause);
                        return;
                    }
                }
            }

            private void complete() {
                completeReadLogSegmentsFromStore(removedSegments, addedSegments, comparator, readResult,
                        logSegmentNames.getVersion(), numChildren, numFailures);
            }
        });
    }
}
From source file: org.intermine.web.logic.session.SessionMethods.java
/** * * * @param session the current session/*w w w. j av a2s . co m*/ * @param identifier table identifier * @param table table to register */ @SuppressWarnings("unchecked") public static void setResultsTable(HttpSession session, String identifier, PagedTable table) { @SuppressWarnings("rawtypes") Map<String, PagedTable> tables = (Map) session.getAttribute(Constants.TABLE_MAP); if (tables == null) { tables = Collections.synchronizedMap(new LRUMap(100)); session.setAttribute(Constants.TABLE_MAP, tables); } tables.put(identifier, table); table.setTableid(identifier); }
From source file: org.apache.fop.util.ColorUtil.java
/** * Initializes the colorMap with some predefined values. *///www. j a v a2 s. c o m private static void initializeColorMap() { // CSOK: MethodLength colorMap = Collections.synchronizedMap(new java.util.HashMap<String, Color>()); colorMap.put("aliceblue", createColor(240, 248, 255)); colorMap.put("antiquewhite", createColor(250, 235, 215)); colorMap.put("aqua", createColor(0, 255, 255)); colorMap.put("aquamarine", createColor(127, 255, 212)); colorMap.put("azure", createColor(240, 255, 255)); colorMap.put("beige", createColor(245, 245, 220)); colorMap.put("bisque", createColor(255, 228, 196)); colorMap.put("black", createColor(0, 0, 0)); colorMap.put("blanchedalmond", createColor(255, 235, 205)); colorMap.put("blue", createColor(0, 0, 255)); colorMap.put("blueviolet", createColor(138, 43, 226)); colorMap.put("brown", createColor(165, 42, 42)); colorMap.put("burlywood", createColor(222, 184, 135)); colorMap.put("cadetblue", createColor(95, 158, 160)); colorMap.put("chartreuse", createColor(127, 255, 0)); colorMap.put("chocolate", createColor(210, 105, 30)); colorMap.put("coral", createColor(255, 127, 80)); colorMap.put("cornflowerblue", createColor(100, 149, 237)); colorMap.put("cornsilk", createColor(255, 248, 220)); colorMap.put("crimson", createColor(220, 20, 60)); colorMap.put("cyan", createColor(0, 255, 255)); colorMap.put("darkblue", createColor(0, 0, 139)); colorMap.put("darkcyan", createColor(0, 139, 139)); colorMap.put("darkgoldenrod", createColor(184, 134, 11)); colorMap.put("darkgray", createColor(169, 169, 169)); colorMap.put("darkgreen", createColor(0, 100, 0)); colorMap.put("darkgrey", createColor(169, 169, 169)); colorMap.put("darkkhaki", createColor(189, 183, 107)); colorMap.put("darkmagenta", createColor(139, 0, 139)); colorMap.put("darkolivegreen", createColor(85, 107, 47)); colorMap.put("darkorange", createColor(255, 140, 0)); colorMap.put("darkorchid", createColor(153, 50, 204)); colorMap.put("darkred", createColor(139, 0, 0)); 
colorMap.put("darksalmon", createColor(233, 150, 122)); colorMap.put("darkseagreen", createColor(143, 188, 143)); colorMap.put("darkslateblue", createColor(72, 61, 139)); colorMap.put("darkslategray", createColor(47, 79, 79)); colorMap.put("darkslategrey", createColor(47, 79, 79)); colorMap.put("darkturquoise", createColor(0, 206, 209)); colorMap.put("darkviolet", createColor(148, 0, 211)); colorMap.put("deeppink", createColor(255, 20, 147)); colorMap.put("deepskyblue", createColor(0, 191, 255)); colorMap.put("dimgray", createColor(105, 105, 105)); colorMap.put("dimgrey", createColor(105, 105, 105)); colorMap.put("dodgerblue", createColor(30, 144, 255)); colorMap.put("firebrick", createColor(178, 34, 34)); colorMap.put("floralwhite", createColor(255, 250, 240)); colorMap.put("forestgreen", createColor(34, 139, 34)); colorMap.put("fuchsia", createColor(255, 0, 255)); colorMap.put("gainsboro", createColor(220, 220, 220)); colorMap.put("ghostwhite", createColor(248, 248, 255)); colorMap.put("gold", createColor(255, 215, 0)); colorMap.put("goldenrod", createColor(218, 165, 32)); colorMap.put("gray", createColor(128, 128, 128)); colorMap.put("green", createColor(0, 128, 0)); colorMap.put("greenyellow", createColor(173, 255, 47)); colorMap.put("grey", createColor(128, 128, 128)); colorMap.put("honeydew", createColor(240, 255, 240)); colorMap.put("hotpink", createColor(255, 105, 180)); colorMap.put("indianred", createColor(205, 92, 92)); colorMap.put("indigo", createColor(75, 0, 130)); colorMap.put("ivory", createColor(255, 255, 240)); colorMap.put("khaki", createColor(240, 230, 140)); colorMap.put("lavender", createColor(230, 230, 250)); colorMap.put("lavenderblush", createColor(255, 240, 245)); colorMap.put("lawngreen", createColor(124, 252, 0)); colorMap.put("lemonchiffon", createColor(255, 250, 205)); colorMap.put("lightblue", createColor(173, 216, 230)); colorMap.put("lightcoral", createColor(240, 128, 128)); colorMap.put("lightcyan", createColor(224, 255, 255)); 
colorMap.put("lightgoldenrodyellow", createColor(250, 250, 210)); colorMap.put("lightgray", createColor(211, 211, 211)); colorMap.put("lightgreen", createColor(144, 238, 144)); colorMap.put("lightgrey", createColor(211, 211, 211)); colorMap.put("lightpink", createColor(255, 182, 193)); colorMap.put("lightsalmon", createColor(255, 160, 122)); colorMap.put("lightseagreen", createColor(32, 178, 170)); colorMap.put("lightskyblue", createColor(135, 206, 250)); colorMap.put("lightslategray", createColor(119, 136, 153)); colorMap.put("lightslategrey", createColor(119, 136, 153)); colorMap.put("lightsteelblue", createColor(176, 196, 222)); colorMap.put("lightyellow", createColor(255, 255, 224)); colorMap.put("lime", createColor(0, 255, 0)); colorMap.put("limegreen", createColor(50, 205, 50)); colorMap.put("linen", createColor(250, 240, 230)); colorMap.put("magenta", createColor(255, 0, 255)); colorMap.put("maroon", createColor(128, 0, 0)); colorMap.put("mediumaquamarine", createColor(102, 205, 170)); colorMap.put("mediumblue", createColor(0, 0, 205)); colorMap.put("mediumorchid", createColor(186, 85, 211)); colorMap.put("mediumpurple", createColor(147, 112, 219)); colorMap.put("mediumseagreen", createColor(60, 179, 113)); colorMap.put("mediumslateblue", createColor(123, 104, 238)); colorMap.put("mediumspringgreen", createColor(0, 250, 154)); colorMap.put("mediumturquoise", createColor(72, 209, 204)); colorMap.put("mediumvioletred", createColor(199, 21, 133)); colorMap.put("midnightblue", createColor(25, 25, 112)); colorMap.put("mintcream", createColor(245, 255, 250)); colorMap.put("mistyrose", createColor(255, 228, 225)); colorMap.put("moccasin", createColor(255, 228, 181)); colorMap.put("navajowhite", createColor(255, 222, 173)); colorMap.put("navy", createColor(0, 0, 128)); colorMap.put("oldlace", createColor(253, 245, 230)); colorMap.put("olive", createColor(128, 128, 0)); colorMap.put("olivedrab", createColor(107, 142, 35)); colorMap.put("orange", createColor(255, 165, 
0)); colorMap.put("orangered", createColor(255, 69, 0)); colorMap.put("orchid", createColor(218, 112, 214)); colorMap.put("palegoldenrod", createColor(238, 232, 170)); colorMap.put("palegreen", createColor(152, 251, 152)); colorMap.put("paleturquoise", createColor(175, 238, 238)); colorMap.put("palevioletred", createColor(219, 112, 147)); colorMap.put("papayawhip", createColor(255, 239, 213)); colorMap.put("peachpuff", createColor(255, 218, 185)); colorMap.put("peru", createColor(205, 133, 63)); colorMap.put("pink", createColor(255, 192, 203)); colorMap.put("plum ", createColor(221, 160, 221)); colorMap.put("plum", createColor(221, 160, 221)); colorMap.put("powderblue", createColor(176, 224, 230)); colorMap.put("purple", createColor(128, 0, 128)); colorMap.put("red", createColor(255, 0, 0)); colorMap.put("rosybrown", createColor(188, 143, 143)); colorMap.put("royalblue", createColor(65, 105, 225)); colorMap.put("saddlebrown", createColor(139, 69, 19)); colorMap.put("salmon", createColor(250, 128, 114)); colorMap.put("sandybrown", createColor(244, 164, 96)); colorMap.put("seagreen", createColor(46, 139, 87)); colorMap.put("seashell", createColor(255, 245, 238)); colorMap.put("sienna", createColor(160, 82, 45)); colorMap.put("silver", createColor(192, 192, 192)); colorMap.put("skyblue", createColor(135, 206, 235)); colorMap.put("slateblue", createColor(106, 90, 205)); colorMap.put("slategray", createColor(112, 128, 144)); colorMap.put("slategrey", createColor(112, 128, 144)); colorMap.put("snow", createColor(255, 250, 250)); colorMap.put("springgreen", createColor(0, 255, 127)); colorMap.put("steelblue", createColor(70, 130, 180)); colorMap.put("tan", createColor(210, 180, 140)); colorMap.put("teal", createColor(0, 128, 128)); colorMap.put("thistle", createColor(216, 191, 216)); colorMap.put("tomato", createColor(255, 99, 71)); colorMap.put("turquoise", createColor(64, 224, 208)); colorMap.put("violet", createColor(238, 130, 238)); colorMap.put("wheat", 
createColor(245, 222, 179)); colorMap.put("white", createColor(255, 255, 255)); colorMap.put("whitesmoke", createColor(245, 245, 245)); colorMap.put("yellow", createColor(255, 255, 0)); colorMap.put("yellowgreen", createColor(154, 205, 50)); colorMap.put("transparent", new ColorWithAlternatives(0, 0, 0, 0, null)); }
From source file: com.moviejukebox.model.Library.java
/**
 * Rebuilds every movie and person index for the library.
 *
 * <p>Phases visible in this method: (1) build the set index, then fan out one
 * task per entry of {@code INDEX_LIST} to build each movie index into a
 * synchronized view of {@code indexes}; (2) build index masters per dynamic
 * (set) index and compress set movies in every category; (3) add masters to
 * the title index when selected; (4) sort every index list in parallel;
 * (5) trim the New-TV/New-Movie categories and merge them into the master
 * "New" category; (6) assign poster/file info to index masters; (7) fan out
 * person-index tasks for cast/director/writer/person.</p>
 *
 * @param tasks executor used for the parallel indexing and sorting passes;
 *              restarted between phases
 * @throws Throwable propagated from submitted tasks via {@code tasks.waitFor()}
 */
public void buildIndex(ThreadExecutor<Void> tasks) throws Throwable {
    moviesList.clear();
    indexes.clear();
    tasks.restart();
    final List<Movie> indexMovies = new ArrayList<>(library.values());
    moviesList.addAll(library.values());
    if (!indexMovies.isEmpty()) {
        Map<String, Index> dynamicIndexes = new LinkedHashMap<>();
        // Add the sets FIRST! That allows users to put series inside sets
        dynamicIndexes.put(SET, indexBySets(indexMovies));
        // Synchronized view: the per-index tasks below write concurrently.
        final Map<String, Index> syncindexes = Collections.synchronizedMap(indexes);
        for (final String indexStr : INDEX_LIST.split(",")) {
            tasks.submit(new Callable<Void>() {
                @Override
                public Void call() {
                    SystemTools.showMemory();
                    LOG.info(" Indexing {}...", indexStr);
                    switch (indexStr) {
                    case INDEX_OTHER:
                        syncindexes.put(INDEX_OTHER, indexByProperties(indexMovies));
                        break;
                    case INDEX_GENRES:
                        syncindexes.put(INDEX_GENRES, indexByGenres(indexMovies));
                        break;
                    case INDEX_TITLE:
                        syncindexes.put(INDEX_TITLE, indexByTitle(indexMovies));
                        break;
                    case INDEX_CERTIFICATION:
                        syncindexes.put(INDEX_CERTIFICATION, indexByCertification(indexMovies));
                        break;
                    case INDEX_YEAR:
                        syncindexes.put(INDEX_YEAR, indexByYear(indexMovies));
                        break;
                    case INDEX_LIBRARY:
                        syncindexes.put(INDEX_LIBRARY, indexByLibrary(indexMovies));
                        break;
                    case INDEX_CAST:
                        syncindexes.put(INDEX_CAST, indexByCast(indexMovies));
                        break;
                    case INDEX_DIRECTOR:
                        syncindexes.put(INDEX_DIRECTOR, indexByDirector(indexMovies));
                        break;
                    case INDEX_COUNTRY:
                        syncindexes.put(INDEX_COUNTRY, indexByCountry(indexMovies));
                        break;
                    case INDEX_WRITER:
                        syncindexes.put(INDEX_WRITER, indexByWriter(indexMovies));
                        break;
                    case INDEX_AWARD:
                        syncindexes.put(INDEX_AWARD, indexByAward(indexMovies));
                        break;
                    case INDEX_PERSON:
                        syncindexes.put(INDEX_PERSON, indexByPerson(indexMovies));
                        break;
                    case INDEX_RATINGS:
                        syncindexes.put(INDEX_RATINGS, indexByRatings(indexMovies));
                        break;
                    default:
                        break;
                    }
                    return null;
                }
            });
        }
        tasks.waitFor();
        SystemTools.showMemory();
        // Make a "copy" of uncompressed index
        this.keepUncompressedIndexes();
        // Phase 2: build masters for each dynamic (set) index and compress categories.
        Map<String, Map<String, Movie>> dynamicIndexMasters = new HashMap<>();
        for (Map.Entry<String, Index> dynamicEntry : dynamicIndexes.entrySet()) {
            Map<String, Movie> indexMasters = buildIndexMasters(dynamicEntry.getKey(), dynamicEntry.getValue());
            dynamicIndexMasters.put(dynamicEntry.getKey(), indexMasters);
            for (Map.Entry<String, Index> indexesEntry : indexes.entrySet()) {
                // For each category in index, compress this one.
                for (Map.Entry<String, List<Movie>> indexEntry : indexesEntry.getValue().entrySet()) {
                    compressSetMovies(indexEntry.getValue(), dynamicEntry.getValue(), indexMasters,
                            indexesEntry.getKey(), indexEntry.getKey());
                }
            }
            indexes.put(dynamicEntry.getKey(), dynamicEntry.getValue());
            moviesList.addAll(indexMasters.values()); // so the driver knows what's an index master
        }
        // Now add the masters to the titles index
        // Issue 1018 - Check that this index was selected
        if (INDEX_LIST.contains(INDEX_TITLE)) {
            for (Map.Entry<String, Map<String, Movie>> dynamicIndexMastersEntry : dynamicIndexMasters
                    .entrySet()) {
                Index mastersTitlesIndex = indexByTitle(dynamicIndexMastersEntry.getValue().values());
                for (Map.Entry<String, List<Movie>> indexEntry : mastersTitlesIndex.entrySet()) {
                    for (Movie m : indexEntry.getValue()) {
                        int setCount = dynamicIndexes.get(dynamicIndexMastersEntry.getKey()).get(m.getTitle())
                                .size();
                        if (setCount >= minSetCount) {
                            indexes.get(INDEX_TITLE).addMovie(indexEntry.getKey(), m);
                        }
                    }
                }
            }
        }
        SystemTools.showMemory();
        tasks.restart();
        // OK, now that all the index masters are in-place, sort everything.
        LOG.info(" Sorting Indexes ...");
        for (final Map.Entry<String, Index> indexesEntry : indexes.entrySet()) {
            for (final Map.Entry<String, List<Movie>> indexEntry : indexesEntry.getValue().entrySet()) {
                tasks.submit(new Callable<Void>() {
                    @Override
                    public Void call() {
                        Comparator<Movie> cmpMovie = getComparator(indexesEntry.getKey(), indexEntry.getKey());
                        if (cmpMovie == null) {
                            Collections.sort(indexEntry.getValue());
                        } else {
                            Collections.sort(indexEntry.getValue(), cmpMovie);
                        }
                        return null;
                    }
                });
            }
        }
        tasks.waitFor();
        SystemTools.showMemory();
        // Cut off the Other/New lists if they're too long AND add them to the NEW category if required
        boolean trimNewTvOK = trimNewCategory(INDEX_NEW_TV, newTvCount);
        boolean trimNewMovieOK = trimNewCategory(INDEX_NEW_MOVIE, newMovieCount);
        // Merge the two categories into the Master "New" category
        if (CATEGORIES_MAP.get(INDEX_NEW) != null) {
            Index otherIndexes = indexes.get(INDEX_OTHER);
            List<Movie> newList = new ArrayList<>();
            int newMovies = 0;
            int newTVShows = 0;
            if (trimNewMovieOK && (CATEGORIES_MAP.get(INDEX_NEW_MOVIE) != null)
                    && (otherIndexes.get(CATEGORIES_MAP.get(INDEX_NEW_MOVIE)) != null)) {
                newList.addAll(otherIndexes.get(CATEGORIES_MAP.get(INDEX_NEW_MOVIE)));
                newMovies = otherIndexes.get(CATEGORIES_MAP.get(INDEX_NEW_MOVIE)).size();
            } else // Remove the empty "New Movie" category
            {
                if (CATEGORIES_MAP.get(INDEX_NEW_MOVIE) != null) {
                    otherIndexes.remove(CATEGORIES_MAP.get(INDEX_NEW_MOVIE));
                }
            }
            if (trimNewTvOK && (CATEGORIES_MAP.get(INDEX_NEW_TV) != null)
                    && (otherIndexes.get(CATEGORIES_MAP.get(INDEX_NEW_TV)) != null)) {
                newList.addAll(otherIndexes.get(CATEGORIES_MAP.get(INDEX_NEW_TV)));
                newTVShows = otherIndexes.get(CATEGORIES_MAP.get(INDEX_NEW_TV)).size();
            } else // Remove the empty "New TV" category
            {
                if (CATEGORIES_MAP.get(INDEX_NEW_TV) != null) {
                    otherIndexes.remove(CATEGORIES_MAP.get(INDEX_NEW_TV));
                }
            }
            // If we have new videos, then create the super "New" category
            if ((newMovies + newTVShows) > 0) {
                StringBuilder categoryMessage = new StringBuilder("Creating new category with ");
                if (newMovies > 0) {
                    categoryMessage.append(newMovies).append(" new movie").append(newMovies > 1 ? "s" : "");
                }
                if (newTVShows > 0) {
                    categoryMessage.append(newMovies > 0 ? " & " : "");
                    categoryMessage.append(newTVShows).append(" new TV Show").append(newTVShows > 1 ? "s" : "");
                }
                LOG.debug(categoryMessage.toString());
                otherIndexes.put(CATEGORIES_MAP.get(INDEX_NEW), newList);
                Collections.sort(otherIndexes.get(CATEGORIES_MAP.get(INDEX_NEW)), new LastModifiedComparator());
            }
        }
        // Now set up the index masters' posters
        for (Map.Entry<String, Map<String, Movie>> dynamicIndexMastersEntry : dynamicIndexMasters.entrySet()) {
            for (Map.Entry<String, Movie> mastersEntry : dynamicIndexMastersEntry.getValue().entrySet()) {
                List<Movie> set = dynamicIndexes.get(dynamicIndexMastersEntry.getKey())
                        .get(mastersEntry.getKey());
                mastersEntry.getValue().setPosterFilename(set.get(0).getBaseName() + ".jpg");
                mastersEntry.getValue().setFile(set.get(0).getFile()); // ensure ArtworkScanner looks in the right directory
            }
        }
        Collections.sort(indexMovies);
        setMovieListNavigation(indexMovies);
        SystemTools.showMemory();
    }
    tasks.restart();
    // Phase 7: build person indexes for the job-related index types only.
    final List<Person> indexPersons = new ArrayList<>(people.values());
    if (!indexPersons.isEmpty()) {
        for (final String indexStr : INDEX_LIST.split(",")) {
            if (!(INDEX_CAST + INDEX_DIRECTOR + INDEX_WRITER + INDEX_PERSON).contains(indexStr)) {
                continue;
            }
            tasks.submit(new Callable<Void>() {
                @Override
                public Void call() {
                    SystemTools.showMemory();
                    LOG.info(" Indexing {} (person)...", indexStr);
                    indexByJob(indexPersons,
                            indexStr.equals(INDEX_CAST) ? Filmography.DEPT_ACTORS
                                    : indexStr.equals(INDEX_DIRECTOR) ? Filmography.DEPT_DIRECTING
                                            : indexStr.equals(INDEX_WRITER) ? Filmography.DEPT_WRITING
                                                    : Movie.UNKNOWN,
                            indexStr);
                    return null;
                }
            });
        }
        tasks.waitFor();
        SystemTools.showMemory();
    }
}
From source file: org.apache.hadoop.hbase.regionserver.HLog.java
/**
 * Splits a set of HLog files into one rewritten log per region.
 *
 * <p>Processing happens in steps of {@code concurrentLogReads} input files:
 * each step reads entries into per-region queues, then a fixed pool of
 * {@code logWriterThreads} threads writes each region's queue to that
 * region's old-log file (merging any pre-existing old-log file first).
 * {@code logWriters} is synchronized because writer threads share it. Input
 * files are deleted after reading -- even on error -- to avoid replaying
 * edits forever (accepting possible edit loss, as the comments below note).
 * All writers are closed in the {@code finally} block and their paths
 * returned.</p>
 *
 * @param rootDir  root directory of the HBase instance
 * @param logfiles log files to split
 * @param fs       filesystem holding the logs
 * @param conf     configuration supplying thread counts and key/compression types
 * @return paths of the newly written per-region log files
 * @throws IOException on non-empty-file read failures and close failures
 */
private static List<Path> splitLog(final Path rootDir, final FileStatus[] logfiles, final FileSystem fs,
        final HBaseConfiguration conf) throws IOException {
    final Map<byte[], WriterAndPath> logWriters = Collections
            .synchronizedMap(new TreeMap<byte[], WriterAndPath>(Bytes.BYTES_COMPARATOR));
    List<Path> splits = null;
    // Number of threads to use when log splitting to rewrite the logs.
    // More means faster but bigger mem consumption.
    int logWriterThreads = conf.getInt("hbase.regionserver.hlog.splitlog.writer.threads", 3);
    // Number of logs to read concurrently when log splitting.
    // More means faster but bigger mem consumption */
    int concurrentLogReads = conf.getInt("hbase.regionserver.hlog.splitlog.reader.threads", 3);
    // Is append supported?
    boolean append = isAppend(conf);
    try {
        int maxSteps = Double.valueOf(Math.ceil((logfiles.length * 1.0) / concurrentLogReads)).intValue();
        for (int step = 0; step < maxSteps; step++) {
            // Per-step accumulation of entries, grouped by region name.
            final Map<byte[], LinkedList<HLogEntry>> logEntries = new TreeMap<byte[], LinkedList<HLogEntry>>(
                    Bytes.BYTES_COMPARATOR);
            // Stop at logfiles.length when it's the last step
            int endIndex = step == maxSteps - 1 ? logfiles.length
                    : step * concurrentLogReads + concurrentLogReads;
            for (int i = (step * concurrentLogReads); i < endIndex; i++) {
                // Check for possibly empty file. With appends, currently Hadoop
                // reports a zero length even if the file has been sync'd. Revisit if
                // HADOOP-4751 is committed.
                long length = logfiles[i].getLen();
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Splitting hlog " + (i + 1) + " of " + logfiles.length + ": "
                            + logfiles[i].getPath() + ", length=" + logfiles[i].getLen());
                }
                recoverLog(fs, logfiles[i].getPath(), append);
                SequenceFile.Reader in = null;
                int count = 0;
                try {
                    in = new SequenceFile.Reader(fs, logfiles[i].getPath(), conf);
                    try {
                        HLogKey key = newKey(conf);
                        KeyValue val = new KeyValue();
                        while (in.next(key, val)) {
                            byte[] regionName = key.getRegionName();
                            LinkedList<HLogEntry> queue = logEntries.get(regionName);
                            if (queue == null) {
                                queue = new LinkedList<HLogEntry>();
                                LOG.debug("Adding queue for " + Bytes.toStringBinary(regionName));
                                logEntries.put(regionName, queue);
                            }
                            HLogEntry hle = new HLogEntry(val, key);
                            queue.push(hle);
                            count++;
                            // Make the key and value new each time; otherwise same instance
                            // is used over and over.
                            key = newKey(conf);
                            val = new KeyValue();
                        }
                        LOG.debug("Pushed=" + count + " entries from " + logfiles[i].getPath());
                    } catch (IOException e) {
                        LOG.debug("IOE Pushed=" + count + " entries from " + logfiles[i].getPath());
                        e = RemoteExceptionHandler.checkIOException(e);
                        if (!(e instanceof EOFException)) {
                            LOG.warn("Exception processing " + logfiles[i].getPath()
                                    + " -- continuing. Possible DATA LOSS!", e);
                        }
                    }
                } catch (IOException e) {
                    // Zero-length files may legitimately fail to open; skip them.
                    if (length <= 0) {
                        LOG.warn("Empty hlog, continuing: " + logfiles[i] + " count=" + count, e);
                        continue;
                    }
                    throw e;
                } finally {
                    try {
                        if (in != null) {
                            in.close();
                        }
                    } catch (IOException e) {
                        LOG.warn("Close in finally threw exception -- continuing", e);
                    }
                    // Delete the input file now so we do not replay edits. We could
                    // have gotten here because of an exception. If so, probably
                    // nothing we can do about it. Replaying it, it could work but we
                    // could be stuck replaying for ever. Just continue though we
                    // could have lost some edits.
                    fs.delete(logfiles[i].getPath(), true);
                }
            }
            // Write phase: one task per region queue on a bounded pool.
            ExecutorService threadPool = Executors.newFixedThreadPool(logWriterThreads);
            for (final byte[] key : logEntries.keySet()) {
                Thread thread = new Thread(Bytes.toStringBinary(key)) {
                    @Override
                    public void run() {
                        LinkedList<HLogEntry> entries = logEntries.get(key);
                        LOG.debug("Thread got " + entries.size() + " to process");
                        long threadTime = System.currentTimeMillis();
                        try {
                            int count = 0;
                            // Items were added to the linkedlist oldest first. Pull them
                            // out in that order.
                            for (ListIterator<HLogEntry> i = entries.listIterator(entries.size()); i
                                    .hasPrevious();) {
                                HLogEntry logEntry = i.previous();
                                WriterAndPath wap = logWriters.get(key);
                                if (wap == null) {
                                    // First entry for this region: create its writer, merging
                                    // any pre-existing old-log file into the new one.
                                    Path logfile = new Path(
                                            HRegion.getRegionDir(
                                                    HTableDescriptor.getTableDir(rootDir,
                                                            logEntry.getKey().getTablename()),
                                                    HRegionInfo.encodeRegionName(key)),
                                            HREGION_OLDLOGFILE_NAME);
                                    Path oldlogfile = null;
                                    SequenceFile.Reader old = null;
                                    if (fs.exists(logfile)) {
                                        FileStatus stat = fs.getFileStatus(logfile);
                                        if (stat.getLen() <= 0) {
                                            LOG.warn("Old hlog file " + logfile + " is zero "
                                                    + "length. Deleting existing file");
                                            fs.delete(logfile, false);
                                        } else {
                                            LOG.warn("Old hlog file " + logfile + " already "
                                                    + "exists. Copying existing file to new file");
                                            oldlogfile = new Path(logfile.toString() + ".old");
                                            fs.rename(logfile, oldlogfile);
                                            old = new SequenceFile.Reader(fs, oldlogfile, conf);
                                        }
                                    }
                                    SequenceFile.Writer w = SequenceFile.createWriter(fs, conf, logfile,
                                            getKeyClass(conf), KeyValue.class, getCompressionType(conf));
                                    wap = new WriterAndPath(logfile, w);
                                    logWriters.put(key, wap);
                                    if (LOG.isDebugEnabled()) {
                                        LOG.debug("Creating new hlog file writer for path " + logfile
                                                + " and region " + Bytes.toStringBinary(key));
                                    }
                                    if (old != null) {
                                        // Copy from existing log file
                                        HLogKey oldkey = newKey(conf);
                                        KeyValue oldval = new KeyValue();
                                        for (; old.next(oldkey, oldval); count++) {
                                            if (LOG.isDebugEnabled() && count > 0 && count % 10000 == 0) {
                                                LOG.debug("Copied " + count + " edits");
                                            }
                                            w.append(oldkey, oldval);
                                        }
                                        old.close();
                                        fs.delete(oldlogfile, true);
                                    }
                                }
                                wap.w.append(logEntry.getKey(), logEntry.getEdit());
                                count++;
                            }
                            if (LOG.isDebugEnabled()) {
                                LOG.debug("Applied " + count + " total edits to " + Bytes.toStringBinary(key)
                                        + " in " + (System.currentTimeMillis() - threadTime) + "ms");
                            }
                        } catch (IOException e) {
                            e = RemoteExceptionHandler.checkIOException(e);
                            LOG.warn("Got while writing region " + Bytes.toStringBinary(key) + " log " + e);
                            e.printStackTrace();
                        }
                    }
                };
                threadPool.execute(thread);
            }
            threadPool.shutdown();
            // Wait for all threads to terminate
            try {
                for (int i = 0; !threadPool.awaitTermination(5, TimeUnit.SECONDS); i++) {
                    LOG.debug("Waiting for hlog writers to terminate, iteration #" + i);
                }
            } catch (InterruptedException ex) {
                LOG.warn("Hlog writers were interrupted, possible data loss!");
            }
        }
    } finally {
        // Close every writer (even on failure) and collect the split paths.
        splits = new ArrayList<Path>(logWriters.size());
        for (WriterAndPath wap : logWriters.values()) {
            wap.w.close();
            LOG.debug("Closed " + wap.p);
            splits.add(wap.p);
        }
    }
    return splits;
}
From source file: org.opencms.monitor.CmsMemoryMonitor.java
/** * Flushes the locks cache.<p>/*from www. j av a 2s . c om*/ * * @param newLocks if not <code>null</code> the lock cache is replaced by the given map */ public void flushLocks(Map<String, CmsLock> newLocks) { if ((newLocks == null) || newLocks.isEmpty()) { flushCache(CacheType.LOCK); return; } // initialize new lock cache Map<String, CmsLock> newLockCache = Collections.synchronizedMap(newLocks); // register it register(CmsLockManager.class.getName(), newLockCache); // save the old cache Map<String, CmsLock> oldCache = m_cacheLock; // replace the old by the new cache m_cacheLock = newLockCache; // clean up the old cache oldCache.clear(); }