Usage examples for java.util.Collections.synchronizedMap
public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m)
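Collections.synchronizedMap wraps the given map in a thread-safe view: every method of the returned map synchronizes on the wrapper, so single-key operations are safe across threads. Iteration is not atomic, however; per the JDK documentation the caller must hold the returned map's own lock while iterating. A minimal sketch before the real-world examples below:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedMapExample {
    public static void main(String[] args) {
        // Wrap a plain HashMap; single-key operations on the wrapper are thread-safe
        Map<String, Integer> counts = Collections.synchronizedMap(new HashMap<String, Integer>());
        counts.put("a", 1);
        counts.put("b", 2);

        // Iteration is NOT atomic: callers must synchronize on the map itself
        synchronized (counts) {
            for (Map.Entry<String, Integer> e : counts.entrySet()) {
                System.out.println(e.getKey() + " = " + e.getValue());
            }
        }
    }
}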
From source file:com.lfv.lanzius.server.LanziusServer.java
private void menuChoiceServerStart() {
    if (isSwapping)
        return;
    log.info("Menu: Starting server");
    try {
        // Create a HTTP server for information exchange
        httpServer = new Server();

        // Setup velocity
        Velocity.init();

        // Read the property files
        Properties serverProperties = new Properties();
        serverProperties.loadFromXML(new FileInputStream("data/properties/serverproperties.xml"));
        logEvents = serverProperties.getProperty("LogEvents", "false").equalsIgnoreCase("true");
        if (logEvents)
            log.info("Enabling event logging");

        // Setup HTTP server
        int httpPort = Integer.parseInt(serverProperties.getProperty("HttpPort", "36600"));
        httpConnector = new SelectChannelConnector();
        httpConnector.setPort(httpPort);
        httpServer.setConnectors(new Connector[] { httpConnector });

        // Dynamic context handler for the xml data provider
        ContextHandler xmlContextHandler = new ContextHandler();
        xmlContextHandler.setContextPath("/xml");
        Handler xmlHandler = new InfoRequestHandler(this, log);
        xmlContextHandler.setHandler(xmlHandler);
        httpServer.setHandlers(new Handler[] { xmlContextHandler });

        // Create a bundle
        bundle = new ServerBundle(doc);

        // Create server logger map containing all available loggers
        loggerMap = Collections.synchronizedMap(new TreeMap<Integer, ServerLogger>());

        // Start the UDP server
        int port = Integer.parseInt(serverProperties.getProperty("UdpPort", "36604"));
        networkManager = new ServerNetworkManager(port, bundle, this);
        networkManager.setNetworkHandler(this);

        isaTracePainter = serverProperties.getProperty("ISATracePainter", "line");
        panel.resetIsaChart();
    } catch (Exception ex) {
        log.error("Unable to start server! ", ex);
        JOptionPane.showMessageDialog(frame,
                "Unable to start server! Check the validity of the configuration file, the serverproperties.xml and\nthe networkproperties.ini files! Also make sure that no other server is already running!",
                "Error!", JOptionPane.ERROR_MESSAGE);
        return;
    }
    try {
        serverStartedDate = null;
        networkManager.start();
        serverStartedDate = new Date();

        // Start the HTTP server
        httpServer.start();

        // Tell it to the view to show an icon
        panel.setServerStartedDate(serverStartedDate);
    } catch (Exception ex) {
        log.error("Unable to start server!", ex);
        JOptionPane.showMessageDialog(frame,
                "Unable to start server! Check the validity of the configuration file, the serverproperties.xml and\nthe networkproperties.ini files! Also make sure that no other server is already running!",
                "Error!", JOptionPane.ERROR_MESSAGE);
        // Network manager has been started, stop it again!
        if (serverStartedDate != null) {
            networkManager.stop();
            serverStartedDate = null;
        }
    }
}
From source file:org.apache.hadoop.hbase.client.HTable.java
/**
 * {@inheritDoc}
 */
@Override
public <R extends Message> Map<byte[], R> batchCoprocessorService(
        Descriptors.MethodDescriptor methodDescriptor, Message request, byte[] startKey, byte[] endKey,
        R responsePrototype) throws ServiceException, Throwable {
    final Map<byte[], R> results = Collections
            .synchronizedMap(new TreeMap<byte[], R>(Bytes.BYTES_COMPARATOR));
    batchCoprocessorService(methodDescriptor, request, startKey, endKey, responsePrototype,
            new Callback<R>() {
                @Override
                public void update(byte[] region, byte[] row, R result) {
                    if (region != null) {
                        results.put(region, result);
                    }
                }
            });
    return results;
}
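Here the callback may fire on multiple threads, so the sorted TreeMap (keyed by region with a byte-array comparator) is wrapped in synchronizedMap before the workers write into it. A stripped-down sketch of the same aggregation pattern, not the HBase API itself (the pool, key type, and result strings are illustrative assumptions):

import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class CallbackAggregation {
    public static void main(String[] args) throws InterruptedException {
        // Sorted map shared by callback threads; synchronizedMap guards each put
        final Map<Integer, String> results = Collections.synchronizedMap(new TreeMap<Integer, String>());
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int region = i;
            pool.submit(new Runnable() {
                @Override
                public void run() {
                    results.put(region, "result-" + region); // safe concurrent write
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(10, TimeUnit.SECONDS);
        System.out.println(results); // read after all writers have finished
    }
}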
From source file:com.gargoylesoftware.htmlunit.html.HtmlPage.java
/**
 * Creates a clone of this instance, and clears cached state to be not shared with the original.
 *
 * @return a clone of this instance
 */
@Override
protected HtmlPage clone() {
    final HtmlPage result = (HtmlPage) super.clone();
    result.idMap_ = Collections.synchronizedMap(new HashMap<String, SortedSet<DomElement>>());
    result.nameMap_ = Collections.synchronizedMap(new HashMap<String, SortedSet<DomElement>>());
    return result;
}
From source file:com.buaa.cfs.conf.Configuration.java
/**
 * Load a class by name, returning null rather than throwing an exception if it couldn't be
 * loaded. This is to avoid the overhead of creating an exception.
 *
 * @param name the class name
 *
 * @return the class object, or null if it could not be found.
 */
public Class<?> getClassByNameOrNull(String name) {
    Map<String, WeakReference<Class<?>>> map;

    synchronized (CACHE_CLASSES) {
        map = CACHE_CLASSES.get(classLoader);
        if (map == null) {
            map = Collections.synchronizedMap(new WeakHashMap<String, WeakReference<Class<?>>>());
            CACHE_CLASSES.put(classLoader, map);
        }
    }

    Class<?> clazz = null;
    WeakReference<Class<?>> ref = map.get(name);
    if (ref != null) {
        clazz = ref.get();
    }

    if (clazz == null) {
        try {
            clazz = Class.forName(name, true, classLoader);
        } catch (ClassNotFoundException e) {
            // Leave a marker that the class isn't found
            map.put(name, new WeakReference<Class<?>>(NEGATIVE_CACHE_SENTINEL));
            return null;
        }
        // two putters can race here, but they'll put the same class
        map.put(name, new WeakReference<Class<?>>(clazz));
        return clazz;
    } else if (clazz == NEGATIVE_CACHE_SENTINEL) {
        return null; // not found
    } else {
        // cache hit
        return clazz;
    }
}
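The interesting detail above is the combination of synchronizedMap over a WeakHashMap (so cached classes can be garbage-collected) with a sentinel value that records lookups that already failed, avoiding the cost of constructing ClassNotFoundException repeatedly. A self-contained sketch of that negative-cache pattern under assumed names (NOT_FOUND and lookup are hypothetical, not Hadoop's API):

import java.lang.ref.WeakReference;
import java.util.Collections;
import java.util.Map;
import java.util.WeakHashMap;

public class NegativeClassCache {
    // Sentinel marking a name that was looked up and not found (assumption: mirrors the pattern above)
    private static final Class<?> NOT_FOUND = NegativeClassCache.class;

    // Weak keys let the GC reclaim stale entries; synchronizedMap guards concurrent access
    private final Map<String, WeakReference<Class<?>>> cache = Collections
            .synchronizedMap(new WeakHashMap<String, WeakReference<Class<?>>>());

    public Class<?> lookup(String name) {
        WeakReference<Class<?>> ref = cache.get(name);
        Class<?> clazz = (ref != null) ? ref.get() : null;
        if (clazz == NOT_FOUND) {
            return null; // cached miss: no exception is constructed
        }
        if (clazz != null) {
            return clazz; // cached hit
        }
        try {
            clazz = Class.forName(name);
        } catch (ClassNotFoundException e) {
            cache.put(name, new WeakReference<Class<?>>(NOT_FOUND));
            return null;
        }
        cache.put(name, new WeakReference<Class<?>>(clazz));
        return clazz;
    }
}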
From source file:fmiquerytest.Coordinates.java
public static void main(String[] args) {
    df_short.setTimeZone(tz);
    df_iso.setTimeZone(tz);
    df_daycode.setTimeZone(tz);
    DecimalFormatSymbols otherSymbols = new DecimalFormatSymbols();
    otherSymbols.setDecimalSeparator('.');
    df_fiveDecimal.setDecimalFormatSymbols(otherSymbols);
    String startTime = df_short.format(new Date(startTimeMillis));
    System.out.println("startTime: " + startTime);

    //Clean up old weather data
    //**********************************************************************
    FileSystemTools.cleanupOldWeatherData(daysToStoreWeatherData);

    //Google query
    //**********************************************************************
    if (gShare.equals("")) {
        Scanner input = new Scanner(System.in);
        System.out.println("Paste Google Directions Share:");
        gShare = input.nextLine();
    }
    String gQuery = Parser.getQueryFromShare(gShare);
    System.out.println("Google query URL: " + gQuery);

    //Check if we already have this route
    //Valid only if the route option is 0 (default)
    //Because otherwise we cannot be sure we already have the optional route
    List<routeStep> gSteps = new ArrayList<>();
    if (FileSystemTools.isSavedRoute(gQuery) && gRouteOption == 0) {
        System.out.println("Route found from saved list. Loading.");
        gSteps = FileSystemTools.loadSavedRoute(gQuery);
    } else {
        gSteps = Parser.getSteps(gQuery);
        if (gRouteOption == 0) {
            System.out.println("Saving new route to list.");
            FileSystemTools.saveRoute(gQuery, gSteps);
        }
    }

    //Compile route table with current settings
    //**********************************************************************
    List<routeStep> routeData = RouteTools.compileRoute(gSteps, refreshInterval);
    String endTime = df_short.format(new Date(startTimeMillis + routeDur * 1000));
    System.out.println("endTime: " + endTime);

    //Forecast from FMI is only for 48h - warning if we are going over
    //Or is it 54h? http://ilmatieteenlaitos.fi/avoin-data-saaennustedata-hirlam
    if (((startTimeMillis + routeDur * 1000) - System.currentTimeMillis()) / (1000 * 60 * 60) > 48) {
        System.out.println("**************************************************" + newLine + "WARNING:"
                + newLine + "Weather forecast available only for 48 hours" + newLine
                + "**************************************************");
    }

    //Prepare time and file variables
    //**********************************************************************
    String nowAsISO = df_iso.format(new Date());
    System.out.println("Start ISO time: " + nowAsISO);
    double timeMarginal = routeDur * 1.2 + 3600;
    String endTimeForFmi = df_iso.format(new Date(startTimeMillis + (intValue(timeMarginal)) * 1000));
    String endTimeForFile = df_iso.format(new Date(startTimeMillis + (intValue(routeDur + 3600)) * 1000));
    System.out.println("End ISO time: " + endTimeForFmi);
    String fmiParam = new StringBuilder("&starttime=").append(nowAsISO).append("&endtime=")
            .append(endTimeForFmi).toString();
    File weatherDataFileNameFirst = new File("weather" + nowAsISO.replaceAll("[^A-Za-z0-9 ]", "") + ".txt");
    File weatherDataFileNameLast = new File("weather" + endTimeForFmi.replaceAll("[^A-Za-z0-9 ]", "") + ".txt");
    File weatherDataFileNameStart = new File(
            "weather" + (df_iso.format(new Date(startTimeMillis))).replaceAll("[^A-Za-z0-9 ]", "") + ".txt");
    File weatherDataFileNameEnd = new File("weather" + endTimeForFile.replaceAll("[^A-Za-z0-9 ]", "") + ".txt");
    List<stationData> allStations = new ArrayList<>();
    List<stationData> fmiData = new ArrayList<>();
    List<String> savedFileTimes = new ArrayList<>();

    //**********************************************************************
    //Check if we already have the weather data
    //**********************************************************************
    if (!weatherDataFileNameStart.exists() || !weatherDataFileNameEnd.exists()) {
        //FMI query
        //**********************************************************************
        String fmiCities = new StringBuilder(fmiBase).append(fmiKey).append(fmiMiddle).append(fmiQueryCities)
                .append(fmiParam).toString();
        String fmiObsStations = new StringBuilder(fmiBase).append(fmiKey).append(fmiMiddle)
                .append(fmiQueryObsStations).append(fmiParam).toString();
        //System.out.println("FMI cities URL: "+fmiCities);
        //System.out.println("FMI obsstations URL: "+fmiObsStations);

        //Collect weather data from FMI
        //**********************************************************************
        System.out.print("FMI data:" + newLine + fmiCities + newLine + "Loading and processing...");
        fmiData.addAll(Parser.getStations(fmiCities));
        System.out.println("SUCCESS.");
        System.out.print("FMI data:" + newLine + fmiObsStations + newLine + "Loading and processing...");
        fmiData.addAll(Parser.getStations(fmiObsStations));
        System.out.println("SUCCESS.");

        //Get unique stations
        //**********************************************************************
        List<stationData> uniqueStations = ToolBox.getUniqueStations(fmiData);
        System.out.println("Parsed stations count: " + uniqueStations.size());

        //Save or load stations
        //**********************************************************************
        List<stationData> savedStations = new ArrayList<>();
        if (!stationFileName.exists()) {
            //Save current parsed stations to file
            FileSystemTools.saveObjectToFile(uniqueStations, stationFileName);
        } else {
            //Or if the stations were already saved, load them
            System.out.println("Station information file found: " + stationFileName);
            System.out.print("Loading...");
            savedStations = FileSystemTools.loadStationsFromFile(stationFileName);
            System.out.println("DONE.");
            System.out.println("Loaded stations count: " + savedStations.size());
        }

        //Merge station information
        //**********************************************************************
        System.out.println("Merging station information.");
        savedStations.addAll(uniqueStations);
        allStations = ToolBox.getUniqueStations(savedStations);
        System.out.println("Merged stations count: " + allStations.size());

        //Find names for stations
        //**********************************************************************
        String gMapsGeoCode = "https://maps.googleapis.com/maps/api/geocode/xml?latlng=";
        //for (stationData station : allStations){
        for (int i = 0; i < allStations.size(); i++) {
            if (allStations.get(i).stationName.equals("")) {
                gQuery = new StringBuilder(gMapsGeoCode).append(allStations.get(i).stationLocation.Lat)
                        .append(",").append(allStations.get(i).stationLocation.Lon).append("&key=").append(gKey)
                        .toString();
                System.out.println("Google query URL: " + gQuery);
                allStations.get(i).stationName = Parser.getStationName(gQuery);
            }
        }
        //System.out.println("Station names parsed.");
        Collections.sort(allStations);

        //Print stations and separate them for saving
        //**********************************************************************
        List<stationData> onlyStations = new ArrayList<>();
        //int indeksi = 0;
        List<weatherData> weatherPoint = new ArrayList<>();
        weatherPoint.add(0, new weatherData("", "", ""));
        for (stationData station : allStations) {
            //System.out.format("%-4s%-30s%-10s%-10s%n",
            //        indeksi, station.stationName, station.stationLocation.Lat, station.stationLocation.Lon);
            //++indeksi;
            onlyStations.add(new stationData(station.stationLocation, station.stationName, weatherPoint));
        }

        //Save station names
        //**********************************************************************
        System.out.println("Saving station names.");
        FileSystemTools.saveObjectToFile(onlyStations, stationFileName);

        //Save weather dataset
        //**********************************************************************
        //Compute file names between start and end
        System.out.println("Saving weather data...");
        long currentTimeAsDouble = System.currentTimeMillis();
        int hoursPassed = intValue(Math.floor(currentTimeAsDouble - startTimeMillis) / 1000 / 60 / 60);
        File weatherDataFileNameTemp = weatherDataFileNameFirst;
        while (!weatherDataFileNameTemp.equals(weatherDataFileNameLast)) {
            String savedFileTime = df_iso.format(new Date(startTimeMillis + ((hoursPassed * 3600) * 1000)));
            savedFileTimes.add(savedFileTime);
            weatherDataFileNameTemp = new File(
                    "weather" + savedFileTime.replaceAll("[^A-Za-z0-9 ]", "") + ".txt");
            //System.out.println("Weather data file: "+weatherDataFileNameTemp);
            //This if we don't actually maybe want
            //if (!weatherDataFileNameTemp.exists()){
            List<stationData> thisHourWeather = FileSystemTools.extractHourOfWeatherData(savedFileTime,
                    fmiData);
            //System.out.println("Saving: "+weatherDataFileNameTemp);
            FileSystemTools.saveObjectToFile(thisHourWeather, weatherDataFileNameTemp);
            //}
            ++hoursPassed;
        }
    }
    //If we have weather data saved, definitely we have the stations also
    //**********************************************************************
    else {
        System.out.println("Loading weather data...");
        File weatherDataFileNameTemp = weatherDataFileNameStart;
        int hoursPassed = 0;
        while (!weatherDataFileNameTemp.equals(weatherDataFileNameEnd)) {
            String savedFileTime = df_iso.format(new Date(startTimeMillis + ((hoursPassed * 3600) * 1000)));
            savedFileTimes.add(savedFileTime);
            weatherDataFileNameTemp = new File(
                    "weather" + savedFileTime.replaceAll("[^A-Za-z0-9 ]", "") + ".txt");
            System.out.println("Weather data file: " + weatherDataFileNameTemp);
            if (weatherDataFileNameTemp.exists()) {
                fmiData.addAll(FileSystemTools.loadStationsFromFile(weatherDataFileNameTemp));
            }
            ++hoursPassed;
        }
        allStations = FileSystemTools.loadStationsFromFile(stationFileName);
        System.out.println("DONE.");
    }

    //Find closest weather stations in route points and extract their data
    //**********************************************************************
    System.out.println("Calculating nearest stations in route points:");
    List<Integer> neededStations = new ArrayList<>();
    for (routeStep step : routeData) {
        distance[] stationDistances = RouteTools.calculateStationDistances(step.StartLocation, allStations);
        System.out.format("%-6s%.5f, %.5f ", "Step: ", step.StartLocation.Lat, step.StartLocation.Lon);
        for (int i = 0; i < 1; i++) {
            System.out.format("%-9s%-5s%-20s%.5f%n", "Station: ", stationDistances[i].stationNum,
                    allStations.get(stationDistances[i].stationNum).stationName,
                    stationDistances[i].stationDistance);
        }
        neededStations.add(stationDistances[0].stationNum);
    }
    System.out.println("Needed stations: " + neededStations.toString().trim());

    //Remove duplicates from needed stations list
    Set<Integer> uniqueEntries = new HashSet<Integer>(neededStations);

    //Extract weather data from needed stations
    Map routeWeather = Collections.synchronizedMap(new HashMap());
    routeWeather = WeatherTools.extractNeededStations(uniqueEntries, fmiData, allStations);

    //Find what fields we have
    List<String> allParameters = new ArrayList<>();
    for (int i = 0; i < fmiData.size(); ++i) {
        allParameters.add(fmiData.get(i).weatherData.get(0).parameterName);
    }
    Set<String> uniqueParameters = new HashSet<String>(allParameters);
    for (String par : uniqueParameters) {
        for (Integer num : uniqueEntries) {
            for (String time : savedFileTimes) {
                //System.out.format("%-5s%-25s%-35s%s%n",num,time,par,routeWeather.get(num+"-"+time+"-"+par));
            }
        }
    }

    //Build the final data table
    //**********************************************************************
    List<stepWeather> stepDataBase = new ArrayList<>();
    stepDataBase = RouteTools.combineRouteDatabase(routeData, neededStations, allStations);

    //Find sunrise and sunset times during the route
    //**********************************************************************
    List<String> sunEvents = DayLightTime.calculateSunEvents(stepDataBase);
    for (String s : sunEvents) {
        System.out.println(s.replaceAll(",", "."));
    }

    //Make a webpage to show the weather data
    //**********************************************************************
    WeatherTools.makeResultHtml(stepDataBase, allStations, routeWeather, sunEvents);
}
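One caution about this example: it wraps a new raw HashMap with synchronizedMap but then immediately reassigns routeWeather to the map returned by WeatherTools.extractNeededStations, so the synchronized wrapper is discarded before it is ever used, and the raw Map type forfeits generic type checking. A minimal sketch of the wrap-once pattern with type parameters (the key format and values are illustrative assumptions, not this project's API):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class WrapOnce {
    public static void main(String[] args) {
        // Build the plain map first, then wrap it once and keep only the wrapper
        Map<String, Double> plain = new HashMap<String, Double>();
        plain.put("101-2016-temp", -3.5);
        Map<String, Double> routeWeather = Collections.synchronizedMap(plain);

        // All further access goes through the synchronized view
        System.out.println(routeWeather.get("101-2016-temp"));
    }
}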
From source file:org.apache.hadoop.hive.ql.metadata.Hive.java
/**
 * Given a source directory name of the load path, load all dynamically generated partitions
 * into the specified table and return a list of strings that represent the dynamic partition
 * paths.
 * @param loadPath
 * @param tableName
 * @param partSpec
 * @param replace
 * @param numDP number of dynamic partitions
 * @param listBucketingEnabled
 * @param isAcid true if this is an ACID operation
 * @param txnId txnId, can be 0 unless isAcid == true
 * @return partition map details (PartitionSpec and Partition)
 * @throws HiveException
 */
public Map<Map<String, String>, Partition> loadDynamicPartitions(final Path loadPath, final String tableName,
        final Map<String, String> partSpec, final boolean replace, final int numDP,
        final boolean listBucketingEnabled, final boolean isAcid, final long txnId,
        final boolean hasFollowingStatsTask, final AcidUtils.Operation operation) throws HiveException {

    final Map<Map<String, String>, Partition> partitionsMap = Collections
            .synchronizedMap(new LinkedHashMap<Map<String, String>, Partition>());

    int poolSize = conf.getInt(ConfVars.HIVE_LOAD_DYNAMIC_PARTITIONS_THREAD_COUNT.varname, 1);
    final ExecutorService pool = Executors.newFixedThreadPool(poolSize,
            new ThreadFactoryBuilder().setDaemon(true).setNameFormat("load-dynamic-partitions-%d").build());

    // Get all valid partition paths and existing partitions for them (if any)
    final Table tbl = getTable(tableName);
    final Set<Path> validPartitions = getValidPartitionsInPath(numDP, loadPath);

    final int partsToLoad = validPartitions.size();
    final AtomicInteger partitionsLoaded = new AtomicInteger(0);

    final boolean inPlaceEligible = conf.getLong("fs.trash.interval", 0) <= 0
            && InPlaceUpdate.canRenderInPlace(conf) && !SessionState.getConsole().getIsSilent();
    final PrintStream ps = (inPlaceEligible) ? SessionState.getConsole().getInfoStream() : null;
    final SessionState parentSession = SessionState.get();

    final List<Future<Void>> futures = Lists.newLinkedList();
    try {
        // for each dynamically created DP directory, construct a full partition spec
        // and load the partition based on that
        final Map<Long, RawStore> rawStoreMap = new HashMap<Long, RawStore>();
        for (final Path partPath : validPartitions) {
            // generate a full partition specification
            final LinkedHashMap<String, String> fullPartSpec = Maps.newLinkedHashMap(partSpec);
            Warehouse.makeSpecFromName(fullPartSpec, partPath);
            futures.add(pool.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    try {
                        // move file would require session details (needCopy() invokes SessionState.get)
                        SessionState.setCurrentSessionState(parentSession);
                        LOG.info("New loading path = " + partPath + " with partSpec " + fullPartSpec);

                        // load the partition
                        Partition newPartition = loadPartition(partPath, tbl, fullPartSpec, replace, true,
                                listBucketingEnabled, false, isAcid, hasFollowingStatsTask);
                        partitionsMap.put(fullPartSpec, newPartition);

                        if (inPlaceEligible) {
                            synchronized (ps) {
                                InPlaceUpdate.rePositionCursor(ps);
                                partitionsLoaded.incrementAndGet();
                                InPlaceUpdate.reprintLine(ps, "Loaded : " + partitionsLoaded.get() + "/"
                                        + partsToLoad + " partitions.");
                            }
                        }
                        // Add embedded rawstore, so we can cleanup later to avoid memory leak
                        if (getMSC().isLocalMetaStore()) {
                            if (!rawStoreMap.containsKey(Thread.currentThread().getId())) {
                                rawStoreMap.put(Thread.currentThread().getId(),
                                        HiveMetaStore.HMSHandler.getRawStore());
                            }
                        }
                        return null;
                    } catch (Exception t) {
                        LOG.error("Exception when loading partition with parameters " + " partPath=" + partPath
                                + ", " + " table=" + tbl.getTableName() + ", " + " partSpec=" + fullPartSpec
                                + ", " + " replace=" + replace + ", " + " listBucketingEnabled="
                                + listBucketingEnabled + ", " + " isAcid=" + isAcid + ", "
                                + " hasFollowingStatsTask=" + hasFollowingStatsTask, t);
                        throw t;
                    }
                }
            }));
        }
        pool.shutdown();
        LOG.debug("Number of partitions to be added is " + futures.size());

        for (Future future : futures) {
            future.get();
        }
        for (RawStore rs : rawStoreMap.values()) {
            rs.shutdown();
        }
    } catch (InterruptedException | ExecutionException e) {
        LOG.debug("Cancelling " + futures.size() + " dynamic loading tasks");
        //cancel other futures
        for (Future future : futures) {
            future.cancel(true);
        }
        throw new HiveException("Exception when loading " + partsToLoad + " in table " + tbl.getTableName()
                + " with loadPath=" + loadPath, e);
    }

    try {
        if (isAcid) {
            List<String> partNames = new ArrayList<>(partitionsMap.size());
            for (Partition p : partitionsMap.values()) {
                partNames.add(p.getName());
            }
            getMSC().addDynamicPartitions(txnId, tbl.getDbName(), tbl.getTableName(), partNames,
                    AcidUtils.toDataOperationType(operation));
        }
        LOG.info("Loaded " + partitionsMap.size() + " partitions");
        return partitionsMap;
    } catch (TException te) {
        throw new HiveException("Exception updating metastore for acid table " + tableName
                + " with partitions " + partitionsMap.values(), te);
    }
}
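The core synchronizedMap pattern here is a LinkedHashMap shared by a pool of worker threads, each publishing its result with a single put; the wrapper makes those puts safe, and the LinkedHashMap records entries in the order the puts complete. A self-contained sketch of that pattern under assumed names (ParallelLoad, the "part-N" keys, and the pool size are illustrative, not Hive's API):

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ParallelLoad {
    public static void main(String[] args) throws Exception {
        // Wrapped LinkedHashMap: thread-safe puts, entries kept in completion order
        final Map<String, String> loaded = Collections.synchronizedMap(new LinkedHashMap<String, String>());
        ExecutorService pool = Executors.newFixedThreadPool(4);
        List<Future<Void>> futures = new ArrayList<Future<Void>>();
        for (int i = 0; i < 8; i++) {
            final String partition = "part-" + i;
            futures.add(pool.submit(new Callable<Void>() {
                @Override
                public Void call() {
                    loaded.put(partition, "loaded"); // each worker publishes its result
                    return null;
                }
            }));
        }
        for (Future<Void> f : futures) {
            f.get(); // propagate any worker failure, as the Hive code does
        }
        pool.shutdown();
        System.out.println(loaded.size() + " partitions loaded");
    }
}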
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache codingScheme copyright information, which
 * maps from key (derived from request parameters) to copyright String.
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_CopyRights() {
    if (cache_copyRights_ == null)
        cache_copyRights_ = Collections.synchronizedMap(new LRUMap(128));
    return cache_copyRights_;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache codingScheme information, which maps from
 * key (derived from request parameters) to CodingScheme object.
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_CodingSchemes() {
    if (cache_codingSchemes_ == null)
        cache_codingSchemes_ = Collections.synchronizedMap(new LRUMap(16));
    return cache_codingSchemes_;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache hierarchy ID information, which maps from
 * key (derived from request parameters) to an array of hierarchy IDs
 * (String[]).
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_HIDs() {
    if (cache_hIDs_ == null)
        cache_hIDs_ = Collections.synchronizedMap(new LRUMap(128));
    return cache_hIDs_;
}
From source file:org.LexGrid.LexBIG.Impl.Extensions.GenericExtensions.LexBIGServiceConvenienceMethodsImpl.java
/**
 * Return the map used to cache hierarchy root information, which maps from
 * key (derived from request parameters) to a ResolvedConceptReferenceList.
 * <p>
 * Note: Methods requiring the cache should invoke this method rather than
 * directly referencing the class variable in order to allow lazy
 * initialization.
 */
@LgClientSideSafe
protected Map getCache_HRoots() {
    if (cache_hRoots_ == null)
        cache_hRoots_ = Collections.synchronizedMap(new LRUMap(128));
    return cache_hRoots_;
}
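These LexBIG getters all lazily build a bounded, synchronized LRU cache; LRUMap comes from Apache Commons Collections. The same idea can be expressed with only the JDK, using a LinkedHashMap in access order plus removeEldestEntry. A hedged sketch (LazyLruCache, MAX_ENTRIES, and getCache are assumed names, not LexBIG's API); note the getter itself is synchronized, since the bare check-then-create in the originals is only safe if callers are otherwise serialized:

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

public class LazyLruCache {
    private static final int MAX_ENTRIES = 128;
    private Map<String, Object> cache_;

    // Lazily build a bounded, synchronized LRU cache (JDK-only stand-in for commons-collections LRUMap)
    protected synchronized Map<String, Object> getCache() {
        if (cache_ == null) {
            cache_ = Collections.synchronizedMap(new LinkedHashMap<String, Object>(16, 0.75f, true) {
                @Override
                protected boolean removeEldestEntry(Map.Entry<String, Object> eldest) {
                    return size() > MAX_ENTRIES; // evict least-recently-used entry
                }
            });
        }
        return cache_;
    }
}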