List of usage examples for java.util.Collections.synchronizedList
public static <T> List<T> synchronizedList(List<T> list)
From source file:org.openhealthtools.openexchange.actorconfig.ActorConfigurationLoader.java
/** * Loads the supplied configuration file. If the argument is * 'true', then create an initialize all of the IHE actors in the file. If the * argument is 'false', save the actors away for GUI access. * * @param file the configuration file/*from w w w. j a v a2 s . c om*/ * @param autoInstallActors If 'true' create the actors in this configuration, else store them up * @param reset whether to reset actorDefinitions or resetAllBrokers * @param logContext the LogContext to be used for audit logging * @return 'true' if the configuration was loaded successfully * @throws IheConfigurationException When there is a problem with the configuration file */ private boolean loadConfiguration(File file, boolean autoInstallActors, boolean reset, ILogContext logContext) throws IheConfigurationException { LibraryConfig libConfig = LibraryConfig.getInstance(); libConfig.setLogContext(logContext); boolean okay = true; // Reset the list of loaded actors if (reset) actorDefinitions = Collections.synchronizedList(new ArrayList<IActorDescription>()); // If we are auto-installing, reset all the brokers if (autoInstallActors && reset) destroyAllActors(); // Make sure we have a configuration file File configFile = file; if (configFile == null) { throw new IheConfigurationException("No file given to configuration loader"); } else if (!configFile.exists()) { throw new IheConfigurationException( "The configuration file \"" + configFile.getAbsolutePath() + "\" does not exist"); } // Create a builder factory and a builder, and get the configuration document. 
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setValidating(false); Document configuration = null; try { configuration = factory.newDocumentBuilder().parse(configFile); } catch (SAXException e) { // An XML exception throw new IheConfigurationException( "Invalid XML in configuration file '" + configFile.getAbsolutePath() + "'", e); } catch (IOException e) { // A problem reading the file throw new IheConfigurationException( "Cannot read configuration file '" + configFile.getAbsolutePath() + "'", e); } catch (ParserConfigurationException e) { // No XML implementation throw new IheConfigurationException( "No XML implementation to process configuration file '" + configFile.getAbsolutePath() + "'", e); } // Get the list of XML elements in the configuration file NodeList configurationElements = configuration.getDocumentElement().getChildNodes(); // Load all the connection definitions first for (int elementIndex = 0; elementIndex < configurationElements.getLength(); elementIndex++) { Node element = configurationElements.item(elementIndex); if (element instanceof Element) { // See what type of element it is String name = element.getNodeName(); if (name.equalsIgnoreCase(CONNECTIONFILE)) { // An included connection file, load it if (!processConnectionFile((Element) element, configFile)) okay = false; } else if (name.equalsIgnoreCase(SECURECONNECTION) || name.equalsIgnoreCase(STANDARDCONNECTION)) { // An included connection, load it if (!ConnectionFactory.loadConnectionDescriptionsFromXmlNode(element, configFile)) { ActorDescriptionLoader.throwIheConfigurationException( "Error loading configuration file '" + configFile.getAbsolutePath() + "'", configFile); okay = false; } } } } // If all the connection files loaded okay, define the various actors if (okay) { for (int elementIndex = 0; elementIndex < configurationElements.getLength(); elementIndex++) { Node element = configurationElements.item(elementIndex); if (element instanceof Element) { // 
See what type of element it is String name = element.getNodeName(); if (name.equalsIgnoreCase(ACTORFILE)) { if (!processActorFile((Element) element, false /*autoInstallActors*/, configFile, false /*reset*/)) okay = false; } else if (name.equalsIgnoreCase(ACTOR)) { // An IHE actor definition if (!processActorDefinition((Element) element, configFile)) okay = false; } } } } if (autoInstallActors) { Collection config = actorDefinitions; resetConfiguration(config); } // Done initialized = true; return true; }
From source file:org.beepcore.beep.profile.tls.jsse.TLSProfileJSSE.java
/** * TLS provides encryption and optionally authentication for a session * by opening a channel with this profile. The default action is to * set up for a channel with encryption only, no authentication. To * mandate authentication, set the configuration via <code>init</code>.<p> * @see org.beepcore.beep.profile.Profile *///www . j a va 2 s . c o m public TLSProfileJSSE() { try { SSLContext ctx = SSLContext.getInstance("TLS"); ctx.init(null, null, null); socketFactory = (SSLSocketFactory) ctx.getSocketFactory(); } catch (NoSuchAlgorithmException e) { log.error(e.getMessage()); } catch (KeyManagementException e) { log.error(e.getMessage()); } if (handshakeListeners == null) { handshakeListeners = Collections.synchronizedList(new LinkedList()); } }
From source file:org.apache.kylin.storage.hbase.util.DeployCoprocessorCLI.java
private static List<String> resetCoprocessorOnHTables(final Admin hbaseAdmin, final Path hdfsCoprocessorJar, List<String> tableNames) throws IOException { List<String> processedTables = Collections.synchronizedList(new ArrayList<String>()); ExecutorService coprocessorPool = Executors .newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2); CountDownLatch countDownLatch = new CountDownLatch(tableNames.size()); for (final String tableName : tableNames) { coprocessorPool.execute(new ResetCoprocessorWorker(countDownLatch, hbaseAdmin, hdfsCoprocessorJar, tableName, processedTables)); }//from ww w . ja v a 2s. co m try { countDownLatch.await(); } catch (InterruptedException e) { logger.error("reset coprocessor failed: ", e); } coprocessorPool.shutdown(); return processedTables; }
From source file:org.opencds.service.drools.v55.DroolsAdapter.java
/**
 * Evaluates one requested knowledge module (KM) against the supplied request data.
 *
 * Big-picture flow for the requested KM id:
 *   1. build fact lists from the external CDS input,
 *   2. obtain (or build and cache) the Drools KnowledgeBase for the KM,
 *   3. load globals and facts into a stateless session and execute it,
 *   4. marshal the execution results back into the external VMR payload string.
 *
 * The OMG-CDSS concept of KnowledgeModule is treated as equivalent to the Drools
 * concept of KnowledgeBase.
 *
 * @param dssRequestKMItem request wrapper carrying the KM id, the CDS input and timing data
 * @return the marshalled output payload string for this KM
 * @throws DSSRuntimeExceptionFault on KM build failures or Drools execution errors; the
 *         other declared faults signal invalid input data or unrecognized entities/languages
 */
public String getOneResponse(DSSRequestKMItem dssRequestKMItem) throws InvalidDriDataFormatExceptionFault,
        RequiredDataNotProvidedExceptionFault, EvaluationExceptionFault, InvalidTimeZoneOffsetExceptionFault,
        UnrecognizedScopedEntityExceptionFault, UnrecognizedLanguageExceptionFault,
        UnsupportedLanguageExceptionFault, DSSRuntimeExceptionFault {

    String requestedKmId = dssRequestKMItem.getRequestedKmId();
    String requestedKmPrimaryProcessName = SimpleKnowledgeRepository
            .getRequiredKMPrimaryProcessNameForKMId(requestedKmId);

    TimingDataKM timingDataKM = dssRequestKMItem.getKmTimingData();

    @SuppressWarnings("unchecked")
    JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput> cdsInput = (JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput>) dssRequestKMItem
            .getDssRequestDataItem().getCdsInput();

    DSSRequestDataItem dssRequestDataItem = dssRequestKMItem.getDssRequestDataItem();

    String externalFactModelSSId = dssRequestDataItem.getExternalFactModelSSId();
    Date evalTime = dssRequestDataItem.getEvalTime();
    String clientLanguage = dssRequestDataItem.getClientLanguage();
    String clientTimeZoneOffset = dssRequestDataItem.getClientTimeZoneOffset();
    String interactionId = dssRequestDataItem.getInteractionId();

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " (" + requestedKmPrimaryProcessName + ")"
            + ", SSId: " + externalFactModelSSId + ", evalTime: " + evalTime + ", clTimeZone: "
            + clientTimeZoneOffset + ", clLang: " + clientLanguage);

    // Load the fact map from the externalFactModel named by SSId. Every separately
    // identified SSId, by definition, specifies separate input and output mappings;
    // input mappings are used here, output mappings after session.execute.
    Map<String, List<?>> allFactLists = Collections.synchronizedMap(new WeakHashMap<String, List<?>>());

    // allFactLists is updated in place by the following call (both facts and concepts).
    String focalPersonId = BuildCDSInputFactLists.buildFactLists(cdsInput, evalTime, allFactLists, timingDataKM);

    dssRequestKMItem.setFocalPersonId(focalPersonId);

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " built fact/concept lists for "
            + focalPersonId);

    timingDataKM.setFinishBuildConceptListsTime(new AtomicLong(System.nanoTime()));

    // Get the KM and load it into a stateless session. Current assumption: each requested
    // knowledge module runs separately, i.e. as part of its own distinct knowledge base.
    File drlFile = null;
    File bpmnFile = null;
    File pkgFile = null;

    KnowledgeBase knowledgeBase = (KnowledgeBase) SimpleKnowledgeRepository
            .getKnowledgeBaseCache(requestedKmId);
    if (knowledgeBase != null) {
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase from cache");
    } else {
        // Cache miss: build the knowledge base from whichever resource files exist for the KM.
        knowledgeBase = KnowledgeBaseFactory.newKnowledgeBase();
        KnowledgeBuilder knowledgeBuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        drlFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".drl");
        bpmnFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".bpmn");
        pkgFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".pkg");
        if (drlFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(drlFile), ResourceType.DRL);
        if (bpmnFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(bpmnFile), ResourceType.BPMN2);
        if (pkgFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(pkgFile), ResourceType.PKG);
        if (knowledgeBuilder.hasErrors()) {
            throw new DSSRuntimeExceptionFault("KnowledgeBuilder had errors on build of: '" + requestedKmId
                    + "', " + knowledgeBuilder.getErrors().toString());
        }
        if (knowledgeBuilder.getKnowledgePackages().size() == 0) {
            throw new DSSRuntimeExceptionFault(
                    "KnowledgeBuilder did not find any VALID '.drl', '.bpmn' or '.pkg' files for: '"
                            + requestedKmId + "', " + knowledgeBuilder.getErrors().toString());
        }
        knowledgeBase.addKnowledgePackages(knowledgeBuilder.getKnowledgePackages());
        SimpleKnowledgeRepository.putKnowledgeBaseCache(requestedKmId, knowledgeBase);
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase built");
    }
    dssRequestKMItem.getKmTimingData().setFinishInsertKnowledgeTime(new AtomicLong(System.nanoTime()));

    StatelessKnowledgeSession statelessKnowledgeSession = knowledgeBase.newStatelessKnowledgeSession();

    // For in-depth Drools/JBPM debugging, attach a WorkingMemoryInMemoryLogger (records all
    // input) or a WorkingMemoryFileLogger (one input at a time) to statelessKnowledgeSession
    // here. TODO: make that choice based on configuration settings.
    dssRequestKMItem.getKmTimingData().setFinishStartKnowledgeSessionTime(new AtomicLong(System.nanoTime()));

    @SuppressWarnings("rawtypes")
    List<Command> cmds = Collections.synchronizedList(new ArrayList<Command>());

    // Load the globals: evalTime, language, timezoneOffset, focalPersonId, assertions,
    // namedObjects.
    cmds.add(CommandFactory.newSetGlobal("evalTime", evalTime));
    cmds.add(CommandFactory.newSetGlobal("clientLanguage", clientLanguage));
    cmds.add(CommandFactory.newSetGlobal("clientTimeZoneOffset", clientTimeZoneOffset));
    cmds.add(CommandFactory.newSetGlobal("focalPersonId", dssRequestKMItem.getFocalPersonId()));

    // Global used to store flags for inter-task communication in a JBPM process.
    java.util.Set<String> assertions = new java.util.HashSet<String>();
    cmds.add(CommandFactory.newSetGlobal("assertions", assertions));

    // Global used to return facts added by rules, such as new observationResults.
    java.util.Map<String, Object> namedObjects = new java.util.HashMap<String, Object>();
    cmds.add(CommandFactory.newSetGlobal("namedObjects", namedObjects));
    dssRequestKMItem.getKmTimingData().setFinishLoadGlobalsTime(new AtomicLong(System.nanoTime()));

    // Turn each non-empty fact list into an insert-elements command, keyed by the simple
    // class name of the list's (last) element.
    synchronized (allFactLists) {
        for (String oneName : allFactLists.keySet()) {
            @SuppressWarnings("unchecked")
            List<Object> oneFactList = (List<Object>) allFactLists.get(oneName);
            String oneTypeName = "";
            for (Object oneFact : (List<Object>) oneFactList) {
                oneTypeName = oneFact.getClass().getSimpleName();
            }
            if (oneFactList.size() > 0) {
                cmds.add(CommandFactory.newInsertElements((List<?>) oneFactList, oneTypeName, true, null));
            } else {
                // NOTE(review): removes by oneTypeName (which is "" for an empty list) while
                // iterating keySet(); removing oneName via an Iterator looks like the intent,
                // and direct removal during iteration risks ConcurrentModificationException —
                // confirm before changing.
                allFactLists.remove(oneTypeName);
            }
        }
    }
    dssRequestKMItem.getKmTimingData().setFinishLoadFactListsTime(new AtomicLong(System.nanoTime()));

    // If this is a PKG (package with process), initiate the configured primary process
    // for JBPM.
    if ((requestedKmPrimaryProcessName != null) && (!"".equals(requestedKmPrimaryProcessName))) {
        if ("".equals(requestedKmPrimaryProcessName)) {
            // NOTE(review): unreachable — the enclosing condition already excludes "".
            throw new DSSRuntimeExceptionFault("DroolsAdapter found improperly configured KM: " + requestedKmId
                    + ". This KM includes a BPMN file, but does not have a value "
                    + "for 'knowledgeModulePrimaryProcessName' in its configuration.");
        }
        cmds.add(CommandFactory.newStartProcess(requestedKmPrimaryProcessName));
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase Primary Process: "
                + requestedKmPrimaryProcessName);
    }

    dssRequestKMItem.getKmTimingData().setStartInferenceEngine(new AtomicLong(System.nanoTime()));

    // Use Drools to process everything. try/catch added around the stateless session
    // because Drools has an unhandled exception when a JBPM process improperly re-declares
    // a global that is constraining a gateway and the resultant global is null - des 20120727.
    ExecutionResults results = null;
    try {
        results = statelessKnowledgeSession.execute(CommandFactory.newBatchExecution((cmds)));
    } catch (Exception e) {
        String err = "OpenCDS call to Drools.execute failed with error: " + e.getMessage();
        log.error(err);
        StackTraceElement elements[] = e.getStackTrace();
        for (int i = 0, n = elements.length; i < n; i++) {
            String detail = elements[i].getClassName() + ":" + elements[i].getMethodName() + ":"
                    + elements[i].getLineNumber();
            log.error(detail);
            err += "\n" + elements[i].getMethodName();
        }
        throw new DSSRuntimeExceptionFault(err);
    }
    dssRequestKMItem.getKmTimingData().setFinishInferenceEngine(new AtomicLong(System.nanoTime()));

    // Update original entries from allFactLists to capture any new or updated elements.
    // results contains the original objects passed in via the command structure, with any
    // changes introduced by rules; look at every identifier, because rules may have
    // created new fact lists.
    Map<String, List<?>> resultFactLists = Collections.synchronizedMap(new WeakHashMap<String, List<?>>());
    synchronized (results) {
        Collection<String> allResultNames = results.getIdentifiers();
        for (String oneName : allResultNames) {
            if (!("evalTime".equals(oneName)) && !("clientLanguage".equals(oneName))
                    && !("clientTimeZoneOffset".equals(oneName))) {
                // Ignore these submitted globals (rules should not have changed them);
                // keep everything else.
                Object oneList = results.getValue(oneName);
                resultFactLists.put(oneName, (List<?>) oneList);
            }
        }
    }

    // Process the returned namedObjects and add them to resultFactLists, grouped by the
    // object's simple class name.
    synchronized (namedObjects) {
        for (String key : namedObjects.keySet()) {
            if (namedObjects.get(key) != null) {
                Object oneNamedObject = namedObjects.get(key);
                @SuppressWarnings("unchecked")
                List<Object> oneList = (List<Object>) resultFactLists
                        .get(oneNamedObject.getClass().getSimpleName());
                if (oneList == null) {
                    oneList = new ArrayList<Object>();
                    oneList.add(oneNamedObject);
                } else {
                    oneList.add(oneNamedObject);
                }
                resultFactLists.put(oneNamedObject.getClass().getSimpleName(), oneList);
            }
        }
    }

    // Retrieve the results for this requested KM and stack them in the DSS fkmResponse.
    // NOTE: each additional requested KM will have a separate output payload.
    dssRequestKMItem.getKmTimingData().setFinishInferenceEngineAdapterTime(new AtomicLong(System.nanoTime()));

    log.debug(
            "II: " + interactionId + " KMId: " + requestedKmId + " begin marshalling results to external VMR ");

    // FIXME: probably need a static final string to identify the output SSID (probably
    // always VMR).
    IOutboundPayloadProcessor payloadCreator = (IOutboundPayloadProcessor) SimpleKnowledgeRepository
            .getPayloadCreatorInstanceForClassNameCache(
                    SimpleKnowledgeRepository.getRequiredPayloadCreatorForSSID(externalFactModelSSId));

    // Normally instantiated as
    // MarshalVMR2VMRSchemaPayload.getInstance().mappingOutbound(resultFactLists, dssRequestKMItem).
    String outputString = payloadCreator.mappingOutbound(resultFactLists, dssRequestKMItem);

    log.trace("II: " + interactionId + " KMId: " + requestedKmId
            + " finished marshalling results to external VMR, " + outputString.length() + " chars.");

    // Clear out maps and arrays and null the references, to release memory held by this
    // request as early as possible. (Memory-usage instrumentation that previously
    // surrounded each step has been removed; re-add temporarily if chasing a leak.)
    allFactLists.clear();
    allFactLists = null;
    cmds.clear();
    cmds = null;
    assertions.clear();
    assertions = null;
    namedObjects.clear();
    namedObjects = null;
    for (String oneId : results.getIdentifiers()) {
        // NOTE(review): this calls remove(Object) on the identifier collection with a
        // result *value*, which looks ineffective for clearing results — confirm intent.
        results.getIdentifiers().remove(results.getValue(oneId));
    }
    results = null;
    resultFactLists.values().clear();
    resultFactLists.clear();
    resultFactLists = null;

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " completed Drools inferencing engine");

    return outputString;
}
From source file:com.sonyericsson.hudson.plugins.gerrit.trigger.playback.GerritMissedEventsPlaybackManager.java
/** * Get events for a given lower bound date. * @param lowerDate lower bound for which to request missed events. * @return collection of gerrit events./*from www . j a va 2s .co m*/ * @throws IOException if HTTP errors occur */ protected List<GerritTriggeredEvent> getEventsFromDateRange(Date lowerDate) throws IOException { GerritServer server = PluginImpl.getServer_(serverName); if (server == null) { logger.error("Server for {} could not be found.", serverName); return Collections.synchronizedList(new ArrayList<GerritTriggeredEvent>()); } IGerritHudsonTriggerConfig config = server.getConfig(); String events = getEventsFromEventsLogPlugin(config, buildEventsLogURL(config, lowerDate)); return createEventsFromString(events); }
From source file:org.tinymediamanager.core.movie.MovieList.java
void loadMovieSetsFromDatabase(MVMap<UUID, String> movieSetMap, ObjectMapper objectMapper) { // load movie sets movieSetList = ObservableCollections .observableList(Collections.synchronizedList(new ArrayList<MovieSet>())); ObjectReader movieSetObjectReader = objectMapper.readerFor(MovieSet.class); for (UUID uuid : movieSetMap.keyList()) { try {//from ww w . j av a 2 s . c o m MovieSet movieSet = movieSetObjectReader.readValue(movieSetMap.get(uuid)); movieSet.setDbId(uuid); // for performance reasons we add movies sets directly movieSetList.add(movieSet); } catch (Exception e) { LOGGER.warn("problem decoding movie set json string: ", e); } } LOGGER.info("found " + movieSetList.size() + " movieSets in database"); }
From source file:eu.eidas.auth.commons.PersonalAttribute.java
/** * Setter for the list of values./*from ww w . ja v a 2 s.c om*/ * * @param attrValue The personal attribute value. */ public synchronized void setValue(@Nullable List<String> attrValue) { // no defensive copy needed when there is no reference update if (value == attrValue) { return; } if (null == attrValue || attrValue.isEmpty()) { value.clear(); return; } ArrayList<String> defensiveCopy = new ArrayList<String>(); defensiveCopy.addAll(attrValue); value = Collections.synchronizedList(defensiveCopy); }
From source file:com.ebay.erl.mobius.core.collection.BigTupleList.java
private File newLocalFile() throws IOException { // creating a new file. File newFile = new File(this.workOutput, System.currentTimeMillis() + ".tuples"); try {/*from www .j a v a2 s.c o m*/ while (newFile.exists()) { Thread.sleep(10); newFile = new File(this.workOutput, System.currentTimeMillis() + ".tuples"); } } catch (InterruptedException e) { throw new RuntimeException("Cannot create new tempory file.", e); } // push the new file into buffer to remember it and return LOGGER.debug(Thread.currentThread().toString() + " BID[" + this._ID + "]" + " new local file:" + newFile.getAbsolutePath()); if (this.buffer_on_disk == null) this.buffer_on_disk = Collections.synchronizedList(new LinkedList<File>()); this.buffer_on_disk.add(newFile); return newFile; }
From source file:com.sonyericsson.hudson.plugins.gerrit.trigger.playback.GerritMissedEventsPlaybackManager.java
/**
 * Takes a string of json events and creates a collection.
 *
 * Lines that are not usable JSON, or that fail DTO conversion, are logged and skipped;
 * only GerritTriggeredEvent instances are kept, each tagged with this server's provider.
 *
 * @param eventsString Events in json in a string, one event per line.
 * @return collection of events.
 */
private List<GerritTriggeredEvent> createEventsFromString(String eventsString) {
    List<GerritTriggeredEvent> events = Collections.synchronizedList(new ArrayList<GerritTriggeredEvent>());
    Scanner scanner = new Scanner(eventsString);
    // BUGFIX: close the scanner in a finally block so it is released even if event
    // creation throws (previously it leaked on any uncaught exception).
    try {
        while (scanner.hasNextLine()) {
            String line = scanner.nextLine();
            logger.debug("found line: {}", line);
            JSONObject jsonObject = null;
            try {
                jsonObject = GerritJsonEventFactory.getJsonObjectIfInterestingAndUsable(line);
                if (jsonObject == null) {
                    continue;
                }
            } catch (Exception ex) {
                logger.warn("Unanticipated error when creating DTO representation of JSON string.", ex);
                continue;
            }
            GerritEvent evt = GerritJsonEventFactory.getEvent(jsonObject);
            if (evt instanceof GerritTriggeredEvent) {
                // Tag the event with the server it came from.
                Provider provider = new Provider();
                provider.setName(serverName);
                ((GerritTriggeredEvent) evt).setProvider(provider);
                events.add((GerritTriggeredEvent) evt);
            }
        }
    } finally {
        scanner.close();
    }
    return events;
}
From source file:com.gargoylesoftware.htmlunit.WebClientWaitForBackgroundJobsTest.java
/**
 * Tests the case where a job is being executed at the time
 * waitForBackgroundJavaScriptStartingBefore is called, and where this job schedules a
 * new job after the call to waitForBackgroundJavaScriptStartingBefore.
 *
 * @throws Exception if the test fails
 */
@Test
@Tries(3)
public void newJobStartedAfterWait() throws Exception {
    // Page whose XHR completion handler schedules a second job (doWork1) via setTimeout.
    final String html = "<html>\n" + "<head>\n" + " <title>test</title>\n" + " <script>\n"
            + " var request;\n" + " function onReadyStateChange() {\n" + " if (request.readyState == 4) {\n"
            + " alert('xhr onchange');\n" + " setTimeout(doWork1, 200);\n" + " }\n" + " }\n"
            + " function test() {\n" + " request = " + XHRInstantiation_ + ";\n"
            + " request.open('GET', 'wait', true);\n" + " request.onreadystatechange = onReadyStateChange;\n"
            + " // waitForBackgroundJavaScriptStartingBefore should be called when JS execution is in send()\n"
            + " request.send('');\n" + " }\n" + " function doWork1() {\n" + " alert('work1');\n" + " }\n"
            + " </script>\n" + "</head>\n" + "<body onload='test()'>\n" + "</body>\n" + "</html>";

    final ThreadSynchronizer threadSynchronizer = new ThreadSynchronizer();
    // Web connection that blocks the XHR response until the test thread signals that it is
    // about to call waitForBackgroundJavaScriptStartingBefore.
    final MockWebConnection webConnection = new MockWebConnection() {
        @Override
        public WebResponse getResponse(final WebRequest request) throws IOException {
            if (request.getUrl().toExternalForm().endsWith("/wait")) {
                threadSynchronizer.waitForState("just before waitForBackgroundJavaScriptStartingBefore");
                threadSynchronizer.sleep(400); // main thread need to be able to process next instruction
            }
            return super.getResponse(request);
        }
    };

    webConnection.setResponse(URL_FIRST, html);
    webConnection.setDefaultResponse("");

    final WebClient client = getWebClient();
    client.setWebConnection(webConnection);

    // Synchronized list: alerts are collected from the JS background thread.
    final List<String> collectedAlerts = Collections.synchronizedList(new ArrayList<String>());
    client.setAlertHandler(new CollectingAlertHandler(collectedAlerts));

    final HtmlPage page = client.getPage(URL_FIRST);
    final JavaScriptJobManager jobManager = page.getEnclosingWindow().getJobManager();
    assertNotNull(jobManager);
    // The pending XHR counts as one background job at this point.
    assertEquals(1, jobManager.getJobCount());

    startTimedTest();
    // Release the blocked web connection, then wait for all background JS (including the
    // job scheduled during the wait) to finish.
    threadSynchronizer.setState("just before waitForBackgroundJavaScriptStartingBefore");
    assertEquals(0, client.waitForBackgroundJavaScriptStartingBefore(20_000));
    assertMaxTestRunTime(1000);
    assertEquals(0, jobManager.getJobCount());
    final String[] expectedAlerts = { "xhr onchange", "work1" };
    assertEquals(expectedAlerts, collectedAlerts);
}