List of usage examples for java.util.Collections.synchronizedList
public static <T> List<T> synchronizedList(List<T> list)
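The returned wrapper makes individual calls (add, get, size, and so on) thread-safe, but iteration is not atomic and must be synchronized on the wrapper itself, as the JDK documentation requires. A minimal, self-contained sketch (not taken from the example sources below):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SynchronizedListExample {
    public static void main(String[] args) throws InterruptedException {
        List<String> events = Collections.synchronizedList(new ArrayList<String>());

        // Individual mutating calls are safe without extra locking.
        Thread worker = new Thread(() -> events.add("from worker"));
        worker.start();
        events.add("from main");
        worker.join();

        // Iteration is NOT atomic: lock on the wrapper while traversing.
        synchronized (events) {
            for (String e : events) {
                System.out.println(e);
            }
        }
    }
}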
From source file:com.emc.ecs.sync.CasMigrationTest.java
protected List<String> createTestClips(FPPool pool, int maxBlobSize, int thisMany, Writer summaryWriter) throws Exception {
    ExecutorService service = Executors.newFixedThreadPool(CAS_THREADS);
    System.out.print("Creating clips");
    List<String> clipIds = Collections.synchronizedList(new ArrayList<String>());
    List<String> summaries = Collections.synchronizedList(new ArrayList<String>());
    for (int clipIdx = 0; clipIdx < thisMany; clipIdx++) {
        service.submit(new ClipWriter(pool, clipIds, maxBlobSize, summaries));
    }
    service.shutdown();
    service.awaitTermination(CAS_SETUP_WAIT_MINUTES, TimeUnit.MINUTES);
    service.shutdownNow();
    Collections.sort(summaries);
    for (String summary : summaries) {
        summaryWriter.append(summary);
    }
    System.out.println();
    return clipIds;
}
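The pattern above, a fixed thread pool whose workers append results to a Collections.synchronizedList, followed by shutdown/awaitTermination before the list is read, reduces to the following sketch (class and task names here are illustrative, not from the source):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class CollectWithPool {
    public static void main(String[] args) throws InterruptedException {
        List<String> results = Collections.synchronizedList(new ArrayList<String>());
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 20; i++) {
            final int id = i;
            pool.submit(() -> results.add("task-" + id)); // safe concurrent append
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES);
        // All workers are done, so reading (and sorting) the list is now single-threaded.
        Collections.sort(results);
        System.out.println(results.size() + " results collected");
    }
}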
From source file:org.opencds.service.drools.v54.DroolsAdapter.java
/**
 * big picture pseudo code for following method:
 *
 *     for this requestedKmId {
 *         getResponse:
 *             create Drools session
 *             load KM into session
 *             load globals into session
 *             load data from allFactLists into session
 *             KBase.execute (calls Drools)
 *             unload result from KM to outputString
 *     }
 *
 * This means that we are considering the OMG-CDSS concept of KnowledgeModule equivalent to
 * the Drools concept of KnowledgeBase.
 */
public String getOneResponse(DSSRequestKMItem dssRequestKMItem)
        throws InvalidDriDataFormatExceptionFault, RequiredDataNotProvidedExceptionFault, EvaluationExceptionFault,
        InvalidTimeZoneOffsetExceptionFault, UnrecognizedScopedEntityExceptionFault,
        UnrecognizedLanguageExceptionFault, UnsupportedLanguageExceptionFault, DSSRuntimeExceptionFault {

    // SimpleKnowledgeRepository simpleKR = SimpleKnowledgeRepository.getInstance();

    String requestedKmId = dssRequestKMItem.getRequestedKmId();
    String requestedKmPrimaryProcessName = SimpleKnowledgeRepository
            .getRequiredKMPrimaryProcessNameForKMId(requestedKmId);

    TimingDataKM timingDataKM = dssRequestKMItem.getKmTimingData();

    @SuppressWarnings("unchecked")
    JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput> cdsInput = (JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput>) dssRequestKMItem
            .getDssRequestDataItem().getCdsInput();

    DSSRequestDataItem dssRequestDataItem = dssRequestKMItem.getDssRequestDataItem();

    String externalFactModelSSId = dssRequestDataItem.getExternalFactModelSSId();
    Date evalTime = dssRequestDataItem.getEvalTime();
    String clientLanguage = dssRequestDataItem.getClientLanguage();
    String clientTimeZoneOffset = dssRequestDataItem.getClientTimeZoneOffset();
    String interactionId = dssRequestDataItem.getInteractionId();

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " (" + requestedKmPrimaryProcessName + ")"
            + ", SSId: " + externalFactModelSSId + ", evalTime: " + evalTime + ", clTimeZone: "
            + clientTimeZoneOffset + ", clLang: " + clientLanguage);

    /**
     * Load fact map from specific externalFactModels, as specified in externalFactModel SSId...
     *
     * Every separately identified SSId, by definition, specifies separate input and output mappings.
     * Input mappings are used here, and then output mappings are used following the session.execute.
     */
    // Map<String, List<?>> allFactLists = Collections.synchronizedMap(new WeakHashMap<String, List<?>>());
    Map<String, List<?>> allFactLists = new HashMap<String, List<?>>();

    // allFactLists are updated in place by the following call, including both facts and concepts...
    String focalPersonId = BuildCDSInputFactLists.buildFactLists(cdsInput, evalTime, allFactLists, timingDataKM);
    // String focalPersonId = "123^1.2.3";

    dssRequestKMItem.setFocalPersonId(focalPersonId);

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " built fact/concept lists for " + focalPersonId);

    timingDataKM.setFinishBuildConceptListsTime(new AtomicLong(System.nanoTime()));

    /**
     * Get the KMs and Load them into a stateless session
     *
     * Currently, assumption is made that each requested knowledge module will be run separately
     * (i.e., as part of a separate distinct knowledge base)
     */
    File drlFile = null;
    File bpmnFile = null;
    File pkgFile = null;

    KnowledgeBase knowledgeBase = (KnowledgeBase) SimpleKnowledgeRepository.getKnowledgeBaseCache(requestedKmId);

    // synchronized (knowledgeBase) {
    if (knowledgeBase != null) {
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase from cache");
    } else {
        knowledgeBase = KnowledgeBaseFactory.newKnowledgeBase();
        KnowledgeBuilder knowledgeBuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();

        drlFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".drl");
        bpmnFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".bpmn");
        pkgFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".pkg");

        if (drlFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(drlFile), ResourceType.DRL);
        if (bpmnFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(bpmnFile), ResourceType.BPMN2);
        if (pkgFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(pkgFile), ResourceType.PKG);

        if (knowledgeBuilder.hasErrors()) {
            throw new DSSRuntimeExceptionFault("KnowledgeBuilder had errors on build of: '" + requestedKmId + "', "
                    + knowledgeBuilder.getErrors().toString());
        }
        if (knowledgeBuilder.getKnowledgePackages().size() == 0) {
            throw new DSSRuntimeExceptionFault(
                    "KnowledgeBuilder did not find any VALID '.drl', '.bpmn' or '.pkg' files for: '" + requestedKmId
                            + "', " + knowledgeBuilder.getErrors().toString());
        }

        knowledgeBase.addKnowledgePackages(knowledgeBuilder.getKnowledgePackages());
        SimpleKnowledgeRepository.putKnowledgeBaseCache(requestedKmId, knowledgeBase);

        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase built");
    }
    // }

    dssRequestKMItem.getKmTimingData().setFinishInsertKnowledgeTime(new AtomicLong(System.nanoTime()));

    StatelessKnowledgeSession statelessKnowledgeSession = knowledgeBase.newStatelessKnowledgeSession();

    // // to create a new Drools Working Memory Logger for in depth Drools debugging - Use either the InMemory
    // // to record logs on all input, or use the FileLogger for debugging of one input at a time in Drools and JBPM
    // WorkingMemoryInMemoryLogger memoryLogger = new WorkingMemoryInMemoryLogger(statelessKnowledgeSession);
    // WorkingMemoryFileLogger fileLogger = new WorkingMemoryFileLogger(statelessKnowledgeSession);
    // // If using the FileLogger, Set the log file that we will be using to log Working Memory (aka session)
    // fileLogger.setFileName("C:/opencds-logs/OpenCDS-Drools-event-log");
    // TODO: make the above choice based on configuration settings

    dssRequestKMItem.getKmTimingData().setFinishStartKnowledgeSessionTime(new AtomicLong(System.nanoTime()));

    /**
     * Load the Globals and Fact lists: evalTime, language, timezoneOffset
     */
    @SuppressWarnings("rawtypes")
    List<Command> cmds = Collections.synchronizedList(new ArrayList<Command>());

    /**
     * Load the Globals: evalTime, language, timezoneOffset, focalPersonId, assertions, namedObjects
     */
    cmds.add(CommandFactory.newSetGlobal("evalTime", evalTime));
    cmds.add(CommandFactory.newSetGlobal("clientLanguage", clientLanguage));
    cmds.add(CommandFactory.newSetGlobal("clientTimeZoneOffset", clientTimeZoneOffset));
    cmds.add(CommandFactory.newSetGlobal("focalPersonId", dssRequestKMItem.getFocalPersonId()));

    // following global used to store flags for inter-task communication in a JBPM Process
    java.util.Set<String> assertions = new java.util.HashSet<String>();
    cmds.add(CommandFactory.newSetGlobal("assertions", assertions));

    // following global used to return facts added by rules, such as new observationResults
    java.util.Map<String, Object> namedObjects = new java.util.HashMap<String, Object>();
    cmds.add(CommandFactory.newSetGlobal("namedObjects", namedObjects));

    dssRequestKMItem.getKmTimingData().setFinishLoadGlobalsTime(new AtomicLong(System.nanoTime()));

    /**
     * Load the FactLists into Commands: Only ones that are not empty...
     */
    // does this whole thing need to be made concurrent safe?? Will this do it??
    synchronized (allFactLists) {
        for (String oneName : allFactLists.keySet()) {
            @SuppressWarnings("unchecked")
            List<Object> oneFactList = (List<Object>) allFactLists.get(oneName);
            String oneTypeName = "";
            for (Object oneFact : (List<Object>) oneFactList) {
                oneTypeName = oneFact.getClass().getSimpleName();
            }
            if (oneFactList.size() > 0) {
                cmds.add(CommandFactory.newInsertElements((List<?>) oneFactList, oneTypeName, true, null));
            } else {
                allFactLists.remove(oneTypeName);
            }
        }
    }

    dssRequestKMItem.getKmTimingData().setFinishLoadFactListsTime(new AtomicLong(System.nanoTime()));

    /**
     * If this is a PKG (for package with process), initiate the configured Primary Process for JBPM.
     */
    if ((requestedKmPrimaryProcessName != null) && (!"".equals(requestedKmPrimaryProcessName))) {
        if ("".equals(requestedKmPrimaryProcessName)) {
            throw new DSSRuntimeExceptionFault("DroolsAdapter found improperly configured KM: " + requestedKmId
                    + ". This KM includes a BPMN file, but does not have a value "
                    + "for 'knowledgeModulePrimaryProcessName' in its configuration.");
        }
        cmds.add(CommandFactory.newStartProcess(requestedKmPrimaryProcessName));
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase Primary Process: "
                + requestedKmPrimaryProcessName);
    }

    dssRequestKMItem.getKmTimingData().setStartInferenceEngine(new AtomicLong(System.nanoTime()));

    /**
     * Use Drools to process everything.
     * Added try/catch around stateless session because Drools has an unhandled exception
     * when a JBPM Process improperly re-declares a global that is constraining a gateway
     * and the resultant global is null - des 20120727
     ********************************************************************************
     */
    ExecutionResults results = null;
    try {
        results = statelessKnowledgeSession.execute(CommandFactory.newBatchExecution((cmds)));
    } catch (Exception e) {
        String err = "OpenCDS call to Drools.execute failed with error: " + e.getMessage();
        log.error(err);
        StackTraceElement elements[] = e.getStackTrace();
        for (int i = 0, n = elements.length; i < n; i++) {
            String detail = elements[i].getClassName() + ":" + elements[i].getMethodName() + ":"
                    + elements[i].getLineNumber();
            log.error(detail);
            err += "\n" + elements[i].getMethodName();
        }
        throw new DSSRuntimeExceptionFault(err);
    }

    /**
     ********************************************************************************
     * END Drools
     */
    dssRequestKMItem.getKmTimingData().setFinishInferenceEngine(new AtomicLong(System.nanoTime()));

    // grab session logging of whichever type was started...
    // log.trace(memoryLogger.getEvents());
    // fileLogger.writeToDisk();

    // update original entries from allFactLists to capture any new or updated elements
    // ** need to look for every possible fact list, because rules may have created new ones...
    // NOTE that results contains the original objects passed in via CMD structure, with any
    // changes introduced by rules.
    Map<String, List<?>> resultFactLists = Collections.synchronizedMap(new WeakHashMap<String, List<?>>());

    synchronized (results) {
        Collection<String> allResultNames = results.getIdentifiers();
        // includes concepts but not globals?
        for (String oneName : allResultNames) {
            if (!("evalTime".equals(oneName)) && !("clientLanguage".equals(oneName))
                    && !("clientTimeZoneOffset".equals(oneName))) {
                // ignore these submitted globals, they should not have been changed by rules,
                // and look at everything else
                Object oneList = results.getValue(oneName);
                resultFactLists.put(oneName, (List<?>) oneList);
            }
        }
    }

    /*
     * now process the returned namedObjects and add them to the resultFactLists
     */
    synchronized (namedObjects) {
        for (String key : namedObjects.keySet()) {
            if (namedObjects.get(key) != null) {
                Object oneNamedObject = namedObjects.get(key);
                // String className = oneNamedObject.getClass().getSimpleName();
                @SuppressWarnings("unchecked")
                List<Object> oneList = (List<Object>) resultFactLists
                        .get(oneNamedObject.getClass().getSimpleName());
                if (oneList == null) {
                    oneList = new ArrayList<Object>();
                    oneList.add(oneNamedObject);
                } else {
                    oneList.add(oneNamedObject);
                }
                resultFactLists.put(oneNamedObject.getClass().getSimpleName(), oneList);
            }
        }
    }

    /**
     * Retrieve the Results for this requested KM and stack them in the DSS fkmResponse
     * NOTE: Each additional requested KM will have a separate output payload
     */
    dssRequestKMItem.getKmTimingData().setFinishInferenceEngineAdapterTime(new AtomicLong(System.nanoTime()));

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " begin marshalling results to external VMR ");

    // FIXME probably need to create static final string to identify output SSID, probably always as VMR
    // String payloadCreatorName = SimpleKnowledgeRepository.getRequiredPayloadCreatorForSSID(externalFactModelSSId);
    IOutboundPayloadProcessor payloadCreator = (IOutboundPayloadProcessor) SimpleKnowledgeRepository
            .getPayloadCreatorInstanceForClassNameCache(
                    SimpleKnowledgeRepository.getRequiredPayloadCreatorForSSID(externalFactModelSSId));

    /*
     * following is normally instantiated as
     * MarshalVMR2VMRSchemaPayload.getInstance().mappingOutbound(resultFactLists, dssRequestKMItem);
     */
    String outputString = payloadCreator.mappingOutbound(resultFactLists, dssRequestKMItem);

    log.trace("II: " + interactionId + " KMId: " + requestedKmId + " finished marshalling results to external VMR, "
            + outputString.length() + " chars.");

    /*
     * clear out maps and arrays
     */
    // BuildCDSInputFactLists.clearAllFactLists(allFactLists);
    log.debug("clearAllFactLists");

    allFactLists.clear();
    allFactLists = null;
    cmds.clear();
    cmds = null;
    assertions.clear();
    assertions = null;
    namedObjects.clear();
    namedObjects = null;

    for (String oneId : results.getIdentifiers()) {
        // results.getFactHandle(oneId)
        results.getIdentifiers().remove(results.getValue(oneId));
    }
    results = null;

    resultFactLists.values().clear();
    resultFactLists.clear();
    resultFactLists = null;

    // dssRequestKMItem = null;
    // statelessKnowledgeSession = null;

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " completed Drools inferencing engine");

    return outputString;
    // return "";
}
From source file:org.lightjason.agentspeak.action.builtin.TestCActionCollectionList.java
/**
 * test zip action
 */
@Test
public final void zip() {
    final List<ITerm> l_return = new ArrayList<>();

    new CZip().execute(false, IContext.EMPTYPLAN,
            IntStream.range(0, 6).boxed().map(CRawTerm::from).collect(Collectors.toList()), l_return);

    Assert.assertEquals(l_return.size(), 1);
    Assert.assertEquals(l_return.get(0).<List<?>>raw().size(), 3);

    Assert.assertEquals(l_return.get(0).<List<AbstractMap.SimpleEntry<?, ?>>>raw().get(0).getKey(), 0);
    Assert.assertEquals(l_return.get(0).<List<AbstractMap.SimpleEntry<?, ?>>>raw().get(0).getValue(), 3);

    Assert.assertEquals(l_return.get(0).<List<AbstractMap.SimpleEntry<?, ?>>>raw().get(1).getKey(), 1);
    Assert.assertEquals(l_return.get(0).<List<AbstractMap.SimpleEntry<?, ?>>>raw().get(1).getValue(), 4);

    Assert.assertEquals(l_return.get(0).<List<AbstractMap.SimpleEntry<?, ?>>>raw().get(2).getKey(), 2);
    Assert.assertEquals(l_return.get(0).<List<AbstractMap.SimpleEntry<?, ?>>>raw().get(2).getValue(), 5);

    new CZip().execute(true, IContext.EMPTYPLAN,
            Stream.of(1, 2).map(CRawTerm::from).collect(Collectors.toList()), l_return);

    Assert.assertEquals(l_return.size(), 2);
    Assert.assertEquals(l_return.get(1).<List<?>>raw().getClass(),
            Collections.synchronizedList(Collections.emptyList()).getClass());
}
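The last assertion relies on the fact that the class of the wrapper returned by Collections.synchronizedList depends only on whether the backing list implements RandomAccess, so comparing runtime classes is a cheap way to verify that a returned list is a synchronized wrapper. A small sketch of that idiom, detached from the lightjason types above:

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;

public class WrapperClassCheck {
    public static void main(String[] args) {
        List<Integer> wrapped = Collections.synchronizedList(new ArrayList<Integer>());

        // Both wrap RandomAccess lists, so both are Collections$SynchronizedRandomAccessList.
        System.out.println(wrapped.getClass()
                == Collections.synchronizedList(Collections.<Integer>emptyList()).getClass()); // true

        // A LinkedList is not RandomAccess, so its wrapper is a different class.
        System.out.println(wrapped.getClass()
                == Collections.synchronizedList(new LinkedList<Integer>()).getClass()); // false
    }
}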
From source file:jp.co.acroquest.endosnipe.perfdoctor.rule.RuleManager.java
/**
 * Persists the current rule-set configuration to the preference store and to the rule-set XML files.
 */
@SuppressWarnings("deprecation")
public synchronized void commit() {
    // save the active rule-set id
    RulePreferenceUtil.saveActiveRuleSetId(this.activeRuleSetId_);

    // save each non-default rule-set configuration
    List<String> ruleSetIdList = new ArrayList<String>();
    Collection<RuleSetConfig> ruleSetConfigs = this.ruleSetConfigMap_.values();
    for (RuleSetConfig config : ruleSetConfigs) {
        String id = config.getId();
        if (isDefaultRuleSet(id)) {
            continue;
        }
        RulePreferenceUtil.saveRuleSet(config);
        ruleSetIdList.add(id);
    }

    // save the list of rule-set ids
    String[] ruleSetIds = ruleSetIdList.toArray(new String[ruleSetIdList.size()]);
    RulePreferenceUtil.saveRuleSetIds(ruleSetIds);

    // write out rule sets that have been modified
    for (String ruleId : this.dirtyRuleSetIds_) {
        if (isDefaultRuleSet(ruleId)) {
            continue;
        }
        RuleSetConfig config = this.ruleSetConfigMap_.get(ruleId);
        if (config == null) {
            continue;
        }
        RuleSetDef def = this.ruleSetMap_.get(ruleId);
        this.accessor_.updateRuleSet(def, config.getFileName());
    }

    // create missing rule-set files, cloning the default rule set as their initial content
    for (RuleSetConfig config : ruleSetConfigs) {
        String id = config.getId();
        if (isDefaultRuleSet(id)) {
            continue;
        }
        File file = new File(config.getFileName());
        if (file.exists() && file.isFile()) {
            continue;
        }
        File parentFile = file.getParentFile();
        if (parentFile != null && parentFile.exists() == false) {
            try {
                parentFile.mkdirs();
            } catch (SecurityException ex) {
                LOGGER.error(ex.getMessage(), ex);
            }
        }
        try {
            RuleSetDef defaultRuleSetClone = new RuleSetDef(getRuleSetDef(DEFAULT_RULESET_ID));
            defaultRuleSetClone.setName(config.getName());
            this.accessor_.updateRuleSet(defaultRuleSetClone, config.getFileName());
        } catch (RuleCreateException ex) {
            LOGGER.error(ex.getMessage(), ex);
        }
    }

    // delete files for removed rule sets
    for (RuleSetConfig config : this.removeList_) {
        File file = new File(config.getFileName());
        if (file.exists()) {
            try {
                file.delete();
            } catch (SecurityException ex) {
                LOGGER.error(ex.getMessage(), ex);
            }
        }
    }
    this.removeList_ = Collections.synchronizedList(new ArrayList<RuleSetConfig>());
}
From source file:org.lightjason.agentspeak.action.builtin.TestCActionMathStatistics.java
/**
 * test exponential selection with strict parameter
 */
@Test
public final void exponentialselectionstrict() {
    final List<ITerm> l_return = Collections.synchronizedList(new ArrayList<>());

    IntStream.range(0, 5000).parallel()
            .forEach(i -> new CExponentialSelection().execute(false, IContext.EMPTYPLAN,
                    Stream.of(Stream.of("a", "b").collect(Collectors.toList()),
                            Stream.of(4.5, 3.5).collect(Collectors.toList()), 1).map(CRawTerm::from)
                            .collect(Collectors.toList()),
                    l_return));

    Assert.assertEquals(
            (double) Collections.frequency(l_return.stream().map(ITerm::raw).collect(Collectors.toList()), "a")
                    / l_return.size(),
            0.73, 0.02);
    Assert.assertEquals(
            (double) Collections.frequency(l_return.stream().map(ITerm::raw).collect(Collectors.toList()), "b")
                    / l_return.size(),
            0.27, 0.02);
}
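This test uses the same collector idea with a parallel stream: each of the 5000 parallel iterations appends its selection to a synchronized list, and frequencies are checked afterwards. A stripped-down sketch of the pattern without the lightjason types (the random choice below is illustrative):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.IntStream;

public class ParallelFrequency {
    public static void main(String[] args) {
        List<String> picks = Collections.synchronizedList(new ArrayList<String>());

        // Parallel writers: synchronizedList makes the concurrent add() calls safe.
        IntStream.range(0, 5000).parallel()
                .forEach(i -> picks.add(ThreadLocalRandom.current().nextDouble() < 0.73 ? "a" : "b"));

        double ratioA = (double) Collections.frequency(picks, "a") / picks.size();
        System.out.printf("a: %.2f, b: %.2f%n", ratioA, 1 - ratioA);
    }
}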
From source file:org.apache.hadoop.hbase.util.RegionMover.java
private void loadRegions(Admin admin, String hostname, int port, List<HRegionInfo> regionsToMove, boolean ack)
        throws Exception {
    String server = null;
    List<HRegionInfo> movedRegions = Collections.synchronizedList(new ArrayList<HRegionInfo>());
    int maxWaitInSeconds = admin.getConfiguration().getInt(SERVERSTART_WAIT_MAX_KEY, DEFAULT_SERVERSTART_WAIT_MAX);
    long maxWait = EnvironmentEdgeManager.currentTime() + maxWaitInSeconds * 1000;
    while ((EnvironmentEdgeManager.currentTime() < maxWait) && (server == null)) {
        try {
            ArrayList<String> regionServers = getServers(admin);
            // Remove the host Region server from target Region Servers list
            server = stripServer(regionServers, hostname, port);
            if (server != null) {
                break;
            }
        } catch (IOException e) {
            LOG.warn("Could not get list of region servers", e);
        } catch (Exception e) {
            LOG.info("hostname=" + hostname + " is not up yet, waiting");
        }
        try {
            Thread.sleep(500);
        } catch (InterruptedException e) {
            LOG.error("Interrupted while waiting for " + hostname + " to be up.Quitting now", e);
            throw e;
        }
    }
    if (server == null) {
        LOG.error("Host:" + hostname + " is not up.Giving up.");
        throw new Exception("Host to load regions not online");
    }
    LOG.info("Moving " + regionsToMove.size() + " regions to " + server + " using " + this.maxthreads
            + " threads.Ack mode:" + this.ack);
    ExecutorService moveRegionsPool = Executors.newFixedThreadPool(this.maxthreads);
    List<Future<Boolean>> taskList = new ArrayList<Future<Boolean>>();
    int counter = 0;
    while (counter < regionsToMove.size()) {
        HRegionInfo region = regionsToMove.get(counter);
        String currentServer = getServerNameForRegion(admin, region);
        if (currentServer == null) {
            LOG.warn("Could not get server for Region:" + region.getEncodedName() + " moving on");
            counter++;
            continue;
        } else if (server.equals(currentServer)) {
            LOG.info("Region " + region.getRegionNameAsString() + "already on target server=" + server);
            counter++;
            continue;
        }
        if (ack) {
            Future<Boolean> task = moveRegionsPool
                    .submit(new MoveWithAck(admin, region, currentServer, server, movedRegions));
            taskList.add(task);
        } else {
            Future<Boolean> task = moveRegionsPool
                    .submit(new MoveWithoutAck(admin, region, currentServer, server, movedRegions));
            taskList.add(task);
        }
        counter++;
    }
    moveRegionsPool.shutdown();
    long timeoutInSeconds = regionsToMove.size()
            * admin.getConfiguration().getInt(MOVE_WAIT_MAX_KEY, DEFAULT_MOVE_WAIT_MAX);
    try {
        if (!moveRegionsPool.awaitTermination(timeoutInSeconds, TimeUnit.SECONDS)) {
            moveRegionsPool.shutdownNow();
        }
    } catch (InterruptedException e) {
        moveRegionsPool.shutdownNow();
        Thread.currentThread().interrupt();
    }
    for (Future<Boolean> future : taskList) {
        try {
            // if even after shutdownNow threads are stuck we wait for 5 secs max
            if (!future.get(5, TimeUnit.SECONDS)) {
                LOG.error("Was Not able to move region....Exiting Now");
                throw new Exception("Could not move region Exception");
            }
        } catch (InterruptedException e) {
            LOG.error("Interrupted while waiting for Thread to Complete " + e.getMessage(), e);
            throw e;
        } catch (ExecutionException e) {
            LOG.error("Got Exception From Thread While moving region " + e.getMessage(), e);
            throw e;
        } catch (CancellationException e) {
            LOG.error("Thread for moving region cancelled. Timeout for cancellation:" + timeoutInSeconds + "secs",
                    e);
            throw e;
        }
    }
}
From source file:org.mobicents.servlet.restcomm.telephony.Call.java
public Call(final SipFactory factory, final ActorRef mediaSessionController, final Configuration configuration) {
    super();
    final ActorRef source = self();

    // States for the FSM
    this.uninitialized = new State("uninitialized", null, null);
    this.initializing = new State("initializing", new Initializing(source), null);
    this.queued = new State("queued", new Queued(source), null);
    this.ringing = new State("ringing", new Ringing(source), null);
    this.failingBusy = new State("failing busy", new FailingBusy(source), null);
    this.busy = new State("busy", new Busy(source), null);
    this.notFound = new State("not found", new NotFound(source), null);
    // This time the --new Canceling(source)-- is an ActionOnState. Overloaded constructor is used here
    this.canceling = new State("canceling", new Canceling(source));
    this.canceled = new State("canceled", new Canceled(source), null);
    this.failingNoAnswer = new State("failing no answer", new FailingNoAnswer(source), null);
    this.noAnswer = new State("no answer", new NoAnswer(source), null);
    this.dialing = new State("dialing", new Dialing(source), null);
    this.updatingMediaSession = new State("updating media session", new UpdatingMediaSession(source), null);
    this.inProgress = new State("in progress", new InProgress(source), null);
    this.joining = new State("joining", new Joining(source), null);
    this.leaving = new State("leaving", new Leaving(source), null);
    this.stopping = new State("stopping", new Stopping(source), null);
    this.completed = new State("completed", new Completed(source), null);
    this.failed = new State("failed", new Failed(source), null);

    // Transitions for the FSM
    final Set<Transition> transitions = new HashSet<Transition>();
    transitions.add(new Transition(this.uninitialized, this.ringing));
    transitions.add(new Transition(this.uninitialized, this.queued));
    transitions.add(new Transition(this.uninitialized, this.canceled));
    transitions.add(new Transition(this.uninitialized, this.completed));
    transitions.add(new Transition(this.queued, this.canceled));
    transitions.add(new Transition(this.queued, this.initializing));
    transitions.add(new Transition(this.ringing, this.busy));
    transitions.add(new Transition(this.ringing, this.notFound));
    transitions.add(new Transition(this.ringing, this.canceling));
    transitions.add(new Transition(this.ringing, this.canceled));
    transitions.add(new Transition(this.ringing, this.failingNoAnswer));
    transitions.add(new Transition(this.ringing, this.failingBusy));
    transitions.add(new Transition(this.ringing, this.noAnswer));
    transitions.add(new Transition(this.ringing, this.initializing));
    transitions.add(new Transition(this.ringing, this.updatingMediaSession));
    transitions.add(new Transition(this.ringing, this.completed));
    transitions.add(new Transition(this.ringing, this.stopping));
    transitions.add(new Transition(this.ringing, this.failed));
    transitions.add(new Transition(this.initializing, this.canceling));
    transitions.add(new Transition(this.initializing, this.dialing));
    transitions.add(new Transition(this.initializing, this.failed));
    transitions.add(new Transition(this.initializing, this.inProgress));
    transitions.add(new Transition(this.initializing, this.stopping));
    transitions.add(new Transition(this.dialing, this.canceling));
    transitions.add(new Transition(this.dialing, this.stopping));
    transitions.add(new Transition(this.dialing, this.failingBusy));
    transitions.add(new Transition(this.dialing, this.ringing));
    transitions.add(new Transition(this.dialing, this.updatingMediaSession));
    transitions.add(new Transition(this.inProgress, this.stopping));
    transitions.add(new Transition(this.inProgress, this.joining));
    transitions.add(new Transition(this.inProgress, this.leaving));
    transitions.add(new Transition(this.inProgress, this.failed));
    transitions.add(new Transition(this.joining, this.inProgress));
    transitions.add(new Transition(this.joining, this.stopping));
    transitions.add(new Transition(this.joining, this.failed));
    transitions.add(new Transition(this.leaving, this.inProgress));
    transitions.add(new Transition(this.leaving, this.stopping));
    transitions.add(new Transition(this.leaving, this.failed));
    transitions.add(new Transition(this.canceling, this.canceled));
    transitions.add(new Transition(this.canceling, this.completed));
    transitions.add(new Transition(this.failingBusy, this.busy));
    transitions.add(new Transition(this.failingNoAnswer, this.noAnswer));
    transitions.add(new Transition(this.failingNoAnswer, this.canceling));
    transitions.add(new Transition(this.updatingMediaSession, this.inProgress));
    transitions.add(new Transition(this.updatingMediaSession, this.failed));
    transitions.add(new Transition(this.stopping, this.completed));
    transitions.add(new Transition(this.stopping, this.failed));

    // FSM
    this.fsm = new FiniteStateMachine(this.uninitialized, transitions);

    // SIP runtime stuff.
    this.factory = factory;

    // Conferencing
    this.conferencing = false;

    // Media Session Control runtime stuff.
    this.msController = mediaSessionController;
    this.fail = false;

    // Initialize the runtime stuff.
    this.id = Sid.generate(Sid.Type.CALL);
    this.instanceId = RestcommConfiguration.getInstance().getMain().getInstanceId();
    this.created = DateTime.now();
    this.observers = Collections.synchronizedList(new ArrayList<ActorRef>());
    this.receivedBye = false;

    // Media Group runtime stuff
    this.liveCallModification = false;
    this.recording = false;
    this.configuration = configuration;
    this.disableSdpPatchingOnUpdatingMediaSession = this.configuration.subset("runtime-settings")
            .getBoolean("disable-sdp-patching-on-updating-mediasession", false);
}
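Here the synchronized list holds the call's observers, which may be registered and notified from different threads. A generic sketch of that observer-list pattern (the Observer type and publish method are illustrative, not Restcomm classes):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class ObserverRegistry {
    public interface Observer {
        void onEvent(String event);
    }

    private final List<Observer> observers = Collections.synchronizedList(new ArrayList<Observer>());

    public void addObserver(Observer o) {
        observers.add(o);        // safe from any thread
    }

    public void removeObserver(Observer o) {
        observers.remove(o);     // safe from any thread
    }

    public void publish(String event) {
        // Iteration over a synchronized wrapper still needs an explicit lock.
        synchronized (observers) {
            for (Observer o : observers) {
                o.onEvent(event);
            }
        }
    }
}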
From source file:org.alfresco.repo.node.archive.NodeArchiveServiceImpl.java
/**
 * Uses batch processing and job locking to restore all archived nodes.
 *
 * @deprecated In 3.4: to be removed
 */
public List<RestoreNodeReport> restoreAllArchivedNodes(StoreRef originalStoreRef) {
    final String user = AuthenticationUtil.getFullyAuthenticatedUser();
    if (user == null) {
        throw new IllegalStateException("Cannot restore as there is no authenticated user.");
    }

    final List<RestoreNodeReport> results = Collections
            .synchronizedList(new ArrayList<RestoreNodeReport>(1000));

    /**
     * Worker that restores each node
     */
    BatchProcessWorker<NodeRef> worker = new BatchProcessor.BatchProcessWorkerAdaptor<NodeRef>() {
        @Override
        public void beforeProcess() throws Throwable {
            AuthenticationUtil.pushAuthentication();
        }

        public void process(NodeRef entry) throws Throwable {
            AuthenticationUtil.setFullyAuthenticatedUser(user);
            if (nodeService.exists(entry)) {
                RestoreNodeReport report = restoreArchivedNode(entry);
                // Append the results (it is synchronized)
                results.add(report);
            }
        }

        @Override
        public void afterProcess() throws Throwable {
            AuthenticationUtil.popAuthentication();
        }
    };
    doBulkOperation(user, originalStoreRef, worker);
    return results;
}
From source file:com.gargoylesoftware.htmlunit.WebClientWaitForBackgroundJobsTest.java
/**
 * Tests that waitForBackgroundJavaScriptStartingBefore waits for jobs that should have been started earlier
 * but that are "late" due to processing of a previous job.
 * This test needs to start many setTimeout calls so that it reaches the state where a check for future
 * jobs occurs while one of these jobs is not active.
 * @throws Exception if the test fails
 */
@Test
@Tries(3)
public void waitForJobThatIsAlreadyLate() throws Exception {
    final String html = "<html>\n"
        + "<head>\n"
        + "  <script>\n"
        + "    var counter = 0;\n"
        + "    function test() {\n"
        + "      setTimeout(doWork1, 0);\n"
        + "    }\n"
        + "    function doWork1() {\n"
        + "      if (counter++ < 50) {\n"
        + "        setTimeout(doWork1, 0);\n"
        + "      }\n"
        + "      alert('work1');\n"
        + "    }\n"
        + "  </script>\n"
        + "</head>\n"
        + "<body onload='test()'>\n"
        + "</body>\n"
        + "</html>";

    final MockWebConnection webConnection = new MockWebConnection();
    webConnection.setResponse(URL_FIRST, html);
    webConnection.setDefaultResponse("");

    final WebClient client = getWebClient();
    client.setWebConnection(webConnection);

    final List<String> collectedAlerts = Collections.synchronizedList(new ArrayList<String>());
    client.setAlertHandler(new CollectingAlertHandler(collectedAlerts));

    client.getPage(URL_FIRST);

    startTimedTest();
    assertEquals(0, client.waitForBackgroundJavaScriptStartingBefore(1000));
    assertMaxTestRunTime(1000);
    assertEquals(51, collectedAlerts.size());
}
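The alert list is synchronized because the alerts are produced by the background JavaScript thread while the test thread later reads the list; any handler that writes from another thread needs the same protection. A minimal producer/consumer sketch of that situation, with no HtmlUnit types involved:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class BackgroundCollector {
    public static void main(String[] args) throws InterruptedException {
        final List<String> collected = Collections.synchronizedList(new ArrayList<String>());

        // Background "job" thread appends entries, much like alerts fired by scripts.
        Thread background = new Thread(() -> {
            for (int i = 0; i < 51; i++) {
                collected.add("work1");
            }
        });
        background.start();

        // The main thread waits for the background work, then inspects the list.
        background.join();
        System.out.println(collected.size()); // 51
    }
}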
From source file:com.emc.vipr.sync.CasMigrationTest.java
protected String summarize(FPPool pool, List<String> clipIds) throws Exception {
    List<String> summaries = Collections.synchronizedList(new ArrayList<String>());
    ExecutorService service = Executors.newFixedThreadPool(CAS_SETUP_THREADS);
    System.out.print("Summarizing clips");
    for (String clipId : clipIds) {
        service.submit(new ClipReader(pool, clipId, summaries));
    }
    service.shutdown();
    service.awaitTermination(CAS_SETUP_WAIT_MINUTES, TimeUnit.MINUTES);
    service.shutdownNow();
    System.out.println();
    Collections.sort(summaries);
    StringBuilder out = new StringBuilder();
    for (String summary : summaries) {
        out.append(summary);
    }
    return out.toString();
}