List of usage examples for java.util.WeakHashMap: WeakHashMap()
public WeakHashMap()
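The examples below come from real projects; first, a minimal, self-contained sketch (not taken from any of them) of what the no-argument constructor gives you: a map that drops an entry once its key is no longer strongly referenced anywhere else. The class name WeakHashMapDemo is made up for illustration, and because garbage-collection timing is not guaranteed, the second size() call may still print 1 on some JVMs.

import java.util.Map;
import java.util.WeakHashMap;

public class WeakHashMapDemo {
    public static void main(String[] args) throws InterruptedException {
        Map<Object, String> cache = new WeakHashMap<Object, String>(); // default initial capacity and load factor
        Object key = new Object();
        cache.put(key, "payload");
        System.out.println(cache.size()); // 1: the key is still strongly reachable via 'key'
        key = null;                       // drop the only strong reference to the key
        System.gc();                      // a hint only; collection is not guaranteed
        Thread.sleep(100);                // give the collector and reference processing a moment
        System.out.println(cache.size()); // typically 0: the entry is cleared once the key has been collected
    }
}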
From source file:org.openmrs.module.ModuleClassLoader.java
/**
 * Get and cache the imports for this module. The imports should just be the modules that are set as
 * "required" by this module.
 */
protected static Module[] collectRequiredModuleImports(Module module) {
    // collect imported modules (exclude duplicates)
    Map<String, Module> publicImportsMap = new WeakHashMap<String, Module>(); //<module ID, Module>
    for (String moduleId : ModuleConstants.CORE_MODULES.keySet()) {
        Module coreModule = ModuleFactory.getModuleById(moduleId);
        if (coreModule == null && !ModuleUtil.ignoreCoreModules()) {
            log.error("Unable to find an openmrs core loaded module with id: " + moduleId);
            throw new APIException("Module.error.shouldNotBeHere", (Object[]) null);
        }
        // if this is already the classloader for one of the core modules, don't put it on the import list
        if (coreModule != null && !moduleId.equals(module.getModuleId())) {
            publicImportsMap.put(moduleId, coreModule);
        }
    }
    for (String requiredPackage : module.getRequiredModules()) {
        Module requiredModule = ModuleFactory.getModuleByPackage(requiredPackage);
        if (ModuleFactory.isModuleStarted(requiredModule)) {
            publicImportsMap.put(requiredModule.getModuleId(), requiredModule);
        }
    }
    return publicImportsMap.values().toArray(new Module[publicImportsMap.size()]);
}
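An aside that is not part of the OpenMRS source: the map above only lives for the duration of the call, and its values are copied into an array before returning, so the weak-key semantics matter little here and a plain HashMap would arguably behave the same; what the code relies on is simply de-duplication by module id. A standalone sketch of that idiom, with made-up string values standing in for Module objects:

import java.util.Map;
import java.util.WeakHashMap;

public class DedupByIdDemo {
    public static void main(String[] args) {
        Map<String, String> publicImportsMap = new WeakHashMap<String, String>(); // <module ID, module>
        publicImportsMap.put("core", "Core module 1.0");
        publicImportsMap.put("core", "Core module 1.0 (duplicate, overwrites the first entry)");
        publicImportsMap.put("reporting", "Reporting module 2.3");

        String[] imports = publicImportsMap.values().toArray(new String[publicImportsMap.size()]);
        System.out.println(imports.length); // 2: duplicates collapsed by key
    }
}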
From source file:org.openmrs.module.ModuleFactory.java
/**
 * Returns all modules found/loaded into the system (started and not started) in the form of a
 * map<ModuleId, Module>
 *
 * @return map<ModuleId, Module>
 */
public static Map<String, Module> getLoadedModulesMap() {
    if (loadedModules == null) {
        loadedModules = new WeakHashMap<String, Module>();
    }
    return loadedModules;
}
From source file:org.openmrs.module.ModuleFactory.java
/**
 * Returns all modules found/loaded into the system (started and not started) in the form of a
 * map<PackageName, Module>
 *
 * @return map<PackageName, Module>
 */
public static Map<String, Module> getLoadedModulesMapPackage() {
    if (loadedModules == null) {
        loadedModules = new WeakHashMap<String, Module>();
        return loadedModules;
    }
    Map<String, Module> map = new WeakHashMap<String, Module>();
    for (String key : loadedModules.keySet()) {
        map.put(loadedModules.get(key).getPackageName(), loadedModules.get(key));
    }
    return map;
}
From source file:org.openmrs.module.ModuleClassLoader.java
/**
 * Get and cache the imports for this module. The imports should just be the modules that are set as
 * "aware of" by this module.
 */
protected static Module[] collectAwareOfModuleImports(Module module) {
    // collect imported modules (exclude duplicates)
    Map<String, Module> publicImportsMap = new WeakHashMap<String, Module>(); //<module ID, Module>
    for (String awareOfPackage : module.getAwareOfModules()) {
        Module awareOfModule = ModuleFactory.getModuleByPackage(awareOfPackage);
        if (ModuleFactory.isModuleStarted(awareOfModule)) {
            publicImportsMap.put(awareOfModule.getModuleId(), awareOfModule);
        }
    }
    return publicImportsMap.values().toArray(new Module[publicImportsMap.size()]);
}
From source file:org.openmrs.module.ModuleFactory.java
/**
 * Returns the modules that have been successfully started in the form of a map<ModuleId, Module>
 *
 * @return Map<ModuleId, Module>
 */
public static Map<String, Module> getStartedModulesMap() {
    if (startedModules == null) {
        startedModules = new WeakHashMap<String, Module>();
    }
    return startedModules;
}
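The two ModuleFactory getters above share a lazily-initialized-registry idiom: create the WeakHashMap on first access and hand the same instance back afterwards. As a side note that is not from the OpenMRS code, the unsynchronized null check can race when several threads call the getter at once, and WeakHashMap itself is not thread-safe. A minimal thread-safe variant of the same idiom, with hypothetical names, might look like this:

import java.util.Collections;
import java.util.Map;
import java.util.WeakHashMap;

public class ModuleRegistry {
    // volatile so that a fully constructed map is visible to all threads
    private static volatile Map<String, Object> startedModules;

    public static Map<String, Object> getStartedModulesMap() {
        Map<String, Object> result = startedModules;
        if (result == null) {
            synchronized (ModuleRegistry.class) {
                result = startedModules;
                if (result == null) {
                    // synchronizedMap because WeakHashMap itself is not thread-safe
                    result = Collections.synchronizedMap(new WeakHashMap<String, Object>());
                    startedModules = result;
                }
            }
        }
        return result;
    }
}

The sketch only illustrates the locking shape; whether the extra synchronization is needed depends on how the registry is actually accessed.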
From source file:org.opencds.service.drools.v54.DroolsAdapter.java
/**
 * big picture pseudo code for following method:
 *
 * for this requestedKmId {
 *     getResponse:
 *     create Drools session
 *     load KM into session
 *     load globals into session
 *     load data from allFactLists into session
 *     KBase.execute (calls Drools)
 *     unload result from KM to outputString
 * }
 *
 * This means that we are considering the OMG-CDSS concept of KnowledgeModule equivalent to
 * the Drools concept of KnowledgeBase.
 */
public String getOneResponse(DSSRequestKMItem dssRequestKMItem)
        throws InvalidDriDataFormatExceptionFault, RequiredDataNotProvidedExceptionFault,
        EvaluationExceptionFault, InvalidTimeZoneOffsetExceptionFault,
        UnrecognizedScopedEntityExceptionFault, UnrecognizedLanguageExceptionFault,
        UnsupportedLanguageExceptionFault, DSSRuntimeExceptionFault {

    // SimpleKnowledgeRepository simpleKR = SimpleKnowledgeRepository.getInstance();

    String requestedKmId = dssRequestKMItem.getRequestedKmId();
    String requestedKmPrimaryProcessName = SimpleKnowledgeRepository
            .getRequiredKMPrimaryProcessNameForKMId(requestedKmId);

    TimingDataKM timingDataKM = dssRequestKMItem.getKmTimingData();

    @SuppressWarnings("unchecked")
    JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput> cdsInput = (JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput>) dssRequestKMItem
            .getDssRequestDataItem().getCdsInput();

    DSSRequestDataItem dssRequestDataItem = dssRequestKMItem.getDssRequestDataItem();

    String externalFactModelSSId = dssRequestDataItem.getExternalFactModelSSId();
    Date evalTime = dssRequestDataItem.getEvalTime();
    String clientLanguage = dssRequestDataItem.getClientLanguage();
    String clientTimeZoneOffset = dssRequestDataItem.getClientTimeZoneOffset();
    String interactionId = dssRequestDataItem.getInteractionId();

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " (" + requestedKmPrimaryProcessName + ")"
            + ", SSId: " + externalFactModelSSId + ", evalTime: " + evalTime + ", clTimeZone: "
            + clientTimeZoneOffset + ", clLang: " + clientLanguage);

    /**
     * Load fact map from specific externalFactModels, as specified in externalFactModel SSId...
     *
     * Every separately identified SSId, by definition, specifies separate input and output mappings.
     * Input mappings are used here, and then output mappings are used following the session.execute.
     */
    // Map<String, List<?>> allFactLists = Collections.synchronizedMap(new WeakHashMap<String, List<?>>());
    Map<String, List<?>> allFactLists = new HashMap<String, List<?>>();

    //allFactLists are updated in place by the following call, including both facts and concepts...
    //==================================================================
    // long usedMemory = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory before buildFactLists: " + usedMemory / 1000 + "KB");

    String focalPersonId = BuildCDSInputFactLists.buildFactLists(cdsInput, evalTime, allFactLists, timingDataKM);
    //String focalPersonId = "123^1.2.3";

    // long usedMemory2 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after buildFactLists: " + usedMemory2 / 1000 + "KB, diff = " + (usedMemory2 - usedMemory) / 1000 + "KB");
    //==================================================================

    dssRequestKMItem.setFocalPersonId(focalPersonId);

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " built fact/concept lists for " + focalPersonId);

    timingDataKM.setFinishBuildConceptListsTime(new AtomicLong(System.nanoTime()));

    /**
     * Get the KMs and Load them into a stateless session
     *
     * Currently, assumption is made that each requested knowledge module will be run separately
     * (i.e., as part of a separate distinct knowledge base)
     */
    File drlFile = null;
    File bpmnFile = null;
    File pkgFile = null;

    KnowledgeBase knowledgeBase = (KnowledgeBase) SimpleKnowledgeRepository.getKnowledgeBaseCache(requestedKmId);

    // long usedMemory16 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after getKnowledgeBaseCache: " + usedMemory16 / 1000 + "KB, diff = " + (usedMemory16 - usedMemory2) / 1000 + "KB");
    // long usedMemory17 = 0;

    // synchronized (knowledgeBase) {
    if (knowledgeBase != null) {
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase from cache");
    } else {
        knowledgeBase = KnowledgeBaseFactory.newKnowledgeBase();
        KnowledgeBuilder knowledgeBuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        drlFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".drl");
        bpmnFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".bpmn");
        pkgFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".pkg");
        if (drlFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(drlFile), ResourceType.DRL);
        if (bpmnFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(bpmnFile), ResourceType.BPMN2);
        if (pkgFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(pkgFile), ResourceType.PKG);
        if (knowledgeBuilder.hasErrors()) {
            throw new DSSRuntimeExceptionFault("KnowledgeBuilder had errors on build of: '" + requestedKmId
                    + "', " + knowledgeBuilder.getErrors().toString());
        }
        if (knowledgeBuilder.getKnowledgePackages().size() == 0) {
            throw new DSSRuntimeExceptionFault(
                    "KnowledgeBuilder did not find any VALID '.drl', '.bpmn' or '.pkg' files for: '"
                            + requestedKmId + "', " + knowledgeBuilder.getErrors().toString());
        }
        knowledgeBase.addKnowledgePackages(knowledgeBuilder.getKnowledgePackages());
        SimpleKnowledgeRepository.putKnowledgeBaseCache(requestedKmId, knowledgeBase);

        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase built");

        // usedMemory17 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
        // // this is not needed, but it will make it easier to see the leak
        // System.gc();
        // System.out.println("KMId: " + requestedKmId + " used memory after buildKnowledgeBase: " + usedMemory17 / 1000 + "KB, diff = " + (usedMemory17 - usedMemory16) / 1000 + "KB");
    }
    // }

    dssRequestKMItem.getKmTimingData().setFinishInsertKnowledgeTime(new AtomicLong(System.nanoTime()));

    StatelessKnowledgeSession statelessKnowledgeSession = knowledgeBase.newStatelessKnowledgeSession();

    // long usedMemory18 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // if (usedMemory17 == 0) {
    //// System.out.println("KMId: " + requestedKmId + " used memory after newStatelessKnowledgeSession: " + usedMemory18 / 1000 + "KB, diff = " + (usedMemory18 - usedMemory16) / 1000 + "KB");
    // } else {
    // System.out.println("KMId: " + requestedKmId + " used memory after newStatelessKnowledgeSession: " + usedMemory18 / 1000 + "KB, diff = " + (usedMemory18 - usedMemory17) / 1000 + "KB");
    // }

    // // to create a new Drools Working Memory Logger for in depth Drools debugging - Use either the InMemory
    // // to record logs on all input, or use the FileLogger for debugging of one input at a time in Drools and JBPM
    // WorkingMemoryInMemoryLogger memoryLogger = new WorkingMemoryInMemoryLogger (statelessKnowledgeSession);
    // WorkingMemoryFileLogger fileLogger = new WorkingMemoryFileLogger (statelessKnowledgeSession);
    // // If using the FileLogger, Set the log file that we will be using to log Working Memory (aka session)
    // fileLogger.setFileName("C:/opencds-logs/OpenCDS-Drools-event-log");

    //TODO: make the above choice based on configuration settings

    dssRequestKMItem.getKmTimingData().setFinishStartKnowledgeSessionTime(new AtomicLong(System.nanoTime()));

    /**
     * Load the Globals and Fact lists: evalTime, language, timezoneOffset
     */
    @SuppressWarnings("rawtypes")
    List<Command> cmds = Collections.synchronizedList(new ArrayList<Command>());

    /**
     * Load the Globals: evalTime, language, timezoneOffset, focalPersonId, assertions, namedObjects
     */
    cmds.add(CommandFactory.newSetGlobal("evalTime", evalTime));
    cmds.add(CommandFactory.newSetGlobal("clientLanguage", clientLanguage));
    cmds.add(CommandFactory.newSetGlobal("clientTimeZoneOffset", clientTimeZoneOffset));
    cmds.add(CommandFactory.newSetGlobal("focalPersonId", dssRequestKMItem.getFocalPersonId()));

    //following global used to store flags for inter-task communication in a JBPM Process
    java.util.Set<String> assertions = new java.util.HashSet<String>();
    cmds.add(CommandFactory.newSetGlobal("assertions", assertions));

    //following global used to return facts added by rules, such as new observationResults
    java.util.Map<String, Object> namedObjects = new java.util.HashMap<String, Object>();
    cmds.add(CommandFactory.newSetGlobal("namedObjects", namedObjects));

    dssRequestKMItem.getKmTimingData().setFinishLoadGlobalsTime(new AtomicLong(System.nanoTime()));

    // long usedMemory19 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after CommandFactory.newSetGlobal: " + usedMemory19 / 1000 + "KB, diff = " + (usedMemory19 - usedMemory18) / 1000 + "KB");

    /**
     * Load the FactLists into Commands: Only ones that are not empty...
     */
    //does this whole thing needs to be made concurrent safe ?? Will this do it??
    synchronized (allFactLists) {
        for (String oneName : allFactLists.keySet()) {
            @SuppressWarnings("unchecked")
            List<Object> oneFactList = (List<Object>) allFactLists.get(oneName);
            String oneTypeName = "";
            for (Object oneFact : (List<Object>) oneFactList) {
                oneTypeName = oneFact.getClass().getSimpleName();
            }
            if (oneFactList.size() > 0) {
                cmds.add(CommandFactory.newInsertElements((List<?>) oneFactList, oneTypeName, true, null));
            } else {
                allFactLists.remove(oneTypeName);
            }
        }
    }

    // long usedMemory20 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after CommandFactory.newInsertElements: " + usedMemory20 / 1000 + "KB, diff = " + (usedMemory20 - usedMemory19) / 1000 + "KB");

    dssRequestKMItem.getKmTimingData().setFinishLoadFactListsTime(new AtomicLong(System.nanoTime()));

    /**
     * If this is a PKG (for package with process, initiate the configured Primary Process for JBPM.
     */
    if ((requestedKmPrimaryProcessName != null) && (!"".equals(requestedKmPrimaryProcessName))) {
        if ("".equals(requestedKmPrimaryProcessName)) {
            throw new DSSRuntimeExceptionFault("DroolsAdapter found improperly configured KM: " + requestedKmId
                    + ". This KM includes a BPMN file, but does not have a value "
                    + "for 'knowledgeModulePrimaryProcessName' in its configuration.");
        }
        cmds.add(CommandFactory.newStartProcess(requestedKmPrimaryProcessName));
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase Primary Process: "
                + requestedKmPrimaryProcessName);
    }

    // long usedMemory21 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after CommandFactory.newInsertPrimaryProcess: " + usedMemory21 / 1000 + "KB, diff = " + (usedMemory21 - usedMemory20) / 1000 + "KB");

    dssRequestKMItem.getKmTimingData().setStartInferenceEngine(new AtomicLong(System.nanoTime()));

    /**
     * Use Drools to process everything
     * Added try/catch around stateless session, because Drools has an unhandled exception
     * when a JBPM Process improperly re-declares a global that is constraining a gateway
     * and the resultant global is null - des 20120727
     ********************************************************************************
     */
    ExecutionResults results = null;
    try {
        results = statelessKnowledgeSession.execute(CommandFactory.newBatchExecution((cmds)));
    } catch (Exception e) {
        String err = "OpenCDS call to Drools.execute failed with error: " + e.getMessage();
        log.error(err);
        StackTraceElement elements[] = e.getStackTrace();
        for (int i = 0, n = elements.length; i < n; i++) {
            String detail = elements[i].getClassName() + ":" + elements[i].getMethodName() + ":"
                    + elements[i].getLineNumber();
            log.error(detail);
            err += "\n" + elements[i].getMethodName();
        }
        throw new DSSRuntimeExceptionFault(err);
    }

    // long usedMemory22 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after statelessKnowledgeSession.execute: " + usedMemory22 / 1000 + "KB, diff = " + (usedMemory22 - usedMemory21) / 1000 + "KB");

    /**
     ********************************************************************************
     * END Drools
     */
    dssRequestKMItem.getKmTimingData().setFinishInferenceEngine(new AtomicLong(System.nanoTime()));

    //grab session logging of whichever type was started...
    // log.trace(memoryLogger.getEvents());
    // fileLogger.writeToDisk();

    //update original entries from allFactLists to capture any new or updated elements
    //** need to look for every possible fact list, because rules may have created new ones...
    //NOTE that results contains the original objects passed in via CMD structure, with any
    //changes introduced by rules.
    Map<String, List<?>> resultFactLists = Collections.synchronizedMap(new WeakHashMap<String, List<?>>());

    synchronized (results) {
        Collection<String> allResultNames = results.getIdentifiers();
        //includes concepts but not globals?
        for (String oneName : allResultNames) {
            if (!("evalTime".equals(oneName)) && !("clientLanguage".equals(oneName))
                    && !("clientTimeZoneOffset".equals(oneName))) {
                // ignore these submitted globals, they should not have been changed by rules, and look at everything else
                Object oneList = results.getValue(oneName);
                resultFactLists.put(oneName, (List<?>) oneList);
            }
        }
    }

    // long usedMemory23 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after resultFactLists: " + usedMemory23 / 1000 + "KB, diff = " + (usedMemory23 - usedMemory22) / 1000 + "KB");

    /*
     * now process the returned namedObjects and add them to the resultFactLists
     */
    synchronized (namedObjects) {
        for (String key : namedObjects.keySet()) {
            if (namedObjects.get(key) != null) {
                Object oneNamedObject = namedObjects.get(key);
                // String className = oneNamedObject.getClass().getSimpleName();
                @SuppressWarnings("unchecked")
                List<Object> oneList = (List<Object>) resultFactLists
                        .get(oneNamedObject.getClass().getSimpleName());
                if (oneList == null) {
                    oneList = new ArrayList<Object>();
                    oneList.add(oneNamedObject);
                } else {
                    oneList.add(oneNamedObject);
                }
                resultFactLists.put(oneNamedObject.getClass().getSimpleName(), oneList);
            }
        }
    }

    // long usedMemory24 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after adding namedObjects: " + usedMemory24 / 1000 + "KB, diff = " + (usedMemory24 - usedMemory23) / 1000 + "KB");

    /**
     * Retrieve the Results for this requested KM and stack them in the DSS fkmResponse
     * NOTE: Each additional requested KM will have a separate output payload
     */
    dssRequestKMItem.getKmTimingData().setFinishInferenceEngineAdapterTime(new AtomicLong(System.nanoTime()));

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " begin marshalling results to external VMR ");

    //FIXME probably need to create static final string to identify output SSID, probably always as VMR
    // String payloadCreatorName = SimpleKnowledgeRepository.getRequiredPayloadCreatorForSSID(externalFactModelSSId);
    IOutboundPayloadProcessor payloadCreator = (IOutboundPayloadProcessor) SimpleKnowledgeRepository
            .getPayloadCreatorInstanceForClassNameCache(
                    SimpleKnowledgeRepository.getRequiredPayloadCreatorForSSID(externalFactModelSSId));

    // long usedMemory25 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after getting payloadCreatorInstance: " + usedMemory25 / 1000 + "KB, diff = " + (usedMemory25 - usedMemory24) / 1000 + "KB");

    /*
     * following is normally instantiated as MarshalVMR2VMRSchemaPayload.getInstance().mappingOutbound( resultFactLists, dssRequestKMItem );
     */
    String outputString = payloadCreator.mappingOutbound(resultFactLists, dssRequestKMItem);

    // long usedMemory26 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory after creating outputString: " + usedMemory26 / 1000 + "KB, diff = " + (usedMemory26 - usedMemory25) / 1000 + "KB");

    log.trace("II: " + interactionId + " KMId: " + requestedKmId + " finished marshalling results to external VMR, "
            + outputString.length() + " chars.");

    /*
     * clear out maps and arrays
     */
    // BuildCDSInputFactLists.clearAllFactLists(allFactLists);
    // synchronized (allFactLists) {
    log.debug("clearAllFactLists");
    // for (String eachKey : ((Map<String, List<?>>)allFactLists).keySet()) {
    // if (allFactLists.get(eachKey) != null) {
    // List<?> eachList = allFactLists.get(eachKey);
    // eachList.clear();
    // allFactLists.remove(eachKey);
    // }
    // }
    // }

    // long usedMemory4 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // // this is not needed, but it will make it easier to see the leak
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter before clear: " + usedMemory4 / 1000 + "KB");

    allFactLists.clear();
    // long usedMemory5 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after allFactLists.clear(): " + usedMemory5 / 1000 + "KB, diff = " + (usedMemory5 - usedMemory4) / 1000 + "KB");

    allFactLists = null;
    // long usedMemory6 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after allFactLists = null: " + usedMemory6 / 1000 + "KB, diff = " + (usedMemory6 - usedMemory5) / 1000 + "KB");

    cmds.clear();
    // long usedMemory7 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after cmds.clear(): " + usedMemory7 / 1000 + "KB, diff = " + (usedMemory7 - usedMemory6) / 1000 + "KB");

    cmds = null;
    // long usedMemory8 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after cmds = null: " + usedMemory8 / 1000 + "KB, diff = " + (usedMemory8 - usedMemory7) / 1000 + "KB");

    assertions.clear();
    // long usedMemory9 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after assertions.clear(): " + usedMemory9 / 1000 + "KB, diff = " + (usedMemory9 - usedMemory8) / 1000 + "KB");

    assertions = null;
    // long usedMemory10 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after assertions = null;: " + usedMemory10 / 1000 + "KB, diff = " + (usedMemory10 - usedMemory9) / 1000 + "KB");

    namedObjects.clear();
    // long usedMemory11 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after namedObjects.clear(): " + usedMemory11 / 1000 + "KB, diff = " + (usedMemory11 - usedMemory10) / 1000 + "KB");

    namedObjects = null;
    // long usedMemory12 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after namedObjects = null: " + usedMemory12 / 1000 + "KB, diff = " + (usedMemory12 - usedMemory11) / 1000 + "KB");

    for (String oneId : results.getIdentifiers()) {
        //results.getFactHandle(oneId)
        results.getIdentifiers().remove(results.getValue(oneId));
    }
    results = null;
    // long usedMemory13 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after results = null: " + usedMemory13 / 1000 + "KB, diff = " + (usedMemory13 - usedMemory12) / 1000 + "KB");

    resultFactLists.values().clear();
    resultFactLists.clear();
    // long usedMemory14 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after resultFactLists.clear(): " + usedMemory14 / 1000 + "KB, diff = " + (usedMemory14 - usedMemory13) / 1000 + "KB");

    resultFactLists = null;
    // long usedMemory15 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after resultFactLists = null: " + usedMemory15 / 1000 + "KB, diff = " + (usedMemory15 - usedMemory14) / 1000 + "KB");

    // dssRequestKMItem = null;
    // statelessKnowledgeSession = null;

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " completed Drools inferencing engine");

    // long usedMemory3 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    // System.gc();
    // System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after clear: " + usedMemory3 / 1000 + "KB, diff = " + (usedMemory3 - usedMemory4) / 1000 + "KB");

    return outputString;
    // return "";
}
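An aside, not part of the OpenCDS source: the namedObjects loop near the end of the method is essentially a group-by-simple-class-name step that merges rule-created objects into resultFactLists. A compact, JDK-only sketch of that grouping shape, using the same synchronizedMap-wrapped WeakHashMap the method uses for its result lists:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;

public class GroupByClassNameDemo {
    public static void main(String[] args) {
        Map<String, Object> namedObjects = new HashMap<String, Object>();
        namedObjects.put("a", "some string");
        namedObjects.put("b", Integer.valueOf(42));
        namedObjects.put("c", "another string");

        // Plays the role of resultFactLists above: lists of facts keyed by simple class name.
        // Note: for a longer-lived map, keys should stay strongly referenced elsewhere
        // (or a plain HashMap should be used), otherwise entries may silently disappear.
        Map<String, List<Object>> grouped = Collections.synchronizedMap(new WeakHashMap<String, List<Object>>());
        for (Object value : namedObjects.values()) {
            if (value == null) {
                continue;
            }
            String typeName = value.getClass().getSimpleName();
            List<Object> bucket = grouped.get(typeName);
            if (bucket == null) {
                bucket = new ArrayList<Object>();
                grouped.put(typeName, bucket);
            }
            bucket.add(value);
        }
        System.out.println(grouped.keySet()); // e.g. [String, Integer]
    }
}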
From source file:de.mrapp.android.util.multithreading.AbstractDataBinder.java
/**
 * Creates a new data binder, which uses a specific executor service and cache. Caching is
 * enabled by default.
 *
 * @param context
 *         The context, which should be used by the data binder, as an instance of the class
 *         {@link Context}. The context may not be null
 * @param threadPool
 *         The executor service, which should be used to manage asynchronous tasks, as an
 *         instance of the type {@link ExecutorService}. The executor service may not be null
 * @param cache
 *         The LRU cache, which should be used to cache already loaded data, as an instance of
 *         the class LruCache. The cache may not be null
 */
public AbstractDataBinder(@NonNull final Context context, @NonNull final ExecutorService threadPool,
        @NonNull final LruCache<KeyType, DataType> cache) {
    ensureNotNull(context, "The context may not be null");
    ensureNotNull(threadPool, "The executor service may not be null");
    ensureNotNull(cache, "The cache may not be null");
    this.context = context;
    this.logger = new Logger(LogLevel.INFO);
    this.listeners = new LinkedHashSet<>();
    this.cache = cache;
    this.views = Collections.synchronizedMap(new WeakHashMap<ViewType, KeyType>());
    this.threadPool = threadPool;
    this.cancelLock = new Object();
    this.canceled = false;
    this.useCache = true;
}
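The views field above is the standard recipe for a thread-safe weak map: WeakHashMap provides the weak keys, Collections.synchronizedMap provides per-operation locking. A JDK-only sketch of the same construction (WeakViewRegistry and its Object/Long types are stand-ins of mine, not classes from this library):

import java.util.Collections;
import java.util.Map;
import java.util.WeakHashMap;

public class WeakViewRegistry {
    // Views are weak keys: an entry vanishes once a view is no longer referenced elsewhere
    // (e.g. after it has been recycled). The wrapper makes individual map operations thread-safe.
    private final Map<Object, Long> boundKeys =
            Collections.synchronizedMap(new WeakHashMap<Object, Long>());

    public void bind(Object view, long key) {
        boundKeys.put(view, key);
    }

    public Long keyFor(Object view) {
        return boundKeys.get(view);
    }

    // Compound actions such as iteration still need to lock on the wrapper itself.
    public int countBoundTo(long key) {
        synchronized (boundKeys) {
            int count = 0;
            for (Long value : boundKeys.values()) {
                if (value != null && value == key) {
                    count++;
                }
            }
            return count;
        }
    }
}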
From source file:org.apache.axiom.om.util.StAXUtils.java
/**
 * @return XMLInputFactory for the current classloader
 */
private static XMLInputFactory getXMLInputFactory_perClassLoader(StAXParserConfiguration configuration) {
    ClassLoader cl = getContextClassLoader();
    XMLInputFactory factory;
    if (cl == null) {
        factory = getXMLInputFactory_singleton(configuration);
    } else {
        // Check the cache
        if (configuration == null) {
            configuration = StAXParserConfiguration.DEFAULT;
        }
        Map map = (Map) inputFactoryPerCLMap.get(configuration);
        if (map == null) {
            map = Collections.synchronizedMap(new WeakHashMap());
            inputFactoryPerCLMap.put(configuration, map);
            factory = null;
        } else {
            factory = (XMLInputFactory) map.get(cl);
        }
        // If not found in the cache map, create a new factory
        if (factory == null) {
            if (log.isDebugEnabled()) {
                log.debug("About to create XMLInputFactory implementation with " + "classloader=" + cl);
                log.debug("The classloader for javax.xml.stream.XMLInputFactory is: "
                        + XMLInputFactory.class.getClassLoader());
            }
            try {
                factory = newXMLInputFactory(null, configuration);
            } catch (ClassCastException cce) {
                if (log.isDebugEnabled()) {
                    log.debug("Failed creation of XMLInputFactory implementation with " + "classloader=" + cl);
                    log.debug("Exception is=" + cce);
                    log.debug("Attempting with classloader: " + XMLInputFactory.class.getClassLoader());
                }
                factory = newXMLInputFactory(XMLInputFactory.class.getClassLoader(), configuration);
            }
            if (factory != null) {
                // Cache the new factory
                map.put(cl, factory);
                if (log.isDebugEnabled()) {
                    log.debug("Created XMLInputFactory = " + factory.getClass() + " with classloader=" + cl);
                    log.debug("Configuration = " + configuration);
                    log.debug("Size of XMLInputFactory map for this configuration = " + map.size());
                    log.debug("Configurations for which factories have been cached = "
                            + inputFactoryPerCLMap.keySet());
                }
            } else {
                factory = getXMLInputFactory_singleton(configuration);
            }
        }
    }
    return factory;
}
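This is the textbook use case for WeakHashMap: a static, per-ClassLoader cache that does not pin class loaders in memory, so an undeployed application's loader can still be garbage collected. The following is not Axiom code, just a minimal sketch of the pattern with a hypothetical Factory callback; one known caveat is that if the cached value itself holds a strong reference to its ClassLoader (as a factory implementation loaded by that loader usually does), the entry will never be cleared.

import java.util.Collections;
import java.util.Map;
import java.util.WeakHashMap;

public final class PerClassLoaderCache<V> {
    // Weak keys: an entry disappears once its ClassLoader is otherwise unreachable,
    // avoiding the classic classloader leak of a strongly keyed static cache.
    private final Map<ClassLoader, V> cache =
            Collections.synchronizedMap(new WeakHashMap<ClassLoader, V>());

    public interface Factory<V> {
        V create(ClassLoader cl);
    }

    public V get(ClassLoader cl, Factory<V> factory) {
        V value = cache.get(cl);
        if (value == null) {
            value = factory.create(cl); // may run twice under contention, like the Axiom code above
            cache.put(cl, value);
        }
        return value;
    }
}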
From source file:android.support.v7.widget.AppCompatDrawableManager.java
private void addTintListToCache(@NonNull Context context, @DrawableRes int resId,
        @NonNull ColorStateList tintList) {
    if (mTintLists == null) {
        mTintLists = new WeakHashMap<>();
    }
    SparseArray<ColorStateList> themeTints = mTintLists.get(context);
    if (themeTints == null) {
        themeTints = new SparseArray<>();
        mTintLists.put(context, themeTints);
    }
    themeTints.append(resId, tintList);
}
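Here the WeakHashMap is keyed by Context so that a cached SparseArray of tint lists does not keep a destroyed Activity alive. A JDK-only sketch of the same two-level cache shape, with Object standing in for Context and HashMap for SparseArray (those substitutions are mine, not the support library's types):

import java.util.HashMap;
import java.util.Map;
import java.util.WeakHashMap;

public class TwoLevelTintCache {
    // Outer map has weak keys, so a collected "context" releases its whole inner cache.
    private Map<Object, Map<Integer, String>> tintLists;

    public void addToCache(Object context, int resId, String tint) {
        if (tintLists == null) {
            tintLists = new WeakHashMap<Object, Map<Integer, String>>();
        }
        Map<Integer, String> perContext = tintLists.get(context);
        if (perContext == null) {
            perContext = new HashMap<Integer, String>();
            tintLists.put(context, perContext);
        }
        perContext.put(resId, tint);
    }

    public String getFromCache(Object context, int resId) {
        Map<Integer, String> perContext = (tintLists != null) ? tintLists.get(context) : null;
        return (perContext != null) ? perContext.get(resId) : null;
    }
}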
From source file:com.cloudbees.jenkins.support.SupportPlugin.java
public List<LogRecord> getAllLogRecords(final Node node) throws IOException, InterruptedException {
    if (node != null) {
        VirtualChannel channel = node.getChannel();
        if (channel != null) {
            final Future<List<LogRecord>> future = channel.callAsync(new LogFetcher());
            try {
                return future.get(REMOTE_OPERATION_TIMEOUT_MS, TimeUnit.MILLISECONDS);
            } catch (ExecutionException e) {
                final LogRecord lr = new LogRecord(Level.WARNING, "Could not retrieve remote log records");
                lr.setThrown(e);
                return Collections.singletonList(lr);
            } catch (TimeoutException e) {
                Computer.threadPoolForRemoting.submit(() -> {
                    List<LogRecord> records;
                    try {
                        records = future.get(REMOTE_OPERATION_CACHE_TIMEOUT_SEC, TimeUnit.SECONDS);
                    } catch (InterruptedException e1) {
                        final LogRecord lr = new LogRecord(Level.WARNING, "Could not retrieve remote log records");
                        lr.setThrown(e1);
                        records = Collections.singletonList(lr);
                    } catch (ExecutionException e1) {
                        final LogRecord lr = new LogRecord(Level.WARNING, "Could not retrieve remote log records");
                        lr.setThrown(e1);
                        records = Collections.singletonList(lr);
                    } catch (TimeoutException e1) {
                        final LogRecord lr = new LogRecord(Level.WARNING, "Could not retrieve remote log records");
                        lr.setThrown(e1);
                        records = Collections.singletonList(lr);
                        future.cancel(true);
                    }
                    synchronized (SupportPlugin.this) {
                        if (logRecords == null) {
                            logRecords = new WeakHashMap<>();
                        }
                        logRecords.put(node, records);
                    }
                });
                synchronized (this) {
                    if (logRecords != null) {
                        List<LogRecord> result = logRecords.get(node);
                        if (result != null) {
                            result = new ArrayList<>(result);
                            final LogRecord lr = new LogRecord(Level.WARNING, "Using cached remote log records");
                            lr.setThrown(e);
                            result.add(lr);
                            return result;
                        }
                    } else {
                        final LogRecord lr = new LogRecord(Level.WARNING, "No previous cached remote log records");
                        lr.setThrown(e);
                        return Collections.singletonList(lr);
                    }
                }
            }
        }
    }
    return Collections.emptyList();
}