Example usage for java.util Collections synchronizedMap

Introduction

This page collects example usages of java.util.Collections.synchronizedMap from open source projects.

Prototype

public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m) 

Document

Returns a synchronized (thread-safe) map backed by the specified map.
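
As the description says, only individual operations on the returned map are thread-safe; iteration is not atomic and must be guarded by synchronizing on the returned map itself. A minimal self-contained sketch of that contract (not drawn from any of the projects below):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedMapDemo {
    public static void main(String[] args) {
        // Every method of the wrapper locks on the wrapper object itself.
        Map<String, Integer> counts = Collections.synchronizedMap(new HashMap<String, Integer>());

        counts.put("a", 1); // individual calls are thread-safe
        counts.put("b", 2);

        // Iteration is NOT atomic: hold the wrapper's monitor for the whole traversal,
        // or a concurrent put/remove may cause a ConcurrentModificationException.
        synchronized (counts) {
            for (Map.Entry<String, Integer> e : counts.entrySet()) {
                System.out.println(e.getKey() + "=" + e.getValue());
            }
        }
    }
}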

Usage

From source file:org.apache.hadoop.hbase.crosssite.coprocessor.TestCrossSiteCoprocessor.java

private Map<byte[], Long> sum(final CrossSiteHTable table, final String[] clusterNames, final byte[] family,
        final byte[] qualifier, final byte[] start, final byte[] end) throws ServiceException, Throwable {
    ColumnAggregationProtos.SumRequest.Builder builder = ColumnAggregationProtos.SumRequest.newBuilder();
    builder.setFamily(HBaseZeroCopyByteString.wrap(family));
    if (qualifier != null && qualifier.length > 0) {
        builder.setQualifier(HBaseZeroCopyByteString.wrap(qualifier));
    }
    final Map<byte[], Long> results = Collections
            .synchronizedMap(new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR));
    table.coprocessorService(ColumnAggregationProtos.ColumnAggregationService.class, start, end, clusterNames,
            new Batch.Call<ColumnAggregationProtos.ColumnAggregationService, Long>() {
                @Override
                public Long call(ColumnAggregationProtos.ColumnAggregationService instance) throws IOException {
                    BlockingRpcCallback<ColumnAggregationProtos.SumResponse> rpcCallback = new BlockingRpcCallback<ColumnAggregationProtos.SumResponse>();
                    ColumnAggregationProtos.SumRequest.Builder builder = ColumnAggregationProtos.SumRequest
                            .newBuilder();
                    builder.setFamily(HBaseZeroCopyByteString.wrap(family));
                    if (qualifier != null && qualifier.length > 0) {
                        builder.setQualifier(HBaseZeroCopyByteString.wrap(qualifier));
                    }
                    instance.sum(null, builder.build(), rpcCallback);
                    return rpcCallback.get().getSum();
                }
            }, new Batch.Callback<Long>() {
                public void update(byte[] region, byte[] row, Long value) {
                    results.put(region, value);
                }
            });
    return results;
}
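
In the example above, the TreeMap is wrapped because coprocessorService typically invokes the Batch.Callback from multiple worker threads, roughly one per region. A stripped-down sketch of the same aggregation pattern, using plain threads in place of HBase RPC (all names here are illustrative, not HBase API):

import java.util.Collections;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class CallbackAggregation {
    public static void main(String[] args) throws InterruptedException {
        // Results arrive from several callback threads, so the TreeMap needs the wrapper.
        final Map<String, Long> results = Collections.synchronizedMap(new TreeMap<String, Long>());

        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int region = 0; region < 8; region++) {
            final String key = "region-" + region;
            final long sum = region * 10L; // stand-in for rpcCallback.get().getSum()
            pool.execute(() -> results.put(key, sum)); // mirrors Batch.Callback#update
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES);

        System.out.println(results); // safe to read: all writers have finished
    }
}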

From source file:org.apache.hadoop.mapred.HFSPScheduler.java

@Override
public synchronized void start() throws IOException {
    super.start();

    this.preemptionStrategy = HFSPScheduler.loadPreemptionStrategyInstance(conf);

    // lookup utilities
    this.taskTrackers = new HashMap<String, TaskTrackerStatus>();
    this.jIDToJIP = new HashMap<JobID, JobInProgress>();
    this.jIDToMapJDI = Collections.synchronizedMap(new HashMap<JobID, JobDurationInfo>());
    this.jIDToReduceJDI = Collections.synchronizedMap(new HashMap<JobID, JobDurationInfo>());

    // job added listener
    this.jobInProgressListener = new HFSPJIPListener(this);
    this.taskTrackerManager.addJobInProgressListener(jobInProgressListener);

    // job initializer
    // if (!this.mockMode)
    this.eagerTaskInitializationListener.setTaskTrackerManager(this.taskTrackerManager);
    this.eagerTaskInitializationListener.start();
    this.taskTrackerManager.addJobInProgressListener(this.eagerTaskInitializationListener);
    // }

    // Training queues
    this.trainingMapJobs = Collections.synchronizedSet(new HashSet<JobInProgress>());
    this.trainingReduceJobs = Collections.synchronizedSet(new HashSet<JobInProgress>());

    // Size Based queues
    this.sizeBasedMapJobsQueue = new TreeMap<JobDurationInfo, JobInProgress>(
            HFSPScheduler.JOB_DURATION_COMPARATOR);
    this.sizeBasedReduceJobsQueue = new TreeMap<JobDurationInfo, JobInProgress>(
            HFSPScheduler.JOB_DURATION_COMPARATOR);

    // Simulator
    this.cluster = new VirtualCluster<TaskDurationInfo>(this.getMaxTasks(TaskType.MAP),
            this.getMaxTasks(TaskType.REDUCE));
    this.scheduler = new MaxMinFSScheduler<JobDurationInfo, TaskDurationInfo>();
    this.progressManager = new ProgressManager(this.cluster, this.scheduler);

    // Trainer
    // try {
    // this.trainer = new BrokerTrainer(this, conf, this.clock);
    // } catch (Exception e) {
    // throw new RuntimeException();
    // }

    // Update thread
    // this.updateInterval = conf.getLong(UPDATE_INTERVAL_KEYNAME, 5000);
    this.updateThread = new UpdateThread(this);
    // if (!this.mockMode) {
    // this.updateThread.start();
    // } else {
    // this.delayEnabled = false; // problem with delay enabled in mock mode
    // }

    // localityDelay = conf.getLong("mapred.fairscheduler.locality.delay", -1);
    if (localityDelay == -1)
        autoComputeLocalityDelay = true; // Compute from heartbeat interval

    if (LOG.isDebugEnabled()) {
        StringBuilder builder = new StringBuilder(HFSPScheduler.class.toString());
        // this.numSlotsForMapTrain = conf.getInt(TRAIN_MAP_SLOTS_KEYNAME, 0);
        // this.numSlotsForReduceTrain = conf.getInt(TRAIN_REDUCE_SLOTS_KEYNAME,
        // 0);
        // this.numMapSlotsForJob = conf.getInt(TRAINER_MIN_MAPS_KEYNAME, 2);
        // this.numReduceSlotsForJob = conf.getInt(TRAINER_MIN_REDUCES_KEYNAME,
        // 2);
        // this.eagerPreemptionEnabled
        builder.append(" initialized");
        if (this.mockMode) {
            builder.append(" in mockMode");
        }
        builder.append(" with configuration:").append("\t").append("update interval: ")
                .append(this.updateInterval).append("\t").append("eager preemption: ")
                .append(this.preemptionStrategy).append("\t").append("delay enabled: ")
                .append(this.delayEnabled).append("\t").append("num slots for train map: ")
                .append(this.numSlotsForMapTrain).append("\t").append("num slots for train reduce: ")
                .append(this.numSlotsForReduceTrain).append("\t").append("min map tasks for train: ")
                .append(this.numMapTrainSlotsForJob).append("\t").append("min reduce tasks for train: ")
                .append(this.numReduceTrainSlotsForJob).append("\t").append("initial map task duration: ")
                .append(this.initialMapTaskDuration).append("\t").append("initial reduce task duration: ")
                .append(this.initialReduceTaskDuration).append("\t").append("duration modifier map: ")
                .append(this.durationModifierMap).append("\t").append("duration modifier reduce: ")
                .append(this.durationModifierReduce).append("\t").append(" . Forcing the first update");

        LOG.debug(builder.toString());
    } else {
        LOG.info(HFSPScheduler.class + " initialized, forcing the first update");
    }

    // if (!this.mockMode)
    this.update();
}

From source file:nl.minbzk.dwr.zoeken.enricher.processor.UIMAInjector.java

/**
 * Initialize the given processor context.
 *
 * @param context
 */
public static void initialize(final ProcessorContext context) {
    context.setParameter(CONTEXT_PARAMETER_CAS, Collections.synchronizedMap(new HashMap<String, CASUnit>()));
}

From source file:op.care.reports.PnlReport.java

private void reloadDisplay(final boolean lockmessageAfterwards) {
    /***
     *               _                 _ ____  _           _
     *      _ __ ___| | ___   __ _  __| |  _ \(_)___ _ __ | | __ _ _   _
     *     | '__/ _ \ |/ _ \ / _` |/ _` | | | | / __| '_ \| |/ _` | | | |
     *     | | |  __/ | (_) | (_| | (_| | |_| | \__ \ |_) | | (_| | |_| |
     *     |_|  \___|_|\___/ \__,_|\__,_|____/|_|___/ .__/|_|\__,_|\__, |
     *                                              |_|            |___/
     */

    synchronized (contentmap) {
        SYSTools.clear(contentmap);
    }
    synchronized (cpMap) {
        SYSTools.clear(cpMap);
    }

    synchronized (linemap) {
        SYSTools.clear(linemap);
    }
    synchronized (valuecache) {
        SYSTools.clear(valuecache);
    }

    initPhase = true;

    OPDE.getMainframe().setBlocked(true);
    OPDE.getDisplayManager().setProgressBarMessage(new DisplayMessage(SYSTools.xx("misc.msg.wait"), -1, 100));
    final long time = System.currentTimeMillis();

    SwingWorker worker = new SwingWorker() {
        Date max = null;

        @Override
        protected Object doInBackground() throws Exception {

            GUITools.setResidentDisplay(resident);

            if (minmax == null) {
                minmax = NReportTools.getMinMax(resident);
            }

            holidays = Collections.synchronizedMap(
                    SYSCalendar.getHolidays(minmax.getStart().getYear(), minmax.getEnd().getYear()));

            if (minmax != null) {
                max = minmax.getEnd().toDate();
                LocalDate start = SYSCalendar.bom(minmax.getStart()).toLocalDate();
                LocalDate end = resident.isActive() ? new LocalDate()
                        : SYSCalendar.bom(minmax.getEnd()).toLocalDate();

                int maxYears = Years.yearsBetween(start.toDateTimeAtStartOfDay(), end.toDateTimeAtStartOfDay())
                        .getYears();

                int i = 0;
                for (int year = end.getYear(); year >= start.getYear(); year--) {
                    OPDE.debug((System.currentTimeMillis() - time) + " ms");
                    i++;
                    OPDE.getDisplayManager().setProgressBarMessage(
                            new DisplayMessage(SYSTools.xx("misc.msg.wait"), i, maxYears));
                    createCP4Year(year, start, end);
                }

                OPDE.debug((System.currentTimeMillis() - time) + " ms1");
            }

            return null;
        }

        @Override
        protected void done() {
            OPDE.debug((System.currentTimeMillis() - time) + " ms2");
            expandTheLast2Weeks();

            OPDE.debug((System.currentTimeMillis() - time) + " ms3");
            buildPanel();
            OPDE.debug((System.currentTimeMillis() - time) + " ms4");
            initPhase = false;
            OPDE.getDisplayManager().setProgressBarMessage(null);
            OPDE.getMainframe().setBlocked(false);
            if (lockmessageAfterwards)
                OPDE.getDisplayManager().addSubMessage(DisplayManager.getLockMessage());
            if (max != null) {
                OPDE.getDisplayManager().addSubMessage(new DisplayMessage(
                        SYSTools.xx("misc.msg.lastEntry") + ": " + DateFormat.getDateInstance().format(max),
                        5));
            } else {
                OPDE.getDisplayManager()
                        .addSubMessage(new DisplayMessage(SYSTools.xx("misc.msg.noentryyet"), 5));
            }
        }
    };
    worker.execute();
}

From source file:org.openhab.binding.digitalstrom.internal.DigitalSTROMBinding.java

private void handleStructure(Apartment apartment) {
    if (apartment != null) {

        Map<Integer, Map<Short, List<String>>> newZoneGroupMap = Collections
                .synchronizedMap(new HashMap<Integer, Map<Short, List<String>>>());
        Map<String, Device> clonedDsidMap = getDsidToDeviceMap();

        for (Zone zone : apartment.getZoneMap().values()) {

            Map<Short, List<String>> groupMap = new HashMap<Short, List<String>>();

            for (DetailedGroupInfo g : zone.getGroups()) {

                List<String> devicesInGroup = new LinkedList<String>();
                for (String dsid : g.getDeviceList()) {
                    if (clonedDsidMap.containsKey(dsid)) {
                        devicesInGroup.add(dsid);
                    }
                }
                groupMap.put(g.getGroupID(), devicesInGroup);
            }
            newZoneGroupMap.put(zone.getZoneId(), groupMap);
        }

        synchronized (digitalSTROMZoneGroupMap) {
            digitalSTROMZoneGroupMap = newZoneGroupMap;
        }
    }
}

From source file:de.javakaffee.kryoserializers.KryoTest.java

@DataProvider
public Object[][] synchronizedCollections() {
    final HashMap<String, String> m = new HashMap<String, String>();
    m.put("foo", "bar");
    return new Object[][] {
            { Collections.synchronizedList(new ArrayList<String>(Arrays.asList("foo", "bar"))) },
            { Collections.synchronizedSet(new HashSet<String>(Arrays.asList("foo", "bar"))) },
            { Collections.synchronizedMap(m) }, };
}
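
Kryo needs dedicated serializers for these objects because the synchronized wrappers are private nested classes of java.util.Collections with no accessible constructor. The runtime types are easy to verify:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;

public class WrapperTypes {
    public static void main(String[] args) {
        // Prints class java.util.Collections$SynchronizedMap
        System.out.println(Collections.synchronizedMap(new HashMap<>()).getClass());
        // Prints class java.util.Collections$SynchronizedRandomAccessList
        System.out.println(Collections.synchronizedList(new ArrayList<>()).getClass());
        // Prints class java.util.Collections$SynchronizedSet
        System.out.println(Collections.synchronizedSet(new HashSet<>()).getClass());
    }
}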

From source file:org.jasig.portal.groups.smartldap.SmartLdapGroupStore.java

private GroupsTree buildGroupsTree() {

    long timestamp = System.currentTimeMillis();

    // Prepare the new local indices...
    Map<String, IEntityGroup> new_groups = Collections.synchronizedMap(new HashMap<String, IEntityGroup>());
    Map<String, List<String>> new_parents = Collections.synchronizedMap(new HashMap<String, List<String>>());
    Map<String, List<String>> new_children = Collections.synchronizedMap(new HashMap<String, List<String>>());
    Map<String, List<String>> new_keysByUpperCaseName = Collections
            .synchronizedMap(new HashMap<String, List<String>>());

    // Gather IEntityGroup objects from LDAP...
    RuntimeRequestResponse req = new RuntimeRequestResponse();
    Set<LdapRecord> set = new HashSet<LdapRecord>();
    req.setAttribute("GROUPS", set);
    req.setAttribute("smartLdapGroupStore", this);
    SubQueryCounter queryCounter = new SubQueryCounter();
    req.setAttribute("queryCounter", queryCounter);
    req.setAttribute("baseFilter", spring_context.getBean("filter"));
    for (String name : spring_context.getBeanDefinitionNames()) {
        req.setAttribute(name, spring_context.getBean(name));
    }
    runner.run(initTask, req);

    if (log.isInfoEnabled()) {
        String msg = "init() found " + set.size() + " records.";
        log.info(msg);
    }

    // Do a first loop to build the main catalog (new_groups)...
    for (LdapRecord r : set) {

        // new_groups (me)...
        IEntityGroup g = r.getGroup();
        new_groups.put(g.getLocalKey(), g);

    }

    // Do a second loop to build local indices...
    for (LdapRecord r : set) {

        IEntityGroup g = r.getGroup();

        // new_parents (I am a parent for all my children)...
        for (String childKey : r.getKeysOfChildren()) {

            // NB:  We're only interested in relationships between 
            // objects in the main catalog (i.e. new_groups);  
            // discard everything else...
            if (!new_groups.containsKey(childKey)) {
                break;
            }

            List<String> parentsList = new_parents.get(childKey);
            if (parentsList == null) {
                // first parent for this child...
                parentsList = Collections.synchronizedList(new LinkedList<String>());
                new_parents.put(childKey, parentsList);
            }
            parentsList.add(g.getLocalKey());

        }

        // new_children...
        List<String> childrenList = Collections.synchronizedList(new LinkedList<String>());
        for (String childKey : r.getKeysOfChildren()) {
            // NB:  We're only interested in relationships between 
            // objects in the main catalog (i.e. new_groups);  
            // discard everything else...
            if (new_groups.containsKey(childKey)) {
                childrenList.add(childKey);
            }
        }
        new_children.put(g.getLocalKey(), childrenList);

        // new_keysByUpperCaseName...
        List<String> groupsWithMyName = new_keysByUpperCaseName.get(g.getName().toUpperCase());
        if (groupsWithMyName == null) {
            // I am the first group with my name (pretty likely)...
            groupsWithMyName = Collections.synchronizedList(new LinkedList<String>());
            new_keysByUpperCaseName.put(g.getName().toUpperCase(), groupsWithMyName);
        }
        groupsWithMyName.add(g.getLocalKey());

    }

    /*
     * Now load the ROOT_GROUP into the collections...
     */

    // new_groups (me)...
    new_groups.put(ROOT_GROUP.getLocalKey(), ROOT_GROUP);

    // new_parents (I am a parent for all groups that have no other parent)...
    List<String> childrenOfRoot = Collections.synchronizedList(new LinkedList<String>()); // for later...
    for (String possibleChildKey : new_groups.keySet()) {
        if (!possibleChildKey.equals(ROOT_GROUP.getLocalKey()) && !new_parents.containsKey(possibleChildKey)) {
            List<String> p = Collections.synchronizedList(new LinkedList<String>());
            p.add(ROOT_GROUP.getLocalKey());
            new_parents.put(possibleChildKey, p);
            childrenOfRoot.add(possibleChildKey); // for later...
        }
    }

    // new_children...
    new_children.put(ROOT_GROUP.getLocalKey(), childrenOfRoot);

    // new_keysByUpperCaseName...
    List<String> groupsWithMyName = new_keysByUpperCaseName.get(ROOT_GROUP.getName().toUpperCase());
    if (groupsWithMyName == null) {
        // I am the first group with my name (pretty likely)...
        groupsWithMyName = Collections.synchronizedList(new LinkedList<String>());
        new_keysByUpperCaseName.put(ROOT_GROUP.getName().toUpperCase(), groupsWithMyName);
    }
    groupsWithMyName.add(ROOT_GROUP.getLocalKey());

    if (log.isInfoEnabled()) {
        long benchmark = System.currentTimeMillis() - timestamp;
        log.info("Refresh of groups tree completed in " + benchmark + " milliseconds");
        log.info("Total number of LDAP queries:  " + (queryCounter.getCount() + 1));
        String msg = "init() :: final size of each collection is as follows..." + "\n\tgroups="
                + new_groups.size() + "\n\tparents=" + new_parents.size() + "\n\tchildren="
                + new_children.size() + "\n\tkeysByUpperCaseName=" + new_keysByUpperCaseName.size();
        log.info(msg);
    }

    if (log.isTraceEnabled()) {

        StringBuilder msg = new StringBuilder();

        // new_groups...
        msg.setLength(0);
        msg.append("Here are the keys of the new_groups collection:");
        for (String s : new_groups.keySet()) {
            msg.append("\n\t").append(s);
        }
        log.trace(msg.toString());

        // new_parents...
        msg.setLength(0);
        msg.append("Here are the parents of each child in the new_parents collection:");
        for (Map.Entry<String, List<String>> y : new_parents.entrySet()) {
            msg.append("\n\tchild=").append(y.getKey());
            for (String s : y.getValue()) {
                msg.append("\n\t\tparent=").append(s);
            }
        }
        log.trace(msg.toString());

        // new_children...
        msg.setLength(0);
        msg.append("Here are the children of each parent in the new_children collection:");
        for (Map.Entry<String, List<String>> y : new_children.entrySet()) {
            msg.append("\n\tparent=").append(y.getKey());
            for (String s : y.getValue()) {
                msg.append("\n\t\tchild=").append(s);
            }
        }
        log.trace(msg.toString());

        // new_keysByUpperCaseName...
        msg.append("Here are the groups that have each name in the new_keysByUpperCaseName collection:");
        for (Map.Entry<String, List<String>> y : new_keysByUpperCaseName.entrySet()) {
            msg.append("\n\tname=").append(y.getKey());
            for (String s : y.getValue()) {
                msg.append("\n\t\tgroup=").append(s);
            }
        }
        log.trace(msg.toString());

    }

    return new GroupsTree(new_groups, new_parents, new_children, new_keysByUpperCaseName);

}
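
Note that the get / if-null / put sequences above appear safe because buildGroupsTree() builds the new maps locally before publishing them. On a synchronizedMap shared between concurrent writers, such check-then-act sequences are compound actions and need client-side locking on the map itself. A minimal sketch of the atomic variant (class and method names are illustrative):

import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class GetOrCreateIdiom {
    private final Map<String, List<String>> parents = Collections
            .synchronizedMap(new HashMap<String, List<String>>());

    // Atomic variant of the get / if-null / put sequence used in buildGroupsTree().
    public void addParent(String childKey, String parentKey) {
        synchronized (parents) { // guard the whole check-then-act, not just each call
            List<String> list = parents.get(childKey);
            if (list == null) {
                list = Collections.synchronizedList(new LinkedList<String>());
                parents.put(childKey, list);
            }
            list.add(parentKey);
        }
    }
}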

From source file:org.rifidi.designer.services.core.entities.EntitiesServiceImpl.java

@SuppressWarnings("unchecked")
public void loadScene(IFile file) {
    TextureManager.clearCache();
    // invalidate the current sceneData
    for (SceneDataChangedListener listener : listeners) {
        listener.destroySceneData(this.sceneData);
    }
    try {
        // initialize jaxb to know about the classes provided by the
        // libraries
        logger.debug("initializing jaxb");
        List<Class> classes = EntityLibraryRegistry.getInstance().getEntityClasses();
        classes.add(org.rifidi.designer.entities.SceneData.class);
        classes.add(org.rifidi.designer.entities.VisualEntity.class);
        classes.add(org.rifidi.emulator.tags.impl.C0G1Tag.class);
        classes.add(org.rifidi.emulator.tags.impl.C1G1Tag.class);
        classes.add(org.rifidi.emulator.tags.impl.C1G2Tag.class);
        classes.add(org.rifidi.emulator.tags.impl.RifidiTag.class);
        JAXBContext context = JAXBContext.newInstance(classes.toArray(new Class[0]));
        logger.debug("loading");
        Unmarshaller unmarshaller = context.createUnmarshaller();

        try {
            Object unm = unmarshaller.unmarshal(file.getContents());
            sceneData = (SceneData) unm;
            sceneData.setPhysicsSpace(PhysicsSpace.create());
            sceneData.setCollisionHandler(new InputHandler());

            // initialize the JME importer to handle physics
            sceneData.getPhysicsSpace().setupBinaryClassLoader(BinaryImporter.getInstance());

            // if this is a new file create an empty room
            if (sceneData.getNodeBytes() == null) {
                sceneData.setBitMap(new BitMap(sceneData.getWidth()));
                // sceneData.getRootNode().attachChild(createRoom());
            } else {// load the model from the stored bytes

                // let the textures load from the right spot
                try {
                    URI dirpath = Activator.class.getClassLoader().getResource("/").toURI();
                    ResourceLocatorTool.addResourceLocator(ResourceLocatorTool.TYPE_TEXTURE,
                            new SimpleResourceLocator(dirpath));
                } catch (URISyntaxException e) {
                    logger.error("URI exception while setting texture path: " + e);
                }

                sceneData.setRootNode((Node) BinaryImporter.getInstance().load(sceneData.getNodeBytes()));

                for (Entity entity : sceneData.getEntities()) {
                    ServiceRegistry.getInstance().service(entity);
                    sceneData.getEntityNames().add(entity.getName());
                    initEntity(entity, sceneData, false);
                }
                for (Entity entity : sceneData.getProducedEntities().getEntities()) {
                    ServiceRegistry.getInstance().service(entity);
                    sceneData.getEntityNames().add(entity.getName());
                    initEntity(entity, sceneData, false);
                }
                for (EntityGroup entityGroup : sceneData.getEntityGroups()) {
                    entityGroup.setSceneData(sceneData);
                }
            }

        } catch (IOException e) {
            logger.fatal("Unable to load file (IOException): " + e);
            return;
        } catch (CoreException e) {
            logger.fatal("Unable to load file (CoreException): " + e);
            return;
        }
        logger.debug("loading: done");
    } catch (JAXBException e) {
        logger.fatal("Unable to load file (JAXB): " + e);
        return;
    }
    createRoom(sceneData);
    fileOfCurrentScene = file;
    nodeToEntity = Collections.synchronizedMap(new HashMap<Node, VisualEntity>());
    for (Entity entity : sceneData.getSearchableEntities()) {
        if (entity instanceof VisualEntity) {
            nodeToEntity.put(((VisualEntity) entity).getNode(), (VisualEntity) entity);
        }
    }
    for (SceneDataChangedListener listener : listeners) {
        listener.sceneDataChanged(sceneData);
    }
}

From source file:org.opencds.service.drools.v54.DroolsAdapter.java

/**
 * Big-picture pseudocode for the following method:
 *
 *     for this requestedKmId {
 *         getResponse:
 *             create Drools session
 *             load KM into session
 *             load globals into session
 *             load data from allFactLists into session
 *             KBase.execute (calls Drools)
 *             unload result from KM to outputString
 *     }
 *
 * This means that we are considering the OMG-CDSS concept of KnowledgeModule equivalent to
 * the Drools concept of KnowledgeBase.
 */

public String getOneResponse(DSSRequestKMItem dssRequestKMItem)
        throws InvalidDriDataFormatExceptionFault, RequiredDataNotProvidedExceptionFault,
        EvaluationExceptionFault, InvalidTimeZoneOffsetExceptionFault, UnrecognizedScopedEntityExceptionFault,
        UnrecognizedLanguageExceptionFault, UnsupportedLanguageExceptionFault, DSSRuntimeExceptionFault {
    //      SimpleKnowledgeRepository    simpleKR    = SimpleKnowledgeRepository.getInstance();

    String requestedKmId = dssRequestKMItem.getRequestedKmId();
    String requestedKmPrimaryProcessName = SimpleKnowledgeRepository
            .getRequiredKMPrimaryProcessNameForKMId(requestedKmId);
    TimingDataKM timingDataKM = dssRequestKMItem.getKmTimingData();
    @SuppressWarnings("unchecked")
    JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput> cdsInput = (JAXBElement<org.opencds.vmr.v1_0.schema.CDSInput>) dssRequestKMItem
            .getDssRequestDataItem().getCdsInput();

    DSSRequestDataItem dssRequestDataItem = dssRequestKMItem.getDssRequestDataItem();

    String externalFactModelSSId = dssRequestDataItem.getExternalFactModelSSId();
    Date evalTime = dssRequestDataItem.getEvalTime();
    String clientLanguage = dssRequestDataItem.getClientLanguage();
    String clientTimeZoneOffset = dssRequestDataItem.getClientTimeZoneOffset();
    String interactionId = dssRequestDataItem.getInteractionId();

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " (" + requestedKmPrimaryProcessName + ")"
            + ", SSId: " + externalFactModelSSId + ", evalTime: " + evalTime + ", clTimeZone: "
            + clientTimeZoneOffset + ", clLang: " + clientLanguage);

    /** 
     * Load fact map from specific externalFactModels, as specified in externalFactModel SSId...
     * 
     * Every separately identified SSId, by definition, specifies separate input and output mappings.
     * Input mappings are used here, and then output mappings are used following the session.execute.
     */

    //      Map<String, List<?>>                allFactLists          = Collections.synchronizedMap(new WeakHashMap<String, List<?>>());
    Map<String, List<?>> allFactLists = new HashMap<String, List<?>>();

    //allFactLists are updated in place by the following call, including both facts and concepts...
    //==================================================================
    //      long usedMemory = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory before buildFactLists: " + usedMemory / 1000  + "KB");

    String focalPersonId = BuildCDSInputFactLists.buildFactLists(cdsInput, evalTime, allFactLists,
            timingDataKM);
    //String focalPersonId = "123^1.2.3";

    //      long usedMemory2 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after buildFactLists: " + usedMemory2 / 1000  + "KB, diff = " + (usedMemory2 - usedMemory) / 1000 + "KB");
    //==================================================================
    dssRequestKMItem.setFocalPersonId(focalPersonId);

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " built fact/concept lists for "
            + focalPersonId);

    timingDataKM.setFinishBuildConceptListsTime(new AtomicLong(System.nanoTime()));

    /** 
     * Get the KMs and Load them into a stateless session
     * 
     * Currently, assumption is made that each requested knowledge module will be run separately
     *  (i.e., as part of a separate distinct knowledge base)
     * 
     */
    File drlFile = null;
    File bpmnFile = null;
    File pkgFile = null;

    KnowledgeBase knowledgeBase = (KnowledgeBase) SimpleKnowledgeRepository
            .getKnowledgeBaseCache(requestedKmId);
    //      long usedMemory16 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after getKnowledgeBaseCache: " + usedMemory16 / 1000  + "KB, diff = " + (usedMemory16 - usedMemory2) / 1000 + "KB");
    //      long usedMemory17 = 0;

    //      synchronized (knowledgeBase) {
    if (knowledgeBase != null) {
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase from cache");

    } else {
        knowledgeBase = KnowledgeBaseFactory.newKnowledgeBase();
        KnowledgeBuilder knowledgeBuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();

        drlFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".drl");
        bpmnFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".bpmn");
        pkgFile = SimpleKnowledgeRepository.getResourceAsFileWithoutException("knowledgeModules",
                requestedKmId + ".pkg");

        if (drlFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(drlFile), ResourceType.DRL);
        if (bpmnFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(bpmnFile), ResourceType.BPMN2);
        if (pkgFile != null)
            knowledgeBuilder.add(ResourceFactory.newFileResource(pkgFile), ResourceType.PKG);
        if (knowledgeBuilder.hasErrors()) {
            throw new DSSRuntimeExceptionFault("KnowledgeBuilder had errors on build of: '" + requestedKmId
                    + "', " + knowledgeBuilder.getErrors().toString());
        }
        if (knowledgeBuilder.getKnowledgePackages().size() == 0) {
            throw new DSSRuntimeExceptionFault(
                    "KnowledgeBuilder did not find any VALID '.drl', '.bpmn' or '.pkg' files for: '"
                            + requestedKmId + "', " + knowledgeBuilder.getErrors().toString());
        }

        knowledgeBase.addKnowledgePackages(knowledgeBuilder.getKnowledgePackages());
        SimpleKnowledgeRepository.putKnowledgeBaseCache(requestedKmId, knowledgeBase);
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase built");
        //            usedMemory17 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
        //            // this is not needed, but it will make it easier to see the leak
        //            System.gc(); 
        //            System.out.println("KMId: " + requestedKmId + " used memory after buildKnowledgeBase: " + usedMemory17 / 1000  + "KB, diff = " + (usedMemory17 - usedMemory16) / 1000 + "KB");
    }
    //      }

    dssRequestKMItem.getKmTimingData().setFinishInsertKnowledgeTime(new AtomicLong(System.nanoTime()));

    StatelessKnowledgeSession statelessKnowledgeSession = knowledgeBase.newStatelessKnowledgeSession();
    //      long usedMemory18 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      if (usedMemory17 == 0) {
    ////         System.out.println("KMId: " + requestedKmId + " used memory after newStatelessKnowledgeSession: " + usedMemory18 / 1000  + "KB, diff = " + (usedMemory18 - usedMemory16) / 1000 + "KB");
    //      } else {
    //         System.out.println("KMId: " + requestedKmId + " used memory after newStatelessKnowledgeSession: " + usedMemory18 / 1000  + "KB, diff = " + (usedMemory18 - usedMemory17) / 1000 + "KB");
    //      }

    //        // to create a new Drools Working Memory Logger for in depth Drools debugging - Use either the InMemory  
    //           //   to record logs on all input, or use the FileLogger for debugging of one input at a time in Drools and JBPM
    //         WorkingMemoryInMemoryLogger memoryLogger   = new WorkingMemoryInMemoryLogger (statelessKnowledgeSession);
    //         WorkingMemoryFileLogger    fileLogger       = new WorkingMemoryFileLogger (statelessKnowledgeSession);   
    //         // If using the FileLogger, Set the log file that we will be using to log Working Memory (aka session)         
    //         fileLogger.setFileName("C:/opencds-logs/OpenCDS-Drools-event-log"); 
    //TODO:    make the above choice based on configuration settings

    dssRequestKMItem.getKmTimingData().setFinishStartKnowledgeSessionTime(new AtomicLong(System.nanoTime()));

    /**
     * Load the Globals and Fact lists:  evalTime, language, timezoneOffset
     * 
     */

    @SuppressWarnings("rawtypes")
    List<Command> cmds = Collections.synchronizedList(new ArrayList<Command>());

    /**
     * Load the Globals:  evalTime, language, timezoneOffset, focalPersonId, assertions, namedObjects
     * 
     */
    cmds.add(CommandFactory.newSetGlobal("evalTime", evalTime));
    cmds.add(CommandFactory.newSetGlobal("clientLanguage", clientLanguage));
    cmds.add(CommandFactory.newSetGlobal("clientTimeZoneOffset", clientTimeZoneOffset));
    cmds.add(CommandFactory.newSetGlobal("focalPersonId", dssRequestKMItem.getFocalPersonId()));

    //following global used to store flags for inter-task communication in a JBPM Process
    java.util.Set<String> assertions = new java.util.HashSet<String>();
    cmds.add(CommandFactory.newSetGlobal("assertions", assertions));

    //following global used to return facts added by rules, such as new observationResults
    java.util.Map<String, Object> namedObjects = new java.util.HashMap<String, Object>();
    cmds.add(CommandFactory.newSetGlobal("namedObjects", namedObjects));

    dssRequestKMItem.getKmTimingData().setFinishLoadGlobalsTime(new AtomicLong(System.nanoTime()));
    //      long usedMemory19 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after CommandFactory.newSetGlobal: " + usedMemory19 / 1000  + "KB, diff = " + (usedMemory19 - usedMemory18) / 1000 + "KB");

    /**
     * Load the FactLists into Commands:  Only ones that are not empty...
     * 
     */

    //does this whole thing needs to be made concurrent safe ?? Will this do it??
    synchronized (allFactLists) {
        for (String oneName : allFactLists.keySet()) {
            @SuppressWarnings("unchecked")
            List<Object> oneFactList = (List<Object>) allFactLists.get(oneName);
            String oneTypeName = "";
            for (Object oneFact : (List<Object>) oneFactList) {
                oneTypeName = oneFact.getClass().getSimpleName();
            }
            if (oneFactList.size() > 0) {
                cmds.add(CommandFactory.newInsertElements((List<?>) oneFactList, oneTypeName, true, null));
            } else {
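                    // NB: removing while iterating keySet() risks ConcurrentModificationException,
                    // and for an empty list oneTypeName is still "", so the wrong key is removed.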
                allFactLists.remove(oneTypeName);
            }
        }
    }
    //      long usedMemory20 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after CommandFactory.newInsertElements: " + usedMemory20 / 1000  + "KB, diff = " + (usedMemory20 - usedMemory19) / 1000 + "KB");

    dssRequestKMItem.getKmTimingData().setFinishLoadFactListsTime(new AtomicLong(System.nanoTime()));

    /**   
     * If this is a PKG (for package with process, initiate the configured Primary Process for JBPM.
     * 
     */

    if ((requestedKmPrimaryProcessName != null) && (!"".equals(requestedKmPrimaryProcessName))) {
        if ("".equals(requestedKmPrimaryProcessName)) {
            throw new DSSRuntimeExceptionFault("DroolsAdapter found improperly configured KM: " + requestedKmId
                    + ".  This KM includes a BPMN file, but does not have a value "
                    + "for 'knowledgeModulePrimaryProcessName' in its configuration.");
        }
        cmds.add(CommandFactory.newStartProcess(requestedKmPrimaryProcessName));
        log.debug("II: " + interactionId + " KMId: " + requestedKmId + " knowledgeBase Primary Process: "
                + requestedKmPrimaryProcessName);
    }
    //      long usedMemory21 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after CommandFactory.newInsertPrimaryProcess: " + usedMemory21 / 1000  + "KB, diff = " + (usedMemory21 - usedMemory20) / 1000 + "KB");

    dssRequestKMItem.getKmTimingData().setStartInferenceEngine(new AtomicLong(System.nanoTime()));

    /**   
     * Use Drools to process everything   
     * Added try/catch around stateless session. because Drools has an unhandled exception
     * when a JBPM Process improperly re-declares a global that is constraining a gateway
     * and the resultant global is null - des 20120727      
     ********************************************************************************
     */
    ExecutionResults results = null;
    try {

        results = statelessKnowledgeSession.execute(CommandFactory.newBatchExecution((cmds)));

    } catch (Exception e) {
        String err = "OpenCDS call to Drools.execute failed with error: " + e.getMessage();
        log.error(err);
        StackTraceElement elements[] = e.getStackTrace();
        for (int i = 0, n = elements.length; i < n; i++) {
            String detail = elements[i].getClassName() + ":" + elements[i].getMethodName() + ":"
                    + elements[i].getLineNumber();
            log.error(detail);
            err += "\n" + elements[i].getMethodName();
        }
        throw new DSSRuntimeExceptionFault(err);
    }
    //      long usedMemory22 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after statelessKnowledgeSession.execute: " + usedMemory22 / 1000  + "KB, diff = " + (usedMemory22 - usedMemory21) / 1000 + "KB");

    /**               
     ********************************************************************************
     *  END Drools
     *  
     */
    dssRequestKMItem.getKmTimingData().setFinishInferenceEngine(new AtomicLong(System.nanoTime()));
    //grab session logging of whichever type was started...
    //      log.trace(memoryLogger.getEvents());
    //      fileLogger.writeToDisk();

    //update original entries from allFactLists to capture any new or updated elements
    //** need to look for every possible fact list, because rules may have created new ones...
    //NOTE that results contains the original objects passed in via CMD structure, with any 
    //changes introduced by rules.

    Map<String, List<?>> resultFactLists = Collections.synchronizedMap(new WeakHashMap<String, List<?>>());

    synchronized (results) {
        Collection<String> allResultNames = results.getIdentifiers(); //includes concepts but not globals?
        for (String oneName : allResultNames) {
            if (!("evalTime".equals(oneName)) && !("clientLanguage".equals(oneName))
                    && !("clientTimeZoneOffset".equals(oneName))) {
                // ignore these submitted globals, they should not have been changed by rules, and look at everything else

                Object oneList = results.getValue(oneName);
                resultFactLists.put(oneName, (List<?>) oneList);
            }
        }
    }
    //      long usedMemory23 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after resultFactLists: " + usedMemory23 / 1000  + "KB, diff = " + (usedMemory23 - usedMemory22) / 1000 + "KB");

    /*
     * now process the returned namedObjects and add them to the resultFactLists
     * 
     */
    synchronized (namedObjects) {
        for (String key : namedObjects.keySet()) {
            if (namedObjects.get(key) != null) {
                Object oneNamedObject = namedObjects.get(key);
                //            String className = oneNamedObject.getClass().getSimpleName();
                @SuppressWarnings("unchecked")
                List<Object> oneList = (List<Object>) resultFactLists
                        .get(oneNamedObject.getClass().getSimpleName());
                if (oneList == null) {
                    oneList = new ArrayList<Object>();
                    oneList.add(oneNamedObject);
                } else {
                    oneList.add(oneNamedObject);
                }
                resultFactLists.put(oneNamedObject.getClass().getSimpleName(), oneList);
            }
        }
    }
    //      long usedMemory24 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after adding namedObjects: " + usedMemory24 / 1000  + "KB, diff = " + (usedMemory24 - usedMemory23) / 1000 + "KB");

    /** 
     * Retrieve the Results for this requested KM and stack them in the DSS fkmResponse
     * NOTE: Each additional requested KM will have a separate output payload
     */

    dssRequestKMItem.getKmTimingData().setFinishInferenceEngineAdapterTime(new AtomicLong(System.nanoTime()));
    log.debug(
            "II: " + interactionId + " KMId: " + requestedKmId + " begin marshalling results to external VMR ");

    //FIXME probably need to create static final string to identify output SSID, probably always as VMR
    //        String                  payloadCreatorName   = SimpleKnowledgeRepository.getRequiredPayloadCreatorForSSID(externalFactModelSSId);
    IOutboundPayloadProcessor payloadCreator = (IOutboundPayloadProcessor) SimpleKnowledgeRepository
            .getPayloadCreatorInstanceForClassNameCache(
                    SimpleKnowledgeRepository.getRequiredPayloadCreatorForSSID(externalFactModelSSId));
    //      long usedMemory25 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after getting payloadCreatorInstance: " + usedMemory25 / 1000  + "KB, diff = " + (usedMemory25 - usedMemory24) / 1000 + "KB");

    /*
     * following is normally instantiated as MarshalVMR2VMRSchemaPayload.getInstance().mappingOutbound( resultFactLists, dssRequestKMItem );
     * 
     */
    String outputString = payloadCreator.mappingOutbound(resultFactLists, dssRequestKMItem);
    //      long usedMemory26 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory after creating outputString: " + usedMemory26 / 1000  + "KB, diff = " + (usedMemory26 - usedMemory25) / 1000 + "KB");

    log.trace("II: " + interactionId + " KMId: " + requestedKmId
            + " finished marshalling results to external VMR, " + outputString.length() + " chars.");

    /*
     * clear out maps and arrays    
     *    
     */
    //        BuildCDSInputFactLists.clearAllFactLists(allFactLists);
    //      synchronized (allFactLists) {
    log.debug("clearAllFactLists");

    //         for (String eachKey : ((Map<String, List<?>>)allFactLists).keySet()) {
    //            if (allFactLists.get(eachKey) != null) {
    //               List<?> eachList = allFactLists.get(eachKey);
    //               eachList.clear();
    //               allFactLists.remove(eachKey);
    //            }
    //         }
    //      }   

    //      long usedMemory4 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      // this is not needed, but it will make it easier to see the leak
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter before clear: " + usedMemory4 / 1000  + "KB");

    allFactLists.clear();
    //      long usedMemory5 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after allFactLists.clear(): " + usedMemory5 / 1000  + "KB, diff = " + (usedMemory5 - usedMemory4) / 1000 + "KB");

    allFactLists = null;
    //      long usedMemory6 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after allFactLists = null: " + usedMemory6 / 1000  + "KB, diff = " + (usedMemory6 - usedMemory5) / 1000 + "KB");

    cmds.clear();
    //      long usedMemory7 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after cmds.clear(): " + usedMemory7 / 1000  + "KB, diff = " + (usedMemory7 - usedMemory6) / 1000 + "KB");

    cmds = null;
    //      long usedMemory8 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after cmds = null: " + usedMemory8 / 1000  + "KB, diff = " + (usedMemory8 - usedMemory7) / 1000 + "KB");

    assertions.clear();
    //      long usedMemory9 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after assertions.clear(): " + usedMemory9 / 1000  + "KB, diff = " + (usedMemory9 - usedMemory8) / 1000 + "KB");

    assertions = null;
    //      long usedMemory10 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after assertions = null;: " + usedMemory10 / 1000  + "KB, diff = " + (usedMemory10 - usedMemory9) / 1000 + "KB");

    namedObjects.clear();
    //      long usedMemory11 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after namedObjects.clear(): " + usedMemory11 / 1000  + "KB, diff = " + (usedMemory11 - usedMemory10) / 1000 + "KB");

    namedObjects = null;
    //      long usedMemory12 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after namedObjects = null: " + usedMemory12 / 1000  + "KB, diff = " + (usedMemory12 - usedMemory11) / 1000 + "KB");

    for (String oneId : results.getIdentifiers()) {
        //results.getFactHandle(oneId)
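        // NB: getValue(oneId) returns a result object, not an identifier, so this
        // remove is almost certainly a no-op; a removal that did succeed mid-iteration
        // could throw ConcurrentModificationException.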
        results.getIdentifiers().remove(results.getValue(oneId));
    }
    results = null;
    //      long usedMemory13 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after results = null: " + usedMemory13 / 1000  + "KB, diff = " + (usedMemory13 - usedMemory12) / 1000 + "KB");

    resultFactLists.values().clear();
    resultFactLists.clear();
    //      long usedMemory14 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after resultFactLists.clear(): " + usedMemory14 / 1000  + "KB, diff = " + (usedMemory14 - usedMemory13) / 1000 + "KB");

    resultFactLists = null;
    //      long usedMemory15 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after resultFactLists = null: " + usedMemory15 / 1000  + "KB, diff = " + (usedMemory15 - usedMemory14) / 1000 + "KB");

    //        dssRequestKMItem = null;

    //        statelessKnowledgeSession = null;

    log.debug("II: " + interactionId + " KMId: " + requestedKmId + " completed Drools inferencing engine");

    //      long usedMemory3 = Runtime.getRuntime().totalMemory()- Runtime.getRuntime().freeMemory();
    //      System.gc(); 
    //      System.out.println("KMId: " + requestedKmId + " used memory at end of DroolsAdapter after clear: " + usedMemory3 / 1000  + "KB, diff = " + (usedMemory3 - usedMemory4) / 1000 + "KB");

    return outputString;
    //        return "";
}
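
One detail worth noting in the adapter above: resultFactLists wraps a WeakHashMap, the combination the WeakHashMap javadoc itself suggests for a synchronized map with weakly referenced keys. A minimal sketch of that caching pattern (class and method names are illustrative):

import java.util.Collections;
import java.util.Map;
import java.util.WeakHashMap;

public class WeakCache {
    // Entries may be discarded once their keys are no longer strongly referenced elsewhere.
    private final Map<Object, String> cache = Collections
            .synchronizedMap(new WeakHashMap<Object, String>());

    public String describe(Object key) {
        String value = cache.get(key);
        if (value == null) {
            value = "computed:" + key; // stand-in for an expensive computation
            cache.put(key, value);     // benign race: worst case is recomputing the value
        }
        return value;
    }
}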

From source file:org.apereo.portal.groups.smartldap.SmartLdapGroupStore.java

private GroupsTree buildGroupsTree() {

    long timestamp = System.currentTimeMillis();

    // Prepare the new local indices...
    Map<String, IEntityGroup> new_groups = Collections.synchronizedMap(new HashMap<String, IEntityGroup>());
    Map<String, List<String>> new_parents = Collections.synchronizedMap(new HashMap<String, List<String>>());
    Map<String, List<String>> new_children = Collections.synchronizedMap(new HashMap<String, List<String>>());
    Map<String, List<String>> new_keysByUpperCaseName = Collections
            .synchronizedMap(new HashMap<String, List<String>>());

    // Gather IEntityGroup objects from LDAP...
    RuntimeRequestResponse req = new RuntimeRequestResponse();
    Set<LdapRecord> set = new HashSet<>();
    req.setAttribute("GROUPS", set);
    req.setAttribute("smartLdapGroupStore", this);
    SubQueryCounter queryCounter = new SubQueryCounter();
    req.setAttribute("queryCounter", queryCounter);
    req.setAttribute("filter", filter); // This one changes iteratively...
    req.setAttribute("baseFilter", filter); // while this one stays the same.
    if (StringUtils.isBlank(baseGroupDn)) {
        throw new IllegalStateException("baseGroupDn property not set");
    }
    req.setAttribute("baseGroupDn", baseGroupDn);
    if (ldapContext == null) {
        throw new IllegalStateException("ldapContext property not set");
    }
    req.setAttribute("ldapContext", ldapContext);
    req.setAttribute("resolveMemberGroups", resolveMemberGroups);
    req.setAttribute("resolveDnList", resolveDnList);
    req.setAttribute("memberOfAttributeName", memberOfAttributeName);
    req.setAttribute("attributesMapper", attributesMapper);
    runner.run(initTask, req);

    log.info("init() found {} records", set.size());

    // Do a first loop to build the main catalog (new_groups)...
    for (LdapRecord r : set) {

        // new_groups (me)...
        IEntityGroup g = r.getGroup();
        new_groups.put(g.getLocalKey(), g);

    }

    // Do a second loop to build local indices...
    for (LdapRecord r : set) {

        IEntityGroup g = r.getGroup();

        // new_parents (I am a parent for all my children)...
        for (String childKey : r.getKeysOfChildren()) {

            // NB:  We're only interested in relationships between 
            // objects in the main catalog (i.e. new_groups);  
            // discard everything else...
            if (!new_groups.containsKey(childKey)) {
                break;
            }

            List<String> parentsList = new_parents.get(childKey);
            if (parentsList == null) {
                // first parent for this child...
                parentsList = Collections.synchronizedList(new LinkedList<String>());
                new_parents.put(childKey, parentsList);
            }
            parentsList.add(g.getLocalKey());

        }

        // new_children...
        List<String> childrenList = Collections.synchronizedList(new LinkedList<String>());
        for (String childKey : r.getKeysOfChildren()) {
            // NB:  We're only interested in relationships between 
            // objects in the main catalog (i.e. new_groups);  
            // discard everything else...
            if (new_groups.containsKey(childKey)) {
                childrenList.add(childKey);
            }
        }
        new_children.put(g.getLocalKey(), childrenList);

        // new_keysByUpperCaseName...
        List<String> groupsWithMyName = new_keysByUpperCaseName.get(g.getName().toUpperCase());
        if (groupsWithMyName == null) {
            // I am the first group with my name (pretty likely)...
            groupsWithMyName = Collections.synchronizedList(new LinkedList<String>());
            new_keysByUpperCaseName.put(g.getName().toUpperCase(), groupsWithMyName);
        }
        groupsWithMyName.add(g.getLocalKey());

    }

    /*
     * Now load the ROOT_GROUP into the collections...
     */

    // new_groups (me)...
    final IEntityGroup root = getRootGroup();
    new_groups.put(root.getLocalKey(), root);

    // new_parents (I am a parent for all groups that have no other parent)...
    List<String> childrenOfRoot = Collections.synchronizedList(new LinkedList<String>()); // for later...
    for (String possibleChildKey : new_groups.keySet()) {
        if (!possibleChildKey.equals(root.getLocalKey()) && !new_parents.containsKey(possibleChildKey)) {
            List<String> p = Collections.synchronizedList(new LinkedList<String>());
            p.add(root.getLocalKey());
            new_parents.put(possibleChildKey, p);
            childrenOfRoot.add(possibleChildKey); // for later...
        }
    }

    // new_children...
    new_children.put(root.getLocalKey(), childrenOfRoot);

    // new_keysByUpperCaseName...
    List<String> groupsWithMyName = new_keysByUpperCaseName.get(root.getName().toUpperCase());
    if (groupsWithMyName == null) {
        // I am the first group with my name (pretty likely)...
        groupsWithMyName = Collections.synchronizedList(new LinkedList<String>());
        new_keysByUpperCaseName.put(root.getName().toUpperCase(), groupsWithMyName);
    }
    groupsWithMyName.add(root.getLocalKey());

    final long benchmark = System.currentTimeMillis() - timestamp;
    log.info("Refresh of groups tree completed in {} milliseconds", benchmark);
    log.info("Total number of LDAP queries:  {}", queryCounter.getCount() + 1);
    final String msg = "init() :: final size of each collection is as follows..." + "\n\tgroups={}"
            + "\n\tparents={}" + "\n\tchildren={}" + "\n\tkeysByUpperCaseName={}";
    log.info(msg, new_groups.size(), new_parents.size(), new_children.size(), new_keysByUpperCaseName.size());

    if (log.isTraceEnabled()) {

        StringBuilder sbuilder = new StringBuilder();

        // new_groups...
        sbuilder.setLength(0);
        sbuilder.append("Here are the keys of the new_groups collection:");
        for (String s : new_groups.keySet()) {
            sbuilder.append("\n\t").append(s);
        }
        log.trace(sbuilder.toString());

        // new_parents...
        sbuilder.setLength(0);
        sbuilder.append("Here are the parents of each child in the new_parents collection:");
        for (Map.Entry<String, List<String>> y : new_parents.entrySet()) {
            sbuilder.append("\n\tchild=").append(y.getKey());
            for (String s : y.getValue()) {
                sbuilder.append("\n\t\tparent=").append(s);
            }
        }
        log.trace(sbuilder.toString());

        // new_children...
        sbuilder.setLength(0);
        sbuilder.append("Here are the children of each parent in the new_children collection:");
        for (Map.Entry<String, List<String>> y : new_children.entrySet()) {
            sbuilder.append("\n\tparent=").append(y.getKey());
            for (String s : y.getValue()) {
                sbuilder.append("\n\t\tchild=").append(s);
            }
        }
        log.trace(sbuilder.toString());

        // new_keysByUpperCaseName...
        sbuilder.append("Here are the groups that have each name in the new_keysByUpperCaseName collection:");
        for (Map.Entry<String, List<String>> y : new_keysByUpperCaseName.entrySet()) {
            sbuilder.append("\n\tname=").append(y.getKey());
            for (String s : y.getValue()) {
                sbuilder.append("\n\t\tgroup=").append(s);
            }
        }
        log.trace(sbuilder.toString());

    }

    return new GroupsTree(new_groups, new_parents, new_children, new_keysByUpperCaseName);

}