Example usage for java.util Collections synchronizedMap

Introduction

On this page you can find example usage for java.util Collections synchronizedMap.

Prototype

public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m) 

Document

Returns a synchronized (thread-safe) map backed by the specified map.
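
Individual operations on the returned map are synchronized, but iterating over any of its collection views is a compound action: the javadoc requires the caller to synchronize on the returned map for the whole traversal. A minimal sketch of this pattern (the class and variable names are illustrative, not taken from the examples below):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedMapSketch {
    public static void main(String[] args) {
        // Wrap a plain HashMap; every access must go through the wrapper.
        Map<String, Integer> counts = Collections.synchronizedMap(new HashMap<String, Integer>());

        counts.put("a", 1); // individual calls are synchronized internally
        counts.put("b", 2);

        // Iteration is not atomic: hold the map's monitor for the whole loop.
        synchronized (counts) {
            for (Map.Entry<String, Integer> e : counts.entrySet()) {
                System.out.println(e.getKey() + " = " + e.getValue());
            }
        }
    }
}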

Usage

From source file: com.cyberway.issue.crawler.frontier.WorkQueueFrontier.java

/**
 * Initializes the Frontier, given the supplied CrawlController.
 *
 * @see com.cyberway.issue.crawler.framework.Frontier#initialize(com.cyberway.issue.crawler.framework.CrawlController)
 */
public void initialize(CrawlController c) throws FatalConfigurationException, IOException {
    // Call the super method. It sets up frontier journalling.
    super.initialize(c);
    this.controller = c;

    initQueuesOfQueues();

    this.targetSizeForReadyQueues = (Integer) getUncheckedAttribute(null, ATTR_TARGET_READY_QUEUES_BACKLOG);
    if (this.targetSizeForReadyQueues < 1) {
        this.targetSizeForReadyQueues = 1;
    }
    this.wakeTimer = new Timer("waker for " + c.toString());

    try {
        if (workQueueDataOnDisk() && getQueueAssignmentPolicy(null).maximumNumberOfKeys() >= 0
                && getQueueAssignmentPolicy(null)
                        .maximumNumberOfKeys() <= MAX_QUEUES_TO_HOLD_ALLQUEUES_IN_MEMORY) {
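            // the assignment policy bounds the number of queues, so they can all
            // be held in memory behind a synchronized HashMap wrapper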
            this.allQueues = Collections.synchronizedMap(new HashMap<String, WorkQueue>());
        } else {
            this.allQueues = c.getBigMap("allqueues", String.class, WorkQueue.class);
            if (logger.isLoggable(Level.FINE)) {
                Iterator i = this.allQueues.keySet().iterator();
                try {
                    for (; i.hasNext();) {
                        logger.fine((String) i.next());
                    }
                } finally {
                    StoredIterator.close(i);
                }
            }
        }
        this.alreadyIncluded = createAlreadyIncluded();
        initQueue();
    } catch (IOException e) {
        e.printStackTrace();
        throw (FatalConfigurationException) new FatalConfigurationException(e.getMessage()).initCause(e);
    } catch (Exception e) {
        e.printStackTrace();
        throw (FatalConfigurationException) new FatalConfigurationException(e.getMessage()).initCause(e);
    }

    initCostPolicy();

    loadSeeds();
}

From source file: com.github.aptd.simulation.datamodel.CXMLReader.java

/**
 * create the train list
 *
 * @param p_network network component
 * @param p_agents map with agent asl scripts
 * @param p_factory factory
 * @return unmodifiable map with trains
 */
private static Pair<Map<String, ITrain<?>>, Map<String, IDoor<?>>> train(final Network p_network,
        final Map<String, String> p_agents, final IFactory p_factory, final ITime p_time,
        final double p_minfreetimetoclose) {
    final String l_dooragent = IStatefulElement.getDefaultAsl("door");
    final Map<String, IElement.IGenerator<ITrain<?>>> l_generators = new ConcurrentHashMap<>();
    final Set<IAction> l_actions = CCommon.actionsFromPackage().collect(Collectors.toSet());
    final IElement.IGenerator<IDoor<?>> l_doorgenerator = doorgenerator(p_factory, l_dooragent, l_actions,
            p_time);
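    // both maps below are populated from a parallel stream, so they are wrapped for thread-safe access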
    final Map<String, AtomicLong> l_doorcount = Collections.synchronizedMap(new HashMap<>());
    final Map<String, IDoor<?>> l_doors = Collections.synchronizedMap(new HashMap<>());
    return new ImmutablePair<>(
            Collections.<String, ITrain<?>>unmodifiableMap(
                    p_network.getTimetable().getTrains().getTrain().parallelStream()
                            .filter(i -> hasagentname(i.getAny3())).map(i -> agentname(i, i.getAny3()))
                            .map(i -> l_generators
                                    .computeIfAbsent(i.getRight(),
                                            a -> traingenerator(p_factory, p_agents.get(i.getRight()),
                                                    l_actions, p_time))
                                    .generatesingle(i.getLeft().getId(),
                                            i.getLeft().getTrainPartSequence().stream().flatMap(ref -> {
                                                // @todo support multiple train parts
                                                final EOcpTT[] l_tts = ((ETrainPart) ref.getTrainPartRef()
                                                        .get(0).getRef()).getOcpsTT().getOcpTT()
                                                                .toArray(new EOcpTT[0]);
                                                final CTrain.CTimetableEntry[] l_entries = new CTrain.CTimetableEntry[l_tts.length];
                                                for (int j = 0; j < l_tts.length; j++) {
                                                    final EArrivalDepartureTimes l_times = l_tts[j].getTimes()
                                                            .stream()
                                                            .filter(t -> t.getScope()
                                                                    .equalsIgnoreCase("published"))
                                                            .findAny().orElseThrow(() -> new CSemanticException(
                                                                    "missing published times"));
                                                    l_entries[j] = new CTrain.CTimetableEntry(
                                                            j < 1 ? 0.0
                                                                    : ((ETrack) l_tts[j - 1].getSectionTT()
                                                                            .getTrackRef().get(0).getRef())
                                                                                    .getTrackTopology()
                                                                                    .getTrackEnd().getPos()
                                                                                    .doubleValue(),
                                                            ((EOcp) l_tts[j].getOcpRef()).getId(),
                                                            l_tts[j].getStopDescription().getOtherAttributes()
                                                                    .getOrDefault(PLATFORM_REF_ATTRIBUTE, null),
                                                            l_times.getArrival() == null ? null
                                                                    : l_times.getArrival().toGregorianCalendar()
                                                                            .toZonedDateTime()
                                                                            .with(LocalDate.from(p_time
                                                                                    .current()
                                                                                    .atZone(ZoneId
                                                                                            .systemDefault())))
                                                                            .toInstant(),
                                                            l_times.getDeparture() == null ? null
                                                                    : l_times.getDeparture()
                                                                            .toGregorianCalendar()
                                                                            .toZonedDateTime()
                                                                            .with(LocalDate.from(p_time
                                                                                    .current()
                                                                                    .atZone(ZoneId
                                                                                            .systemDefault())))
                                                                            .toInstant());
                                                }
                                                return Arrays.stream(l_entries);
                                            }), i.getLeft().getTrainPartSequence().stream()
                                                    // @todo support multiple train parts
                                                    .map(s -> (ETrainPart) s.getTrainPartRef().get(0).getRef())
                                                    .map(p -> (EFormation) p.getFormationTT().getFormationRef())
                                                    .flatMap(f -> f.getTrainOrder().getVehicleRef().stream())
                                                    .map(r -> new ImmutablePair<BigInteger, TDoors>(
                                                            r.getVehicleCount(),
                                                            ((EVehicle) r.getVehicleRef()).getWagon()
                                                                    .getPassenger().getDoors()))
                                                    .flatMap(v -> IntStream
                                                            .range(0,
                                                                    v.getLeft().intValue() * v.getRight()
                                                                            .getNumber().intValue())
                                                            .mapToObj(j -> l_doors.computeIfAbsent("door-"
                                                                    + i.getLeft().getId() + "-"
                                                                    + l_doorcount
                                                                            .computeIfAbsent(i.getLeft()
                                                                                    .getId(),
                                                                                    id -> new AtomicLong(1L))
                                                                            .getAndIncrement(),
                                                                    id -> l_doorgenerator.generatesingle(id,
                                                                            i.getLeft().getId(),
                                                                            v.getRight().getEntranceWidth()
                                                                                    .doubleValue()
                                                                                    / v.getRight().getNumber()
                                                                                            .longValue(),
                                                                            p_minfreetimetoclose))))
                                                    .collect(Collectors.toList())))
                            .collect(Collectors.toMap(IElement::id, i -> i))),
            l_doors);
}

From source file: eu.itesla_project.modules.wca.WCATool.java

@Override
public void run(CommandLine line) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    String offlineWorkflowId = line.getOptionValue("offline-workflow-id"); // may be null, meaning no offline security rules are used
    Interval histoInterval = Interval.parse(line.getOptionValue("history-interval"));
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    double purityThreshold = DEFAULT_PURITY_THRESHOLD;
    if (line.hasOption("purity-threshold")) {
        purityThreshold = Double.parseDouble(line.getOptionValue("purity-threshold"));
    }
    Set<SecurityIndexType> securityIndexTypes = null;
    if (line.hasOption("security-index-types")) {
        securityIndexTypes = Arrays.stream(line.getOptionValue("security-index-types").split(","))
                .map(SecurityIndexType::valueOf).collect(Collectors.toSet());
    }
    Path outputCsvFile = null;
    if (line.hasOption("output-csv-file")) {
        outputCsvFile = Paths.get(line.getOptionValue("output-csv-file"));
    }
    boolean stopWcaOnViolations = DEFAULT_STOP_WCA_ON_VIOLATIONS;
    if (line.hasOption("stop-on-violations")) {
        stopWcaOnViolations = Boolean.parseBoolean(line.getOptionValue("stop-on-violations"));
    }

    try (ComputationManager computationManager = new LocalComputationManager()) {
        WCAParameters parameters = new WCAParameters(histoInterval, offlineWorkflowId, securityIndexTypes,
                purityThreshold, stopWcaOnViolations);
        OnlineConfig config = OnlineConfig.load();
        ContingenciesAndActionsDatabaseClient contingenciesDb = config.getContingencyDbClientFactoryClass()
                .newInstance().create();
        LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
        WCAFactory wcaFactory = config.getWcaFactoryClass().newInstance();
        try (HistoDbClient histoDbClient = new SynchronizedHistoDbClient(
                config.getHistoDbClientFactoryClass().newInstance().create());
                RulesDbClient rulesDbClient = config.getRulesDbClientFactoryClass().newInstance()
                        .create(rulesDbName)) {

            UncertaintiesAnalyserFactory uncertaintiesAnalyserFactory = config
                    .getUncertaintiesAnalyserFactoryClass().newInstance();

            if (Files.isRegularFile(caseFile)) {
                if (outputCsvFile != null) {
                    throw new RuntimeException(
                            "In case of single wca, only standard output pretty print is supported");
                }
                System.out.println("loading case...");
                // load the network
                Network network = Importers.loadNetwork(caseFile);
                if (network == null) {
                    throw new RuntimeException("Case '" + caseFile + "' not found");
                }
                network.getStateManager().allowStateMultiThreadAccess(true);

                WCA wca = wcaFactory.create(network, computationManager, histoDbClient, rulesDbClient,
                        uncertaintiesAnalyserFactory, contingenciesDb, loadFlowFactory);
                WCAAsyncResult result = wca.runAsync(StateManager.INITIAL_STATE_ID, parameters).join();

                Table table = new Table(3, BorderStyle.CLASSIC_WIDE);
                table.addCell("Contingency");
                table.addCell("Cluster");
                table.addCell("Causes");

                List<CompletableFuture<WCACluster>> futureClusters = new LinkedList<>(result.getClusters());
                while (futureClusters.size() > 0) {
                    CompletableFuture
                            .anyOf(futureClusters.toArray(new CompletableFuture[futureClusters.size()])).join();
                    for (Iterator<CompletableFuture<WCACluster>> it = futureClusters.iterator(); it
                            .hasNext();) {
                        CompletableFuture<WCACluster> futureCluster = it.next();
                        if (futureCluster.isDone()) {
                            it.remove();
                            WCACluster cluster = futureCluster.get();

                            if (cluster != null) {
                                System.out.println("contingency " + cluster.getContingency().getId() + " done: "
                                        + cluster.getNum() + " (" + cluster.getOrigin() + ")");

                                table.addCell(cluster.getContingency().getId());
                                table.addCell(cluster.getNum() + " (" + cluster.getOrigin() + ")");
                                List<String> sortedCauses = cluster.getCauses().stream().sorted()
                                        .collect(Collectors.toList());
                                if (sortedCauses != null && sortedCauses.size() > 0) {
                                    table.addCell(sortedCauses.get(0));
                                    for (int i = 1; i < sortedCauses.size(); i++) {
                                        table.addCell("");
                                        table.addCell("");
                                        table.addCell(sortedCauses.get(i));
                                    }
                                } else {
                                    table.addCell("");
                                }
                            }
                        }
                    }
                }

                System.out.println(table.render());
            } else if (Files.isDirectory(caseFile)) {
                if (outputCsvFile == null) {
                    throw new RuntimeException(
                            "In case of multiple wca, you have to specify an output csv file");
                }

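                // cases may be loaded and processed in parallel, so results are
                // collected into synchronized collections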
                Map<String, Map<String, WCACluster>> clusterPerContingencyPerBaseCase = Collections
                        .synchronizedMap(new TreeMap<>());
                Set<String> contingencyIds = Collections.synchronizedSet(new TreeSet<>());

                Importers.loadNetworks(caseFile, true, network -> {
                    try {
                        network.getStateManager().allowStateMultiThreadAccess(true);
                        String baseStateId = network.getId();
                        network.getStateManager().cloneState(StateManager.INITIAL_STATE_ID, baseStateId);
                        network.getStateManager().setWorkingState(baseStateId);

                        WCA wca = wcaFactory.create(network, computationManager, histoDbClient, rulesDbClient,
                                uncertaintiesAnalyserFactory, contingenciesDb, loadFlowFactory);
                        WCAAsyncResult result = wca.runAsync(baseStateId, parameters).join();

                        Map<String, WCACluster> clusterPerContingency = new HashMap<>();

                        List<CompletableFuture<WCACluster>> futureClusters = new LinkedList<>(
                                result.getClusters());
                        while (futureClusters.size() > 0) {
                            CompletableFuture
                                    .anyOf(futureClusters.toArray(new CompletableFuture[futureClusters.size()]))
                                    .join();
                            for (Iterator<CompletableFuture<WCACluster>> it = futureClusters.iterator(); it
                                    .hasNext();) {
                                CompletableFuture<WCACluster> futureCluster = it.next();
                                if (futureCluster.isDone()) {
                                    it.remove();
                                    WCACluster cluster = futureCluster.get();
                                    if (cluster != null) {
                                        System.out.println("case " + network.getId() + ", contingency "
                                                + cluster.getContingency().getId() + " done: "
                                                + cluster.getNum() + " (" + cluster.getOrigin() + ")");

                                        clusterPerContingency.put(cluster.getContingency().getId(), cluster);
                                        contingencyIds.add(cluster.getContingency().getId());
                                    }
                                }
                            }
                        }

                        clusterPerContingencyPerBaseCase.put(network.getId(), clusterPerContingency);
                    } catch (Exception e) {
                        LOGGER.error(e.toString(), e);
                    }
                }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + "..."));

                writeClustersCsv(clusterPerContingencyPerBaseCase, contingencyIds, outputCsvFile);
            }
        }
    }
}

From source file: org.apache.hadoop.hbase.crosssite.coprocessor.TestCrossSiteCoprocessor.java

@Test
public void testCoprocessorWithErrors() throws Throwable {
    CrossSiteHTable table = new CrossSiteHTable(TEST_UTIL.getConfiguration(), TABLE_NAME);
    ColumnAggregationWithErrorsProtos.SumRequest.Builder builder = ColumnAggregationWithErrorsProtos.SumRequest
            .newBuilder();
    builder.setFamily(HBaseZeroCopyByteString.wrap(CF));
    if (QN != null && QN.length > 0) {
        builder.setQualifier(HBaseZeroCopyByteString.wrap(QN));
    }
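    // the Batch.Callback below may be invoked concurrently for different regions,
    // so the result map is synchronized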
    final Map<byte[], Long> results = Collections
            .synchronizedMap(new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR));
    boolean hasErrors = false;
    try {
        table.coprocessorService(ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors.class,
                HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
                new Batch.Call<ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors, Long>() {
                    @Override
                    public Long call(
                            ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors instance)
                            throws IOException {
                        BlockingRpcCallback<ColumnAggregationWithErrorsProtos.SumResponse> rpcCallback = new BlockingRpcCallback<ColumnAggregationWithErrorsProtos.SumResponse>();
                        ColumnAggregationWithErrorsProtos.SumRequest.Builder builder = ColumnAggregationWithErrorsProtos.SumRequest
                                .newBuilder();
                        builder.setFamily(HBaseZeroCopyByteString.wrap(CF));
                        if (QN != null && QN.length > 0) {
                            builder.setQualifier(HBaseZeroCopyByteString.wrap(QN));
                        }
                        instance.sum(null, builder.build(), rpcCallback);
                        return rpcCallback.get().getSum();
                    }
                }, new Batch.Callback<Long>() {
                    public void update(byte[] region, byte[] row, Long value) {
                        results.put(region, value);
                    }
                });
    } catch (Exception e) {
        hasErrors = true;
    }
    assertEquals(true, hasErrors);
    long sumResult = 0;
    long expectedResult = 0;
    for (Map.Entry<byte[], Long> e : results.entrySet()) {
        LOG.info("Got value " + e.getValue().longValue() + " for region " + Bytes.toStringBinary(e.getKey()));
        sumResult += e.getValue().longValue();
    }
    for (int i = 1; i < 10; i++) {
        expectedResult += i;
    }
    expectedResult *= 2;
    assertEquals("Invalid result", expectedResult, sumResult);
    table.close();
}

From source file: org.esupportail.lecture.domain.model.ChannelConfig.java

/**
 * @param channel the channel to populate with contexts and category profiles
 */
@SuppressWarnings("unchecked")
public static void loadContextsAndCategoryprofiles(final Channel channel) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("loadContextsAndCategoryprofiles()");
    }
    String categoryProfileId = "";
    Node channelConfig = xmlFile.getRootElement();
    List<Node> contexts = channelConfig.selectNodes("context");
    for (Node context : contexts) {
        Context c = new Context();
        c.setId(context.valueOf("@id"));
        c.setName(context.valueOf("@name"));
        //treeVisible
        String treeVisible = context.valueOf("@treeVisible");
        if (treeVisible.equals("no")) {
            c.setTreeVisible(TreeDisplayMode.NOTVISIBLE);
        } else if (treeVisible.equals("forceNo")) {
            c.setTreeVisible(TreeDisplayMode.NEVERVISIBLE);
        } else {
            c.setTreeVisible(TreeDisplayMode.VISIBLE);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("loadContextsAndCategoryprofiles() : contextId " + c.getId());
        }
        Node description = context.selectSingleNode("description");
        c.setDescription(description.getStringValue());
        List<Node> refCategoryProfiles = context.selectNodes("refCategoryProfile");

        // Read the refCategoryProfilesUrl entries, then:
        // - turn them into refCategoryProfile entries in the context
        // - add the categoryProfile entries
        // To be done in checkXmlFile?

        Map<String, Integer> orderedCategoryIDs = Collections.synchronizedMap(new HashMap<String, Integer>());
        int xmlOrder = 1;

        // Iterate over the context's refCategoryProfile nodes
        for (Node refCategoryProfile : refCategoryProfiles) {
            String refId;
            // Add the mcp
            refId = refCategoryProfile.valueOf("@refId");
            if (LOG.isDebugEnabled()) {
                LOG.debug("loadContextsAndCategoryprofiles() : refCategoryProfileId " + refId);
            }
            List<Node> categoryProfiles = channelConfig.selectNodes("categoryProfile");
            // Iterate over the root's categoryProfile nodes
            for (Node categoryProfile : categoryProfiles) {
                categoryProfileId = categoryProfile.valueOf("@id");
                if (LOG.isDebugEnabled()) {
                    LOG.debug("loadContextsAndCategoryprofiles() : is categoryProfileId " + categoryProfileId
                            + " matching ?");
                }
                if (categoryProfileId.compareTo(refId) == 0) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("loadContextsAndCategoryprofiles() : categoryProfileId " + refId
                                + " matches... create mcp");
                    }
                    ManagedCategoryProfile mcp = new ManagedCategoryProfile();
                    // Id = the long (context-qualified) id
                    String mcpProfileID = categoryProfileId;
                    mcp.setFileId(c.getId(), mcpProfileID);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("loadContextsAndCategoryprofiles() : categoryProfileId " + mcp.getId()
                                + " matches... create mcp");
                    }

                    mcp.setName(categoryProfile.valueOf("@name"));
                    mcp.setCategoryURL(categoryProfile.valueOf("@urlCategory"));
                    mcp.setTrustCategory(getBoolean(categoryProfile.valueOf("@trustCategory"), false));
                    mcp.setUserCanMarkRead(getBoolean(categoryProfile.valueOf("@userCanMarkRead"), true));
                    String specificUserContentValue = categoryProfile.valueOf("@specificUserContent");
                    if (specificUserContentValue.equals("yes")) {
                        mcp.setSpecificUserContent(true);
                    } else {
                        mcp.setSpecificUserContent(false);
                    }

                    String ttl = categoryProfile.valueOf("@ttl");
                    mcp.setTtl(Integer.parseInt(ttl));
                    String timeout = categoryProfile.valueOf("@timeout");
                    mcp.setTimeOut(Integer.parseInt(timeout));
                    mcp.setName(categoryProfile.valueOf("@name"));

                    // Accessibility
                    String access = categoryProfile.valueOf("@access");
                    if (access.equalsIgnoreCase("public")) {
                        mcp.setAccess(Accessibility.PUBLIC);
                    } else if (access.equalsIgnoreCase("cas")) {
                        mcp.setAccess(Accessibility.CAS);
                    }
                    // Visibility
                    VisibilitySets visibilitySets = new VisibilitySets();
                    // for each visibility set: allowed / autoSubscribed / obliged
                    visibilitySets.setAllowed(loadDefAndContentSets("allowed", categoryProfile));
                    visibilitySets.setAutoSubscribed(loadDefAndContentSets("autoSubscribed", categoryProfile));
                    visibilitySets.setObliged(loadDefAndContentSets("obliged", categoryProfile));
                    mcp.setVisibility(visibilitySets);

                    channel.addManagedCategoryProfile(mcp);
                    c.addRefIdManagedCategoryProfile(mcp.getId());
                    orderedCategoryIDs.put(mcp.getId(), xmlOrder);

                    break;
                }
            }
            xmlOrder += 1;
        }
        c.setOrderedCategoryIDs(orderedCategoryIDs);
        channel.addContext(c);
    }
}

From source file: com.kenai.redminenb.repository.RedmineRepository.java

private synchronized Map<String, RedmineQuery> getQueryMap() {
    if (queries == null) {
        queries = Collections.synchronizedMap(new HashMap<String, RedmineQuery>());
        String[] qs = RedmineConfig.getInstance().getQueries(getID());
        for (String queryName : qs) {
            RedmineQuery q = RedmineConfig.getInstance().getQuery(this, queryName);
            if (q != null) {
                queries.put(queryName, q);
            } else {
                Redmine.LOG.log(Level.WARNING, "Couldn''t find query with stored name {0}", queryName); // NOI18N
            }
        }
    }
    return queries;
}

From source file: mitm.common.sms.transport.clickatell.ClickatellSMSTransport.java

public void setAdditionalParameters(Map<String, String> parameters) {
    additionalParameters = Collections.synchronizedMap(new HashMap<String, String>());

    additionalParameters.putAll(parameters);
}

From source file: edu.ku.brc.specify.tasks.subpane.qb.ERTICaptionInfoTreeLevelGrp.java

/**
 * Returns the ids of a node and its ancestors in the form "id1,
 * id2, ..., idN". It caches the recursive ancestor lookups because
 * Specify calls this for each result row/column.
 *
 * @author lchan
 * @param clazz
 * @param nodeId
 * @return
 */
private String getAncestorsIn(Class<?> clazz, Integer nodeId) {
    DataProviderSessionIFace mySession = DataProviderFactory.getInstance().createSession();
    try {

        String ancestorsIn;
        QueryIFace query = mySession
                .createQuery("select e.parent.id from " + clazz.getName() + " e where e.id = :nodeId", false);
        query.setParameter("nodeId", nodeId);
        Integer parentId = (Integer) query.uniqueResult();
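        // Note: this creates a fresh wrapper (with its own mutex) on each call;
        // callers that need mutual exclusion across threads must share one wrapper.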
        Map<Integer, String> ancestorsCacheSync = Collections.synchronizedMap(ancestorsCache);
        if (ancestorsCacheSync.containsKey(parentId)) {
            ancestorsIn = ancestorsCacheSync.get(parentId);
        } else {
            ancestorsIn = getAncestorsInRecursive(clazz, nodeId, mySession);
            ancestorsCacheSync.put(parentId, ancestorsIn);
        }

        String in = nodeId + ", " + ancestorsIn;
        in = "in(" + in.substring(0, in.length() - 2) + ")";

        return in;
    } finally {
        mySession.close();
    }
}

From source file: op.FrmMain.java

public void afterLogin() {
    OPDE.getDisplayManager().touch();
    dlgLogin = null;

    if (OPDE.isTraining()) {
        JTextPane txtMessage = new JTextPane();
        txtMessage.setFont(new Font("Arial", Font.PLAIN, 18));
        txtMessage.setEditable(false);
        txtMessage.setContentType("text/html");
        txtMessage.setText(SYSTools.toHTMLForScreen(SYSTools.xx("opde.general.training.version.message")));

        JOptionPane.showConfirmDialog(this, txtMessage, SYSTools.xx("opde.general.training.version.title"),
                JOptionPane.DEFAULT_OPTION, JOptionPane.INFORMATION_MESSAGE);
    }

    if (specialities != null) {
        synchronized (specialities) {
            SYSTools.clear(specialities);
        }
    }

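    // compound operations on this map are guarded below by client-side locking
    // (synchronized blocks on the map itself), as the synchronizedMap contract requires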
    specialities = Collections.synchronizedMap(new HashMap<Integer, Set<Resident>>());
    synchronized (specialities) {
        specialities.put(ResInfoTypeTools.TYPE_ABSENCE, new HashSet<Resident>());
        specialities.put(ResInfoTypeTools.TYPE_INFECTION, new HashSet<Resident>());
        specialities.put(ResInfoTypeTools.TYPE_WARNING, new HashSet<Resident>());
        specialities.put(ResInfoTypeTools.TYPE_ALLERGY, new HashSet<Resident>());
        specialities.put(ResInfoTypeTools.TYPE_DIABETES, new HashSet<Resident>());

        for (ResInfo info : ResInfoTools.getSpecialInfos()) {
            specialities.get(info.getResInfoType().getType()).add(info.getResident());
        }
    }

    prepareSearchArea();
    labelUSER.setText(OPDE.getLogin().getUser().getFullname());

    Runnable runnable = new Runnable() {
        @Override
        public void run() {
            initPhase = true;
            double pos;
            try {
                pos = Double.parseDouble(
                        OPDE.getProps().getProperty("opde.mainframe:splitPaneLeftDividerLocation"));
            } catch (Exception e) {
                pos = 0.5d;
            }
            splitPaneLeft.setDividerLocation(0, SYSTools.getDividerInAbsolutePosition(splitPaneLeft, pos));
            initPhase = false;
            homeButton.doClick();
        }
    };

    SwingUtilities.invokeLater(runnable);
}

From source file: eu.itesla_project.modules.validation.OfflineValidationTool.java

@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    String workflowId = line.getOptionValue("workflow");
    Path outputDir = Paths.get(line.getOptionValue("output-dir"));
    double purityThreshold = line.hasOption("purity-threshold")
            ? Double.parseDouble(line.getOptionValue("purity-threshold"))
            : DEFAULT_PURITY_THRESHOLD;
    Set<Country> countries = Arrays.stream(line.getOptionValue("base-case-countries").split(","))
            .map(Country::valueOf).collect(Collectors.toSet());
    Interval histoInterval = Interval.parse(line.getOptionValue("history-interval"));
    boolean mergeOptimized = line.hasOption("merge-optimized");
    CaseType caseType = CaseType.valueOf(line.getOptionValue("case-type"));

    CaseRepositoryFactory caseRepositoryFactory = config.getCaseRepositoryFactoryClass().newInstance();
    RulesDbClientFactory rulesDbClientFactory = config.getRulesDbClientFactoryClass().newInstance();
    ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
            .newInstance().create();
    SimulatorFactory simulatorFactory = config.getSimulatorFactoryClass().newInstance();
    LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    MergeOptimizerFactory mergeOptimizerFactory = config.getMergeOptimizerFactoryClass().newInstance();

    SimulationParameters simulationParameters = SimulationParameters.load();

    try (ComputationManager computationManager = new LocalComputationManager();
            RulesDbClient rulesDb = rulesDbClientFactory.create(rulesDbName);
            CsvMetricsDb metricsDb = new CsvMetricsDb(outputDir, true, "metrics")) {

        CaseRepository caseRepository = caseRepositoryFactory.create(computationManager);

        Queue<DateTime> dates = Queues.synchronizedDeque(
                new ArrayDeque<>(caseRepository.dataAvailable(caseType, countries, histoInterval)));

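        // the worker tasks below fill these maps concurrently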
        Map<String, Map<RuleId, ValidationStatus>> statusPerRulePerCase = Collections
                .synchronizedMap(new TreeMap<>());
        Map<String, Map<RuleId, Map<HistoDbAttributeId, Object>>> valuesPerRulePerCase = Collections
                .synchronizedMap(new TreeMap<>());

        int cores = Runtime.getRuntime().availableProcessors();
        ExecutorService executorService = Executors.newFixedThreadPool(cores);
        try {
            List<Future<?>> tasks = new ArrayList<>(cores);
            for (int i = 0; i < cores; i++) {
                tasks.add(executorService.submit((Runnable) () -> {
                    while (dates.size() > 0) {
                        DateTime date = dates.poll();

                        try {
                            Network network = MergeUtil.merge(caseRepository, date, caseType, countries,
                                    loadFlowFactory, 0, mergeOptimizerFactory, computationManager,
                                    mergeOptimized);

                            System.out.println("case " + network.getId() + " loaded");

                            System.out.println("running simulation on " + network.getId() + "...");

                            network.getStateManager().allowStateMultiThreadAccess(true);
                            String baseStateId = network.getId();
                            network.getStateManager().cloneState(StateManager.INITIAL_STATE_ID, baseStateId);
                            network.getStateManager().setWorkingState(baseStateId);

                            Map<RuleId, ValidationStatus> statusPerRule = new HashMap<>();
                            Map<RuleId, Map<HistoDbAttributeId, Object>> valuesPerRule = new HashMap<>();

                            LoadFlow loadFlow = loadFlowFactory.create(network, computationManager, 0);
                            LoadFlowResult loadFlowResult = loadFlow.run();

                            System.err.println("load flow terminated (" + loadFlowResult.isOk() + ") on "
                                    + network.getId());

                            if (loadFlowResult.isOk()) {
                                Stabilization stabilization = simulatorFactory.createStabilization(network,
                                        computationManager, 0);
                                ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(network,
                                        computationManager, 0, contingencyDb);
                                Map<String, Object> context = new HashMap<>();
                                stabilization.init(simulationParameters, context);
                                impactAnalysis.init(simulationParameters, context);
                                StabilizationResult stabilizationResult = stabilization.run();

                                System.err.println("stabilization terminated ("
                                        + stabilizationResult.getStatus() + ") on " + network.getId());

                                metricsDb.store(workflowId, network.getId(), "STABILIZATION",
                                        stabilizationResult.getMetrics());

                                if (stabilizationResult.getStatus() == StabilizationStatus.COMPLETED) {
                                    ImpactAnalysisResult impactAnalysisResult = impactAnalysis
                                            .run(stabilizationResult.getState());

                                    System.err.println("impact analysis terminated on " + network.getId());

                                    metricsDb.store(workflowId, network.getId(), "IMPACT_ANALYSIS",
                                            impactAnalysisResult.getMetrics());

                                    System.out.println("checking rules on " + network.getId() + "...");

                                    for (SecurityIndex securityIndex : impactAnalysisResult
                                            .getSecurityIndexes()) {
                                        for (RuleAttributeSet attributeSet : RuleAttributeSet.values()) {
                                            statusPerRule.put(new RuleId(attributeSet, securityIndex.getId()),
                                                    new ValidationStatus(null, securityIndex.isOk()));
                                        }
                                    }
                                }
                            }

                            Map<HistoDbAttributeId, Object> values = IIDM2DB
                                    .extractCimValues(network, new IIDM2DB.Config(null, false))
                                    .getSingleValueMap();
                            for (RuleAttributeSet attributeSet : RuleAttributeSet.values()) {
                                for (Contingency contingency : contingencyDb.getContingencies(network)) {
                                    List<SecurityRule> securityRules = rulesDb.getRules(workflowId,
                                            attributeSet, contingency.getId(), null);
                                    for (SecurityRule securityRule : securityRules) {
                                        SecurityRuleExpression securityRuleExpression = securityRule
                                                .toExpression(purityThreshold);
                                        SecurityRuleCheckReport checkReport = securityRuleExpression
                                                .check(values);

                                        valuesPerRule.put(securityRule.getId(),
                                                ExpressionAttributeList
                                                        .list(securityRuleExpression.getCondition()).stream()
                                                        .collect(Collectors.toMap(attributeId -> attributeId,
                                                                new Function<HistoDbAttributeId, Object>() {
                                                                    @Override
                                                                    public Object apply(
                                                                            HistoDbAttributeId attributeId) {
                                                                        Object value = values.get(attributeId);
                                                                        return value != null ? value
                                                                                : Float.NaN;
                                                                    }
                                                                })));

                                        ValidationStatus status = statusPerRule.get(securityRule.getId());
                                        if (status == null) {
                                            status = new ValidationStatus(null, null);
                                            statusPerRule.put(securityRule.getId(), status);
                                        }
                                        if (checkReport.getMissingAttributes().isEmpty()) {
                                            status.setRuleOk(checkReport.isSafe());
                                        }
                                    }
                                }
                            }

                            statusPerRulePerCase.put(network.getId(), statusPerRule);
                            valuesPerRulePerCase.put(network.getId(), valuesPerRule);
                        } catch (Exception e) {
                            LOGGER.error(e.toString(), e);
                        }
                    }
                }));
            }
            for (Future<?> task : tasks) {
                task.get();
            }
        } finally {
            executorService.shutdown();
            executorService.awaitTermination(1, TimeUnit.MINUTES);
        }

        writeCsv(statusPerRulePerCase, valuesPerRulePerCase, outputDir);
    }
}