Example usage for java.util.concurrent ExecutorService awaitTermination

List of usage examples for java.util.concurrent ExecutorService awaitTermination

Introduction

On this page you can find example usages of java.util.concurrent ExecutorService awaitTermination.

Prototype

boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException;

Document

Blocks until all tasks have completed execution after a shutdown request, or the timeout occurs, or the current thread is interrupted, whichever happens first.
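
For reference, here is a minimal sketch of the standard shutdown sequence recommended by the ExecutorService Javadoc; the pool size, task submissions, and 60-second timeouts below are illustrative rather than taken from any of the examples that follow.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

ExecutorService pool = Executors.newFixedThreadPool(4);
// ... submit tasks ...
pool.shutdown(); // stop accepting new tasks; already-submitted tasks keep running
try {
    // Wait up to 60 seconds for the submitted tasks to finish.
    if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
        pool.shutdownNow(); // timed out: cancel running tasks via interruption
        // Wait again for the tasks to respond to cancellation.
        if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
            System.err.println("Pool did not terminate");
        }
    }
} catch (InterruptedException e) {
    pool.shutdownNow();
    Thread.currentThread().interrupt(); // preserve the interrupt status
}

Note that awaitTermination does not itself initiate a shutdown: shutdown() or shutdownNow() must be called first, otherwise the call simply blocks until the timeout elapses and returns false.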

Usage

From source file:io.nats.client.ITClusterTest.java

@Test
public void testHotSpotReconnect() throws InterruptedException {
    int numClients = 100;
    ExecutorService executor = Executors.newFixedThreadPool(numClients,
            new NatsThreadFactory("testhotspotreconnect"));

    final BlockingQueue<String> rch = new LinkedBlockingQueue<String>();
    final BlockingQueue<Integer> dch = new LinkedBlockingQueue<Integer>();
    final AtomicBoolean shutdown = new AtomicBoolean(false);
    try (NatsServer s1 = runServerOnPort(1222)) {
        try (NatsServer s2 = runServerOnPort(1224)) {
            try (NatsServer s3 = runServerOnPort(1226)) {

                final class NATSClient implements Runnable {
                    Connection nc = null;
                    final AtomicInteger numReconnects = new AtomicInteger(0);
                    final AtomicInteger numDisconnects = new AtomicInteger(0);
                    String currentUrl = null;
                    final AtomicInteger instance = new AtomicInteger(-1);

                    final Options opts;

                    NATSClient(int inst) {
                        this.instance.set(inst);
                        opts = defaultOptions();
                        opts.servers = Nats.processUrlArray(testServers);

                        opts.disconnectedCb = new DisconnectedCallback() {
                            public void onDisconnect(ConnectionEvent event) {
                                numDisconnects.incrementAndGet();
                                try {
                                    dch.put(instance.get());
                                } catch (InterruptedException e) {
                                    e.printStackTrace();
                                }
                                nc.setDisconnectedCallback(null);
                            }
                        };
                        opts.reconnectedCb = new ReconnectedCallback() {
                            public void onReconnect(ConnectionEvent event) {
                                numReconnects.incrementAndGet();
                                currentUrl = nc.getConnectedUrl();
                                try {
                                    rch.put(currentUrl);
                                } catch (InterruptedException e) {
                                    e.printStackTrace();
                                }
                            }
                        };
                    }

                    @Override
                    public void run() {
                        try {
                            nc = opts.connect();
                            assertFalse(nc.isClosed());
                            assertNotNull(nc.getConnectedUrl());
                            currentUrl = nc.getConnectedUrl();
                            // System.err.println("Instance " + instance + " connected to " +
                            // currentUrl);
                            while (!shutdown.get()) {
                                sleep(10);
                            }
                            nc.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }

                    public synchronized boolean isConnected() {
                        return (nc != null && !nc.isClosed());
                    }

                    public void shutdown() {
                        shutdown.set(true);
                    }
                }

                List<NATSClient> tasks = new ArrayList<NATSClient>(numClients);
                for (int i = 0; i < numClients; i++) {
                    NATSClient task = new NATSClient(i);
                    tasks.add(task);
                    executor.submit(task);
                }

                Map<String, Integer> cs = new HashMap<String, Integer>();

                int numReady = 0;
                while (numReady < numClients) {
                    numReady = 0;
                    for (NATSClient cli : tasks) {
                        if (cli.isConnected()) {
                            numReady++;
                        }
                    }
                    sleep(100);
                }

                s1.shutdown();
                sleep(1000);

                int disconnected = 0;
                // wait for disconnects
                while (dch.size() > 0 && disconnected < numClients) {
                    Integer instance = dch.poll(5, TimeUnit.SECONDS);
                    assertNotNull("timed out waiting for disconnect signal", instance);
                    disconnected++;
                }
                assertTrue(disconnected > 0);

                int reconnected = 0;
                // wait for reconnects
                for (int i = 0; i < disconnected; i++) {
                    String url = null;
                    while (rch.size() == 0) {
                        sleep(50);
                    }
                    url = rch.poll(5, TimeUnit.SECONDS);
                    assertNotNull("timed out waiting for reconnect signal", url);
                    reconnected++;
                    Integer count = cs.get(url);
                    if (count != null) {
                        cs.put(url, ++count);
                    } else {
                        cs.put(url, 1);
                    }
                }

                for (NATSClient client : tasks) {
                    client.shutdown();
                }
                executor.shutdownNow();
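                // The shutdown flag is already set, so every client thread should exit within 2 seconds.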
                assertTrue(executor.awaitTermination(2, TimeUnit.SECONDS));

                assertEquals(disconnected, reconnected);

                int numServers = 2;

                assertEquals(numServers, cs.size());

                int expected = numClients / numServers;
                // We expect a 40 percent variance
                int var = (int) ((float) expected * 0.40);

                int delta = Math.abs(cs.get(testServers[2]) - cs.get(testServers[4]));
                // System.err.printf("var = %d, delta = %d\n", var, delta);
                if (delta > var) {
                    String str = String.format("Connected clients to servers out of range: %d/%d", delta, var);
                    fail(str);
                }
            }
        }
    }
}

From source file:eu.itesla_project.modules.validation.OfflineValidationTool.java

@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    String workflowId = line.getOptionValue("workflow");
    Path outputDir = Paths.get(line.getOptionValue("output-dir"));
    double purityThreshold = line.hasOption("purity-threshold")
            ? Double.parseDouble(line.getOptionValue("purity-threshold"))
            : DEFAULT_PURITY_THRESHOLD;
    Set<Country> countries = Arrays.stream(line.getOptionValue("base-case-countries").split(","))
            .map(Country::valueOf).collect(Collectors.toSet());
    Interval histoInterval = Interval.parse(line.getOptionValue("history-interval"));
    boolean mergeOptimized = line.hasOption("merge-optimized");
    CaseType caseType = CaseType.valueOf(line.getOptionValue("case-type"));

    CaseRepositoryFactory caseRepositoryFactory = config.getCaseRepositoryFactoryClass().newInstance();
    RulesDbClientFactory rulesDbClientFactory = config.getRulesDbClientFactoryClass().newInstance();
    ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
            .newInstance().create();
    SimulatorFactory simulatorFactory = config.getSimulatorFactoryClass().newInstance();
    LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    MergeOptimizerFactory mergeOptimizerFactory = config.getMergeOptimizerFactoryClass().newInstance();

    SimulationParameters simulationParameters = SimulationParameters.load();

    try (ComputationManager computationManager = new LocalComputationManager();
            RulesDbClient rulesDb = rulesDbClientFactory.create(rulesDbName);
            CsvMetricsDb metricsDb = new CsvMetricsDb(outputDir, true, "metrics")) {

        CaseRepository caseRepository = caseRepositoryFactory.create(computationManager);

        Queue<DateTime> dates = Queues.synchronizedDeque(
                new ArrayDeque<>(caseRepository.dataAvailable(caseType, countries, histoInterval)));

        Map<String, Map<RuleId, ValidationStatus>> statusPerRulePerCase = Collections
                .synchronizedMap(new TreeMap<>());
        Map<String, Map<RuleId, Map<HistoDbAttributeId, Object>>> valuesPerRulePerCase = Collections
                .synchronizedMap(new TreeMap<>());

        int cores = Runtime.getRuntime().availableProcessors();
        ExecutorService executorService = Executors.newFixedThreadPool(cores);
        try {
            List<Future<?>> tasks = new ArrayList<>(cores);
            for (int i = 0; i < cores; i++) {
                tasks.add(executorService.submit((Runnable) () -> {
                    while (dates.size() > 0) {
                        DateTime date = dates.poll();

                        try {
                            Network network = MergeUtil.merge(caseRepository, date, caseType, countries,
                                    loadFlowFactory, 0, mergeOptimizerFactory, computationManager,
                                    mergeOptimized);

                            System.out.println("case " + network.getId() + " loaded");

                            System.out.println("running simulation on " + network.getId() + "...");

                            network.getStateManager().allowStateMultiThreadAccess(true);
                            String baseStateId = network.getId();
                            network.getStateManager().cloneState(StateManager.INITIAL_STATE_ID, baseStateId);
                            network.getStateManager().setWorkingState(baseStateId);

                            Map<RuleId, ValidationStatus> statusPerRule = new HashMap<>();
                            Map<RuleId, Map<HistoDbAttributeId, Object>> valuesPerRule = new HashMap<>();

                            LoadFlow loadFlow = loadFlowFactory.create(network, computationManager, 0);
                            LoadFlowResult loadFlowResult = loadFlow.run();

                            System.err.println("load flow terminated (" + loadFlowResult.isOk() + ") on "
                                    + network.getId());

                            if (loadFlowResult.isOk()) {
                                Stabilization stabilization = simulatorFactory.createStabilization(network,
                                        computationManager, 0);
                                ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(network,
                                        computationManager, 0, contingencyDb);
                                Map<String, Object> context = new HashMap<>();
                                stabilization.init(simulationParameters, context);
                                impactAnalysis.init(simulationParameters, context);
                                StabilizationResult stabilizationResult = stabilization.run();

                                System.err.println("stabilization terminated ("
                                        + stabilizationResult.getStatus() + ") on " + network.getId());

                                metricsDb.store(workflowId, network.getId(), "STABILIZATION",
                                        stabilizationResult.getMetrics());

                                if (stabilizationResult.getStatus() == StabilizationStatus.COMPLETED) {
                                    ImpactAnalysisResult impactAnalysisResult = impactAnalysis
                                            .run(stabilizationResult.getState());

                                    System.err.println("impact analysis terminated on " + network.getId());

                                    metricsDb.store(workflowId, network.getId(), "IMPACT_ANALYSIS",
                                            impactAnalysisResult.getMetrics());

                                    System.out.println("checking rules on " + network.getId() + "...");

                                    for (SecurityIndex securityIndex : impactAnalysisResult
                                            .getSecurityIndexes()) {
                                        for (RuleAttributeSet attributeSet : RuleAttributeSet.values()) {
                                            statusPerRule.put(new RuleId(attributeSet, securityIndex.getId()),
                                                    new ValidationStatus(null, securityIndex.isOk()));
                                        }
                                    }
                                }
                            }

                            Map<HistoDbAttributeId, Object> values = IIDM2DB
                                    .extractCimValues(network, new IIDM2DB.Config(null, false))
                                    .getSingleValueMap();
                            for (RuleAttributeSet attributeSet : RuleAttributeSet.values()) {
                                for (Contingency contingency : contingencyDb.getContingencies(network)) {
                                    List<SecurityRule> securityRules = rulesDb.getRules(workflowId,
                                            attributeSet, contingency.getId(), null);
                                    for (SecurityRule securityRule : securityRules) {
                                        SecurityRuleExpression securityRuleExpression = securityRule
                                                .toExpression(purityThreshold);
                                        SecurityRuleCheckReport checkReport = securityRuleExpression
                                                .check(values);

                                        valuesPerRule.put(securityRule.getId(),
                                                ExpressionAttributeList
                                                        .list(securityRuleExpression.getCondition()).stream()
                                                        .collect(Collectors.toMap(attributeId -> attributeId,
                                                                new Function<HistoDbAttributeId, Object>() {
                                                                    @Override
                                                                    public Object apply(
                                                                            HistoDbAttributeId attributeId) {
                                                                        Object value = values.get(attributeId);
                                                                        return value != null ? value
                                                                                : Float.NaN;
                                                                    }
                                                                })));

                                        ValidationStatus status = statusPerRule.get(securityRule.getId());
                                        if (status == null) {
                                            status = new ValidationStatus(null, null);
                                            statusPerRule.put(securityRule.getId(), status);
                                        }
                                        if (checkReport.getMissingAttributes().isEmpty()) {
                                            status.setRuleOk(checkReport.isSafe());
                                        }
                                    }
                                }
                            }

                            statusPerRulePerCase.put(network.getId(), statusPerRule);
                            valuesPerRulePerCase.put(network.getId(), valuesPerRule);
                        } catch (Exception e) {
                            LOGGER.error(e.toString(), e);
                        }
                    }
                }));
            }
            for (Future<?> task : tasks) {
                task.get();
            }
        } finally {
            executorService.shutdown();
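            // wait up to one minute for any in-flight validation task to finish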
            executorService.awaitTermination(1, TimeUnit.MINUTES);
        }

        writeCsv(statusPerRulePerCase, valuesPerRulePerCase, outputDir);
    }
}

From source file:org.corpus_tools.peppermodules.annis.Salt2ANNISMapper.java

@Override
public DOCUMENT_STATUS mapSDocument() {

    this.preorderTable = new ConcurrentHashMap<>();
    this.postorderTable = new ConcurrentHashMap<>();
    prePostOrder = 0l;

    numberOfMappedNodes.set(0);

    if (this.getDocument() == null || this.getDocument().getDocumentGraph() == null) {
        throw new PepperModuleException(this, "Cannot map sDocumentGraph, because sDocumentGraph is null.");
    }

    {//start traversion of documentStructure

        try {

            if (this.getDocument().getDocumentGraph().getNodes() != null) {
                this.numberOfDocumentNodes = this.getDocument().getDocumentGraph().getNodes().size();
            }

            /**
             * traverse by SpanningRelations: DOCUMENT_STRUCTURE_CR
             * DominanceRelations: DOCUMENT_STRUCTURE_DR PointingRelations:
             * DOCUMENT_STRUCTURE_PR
             *
             * DominanceRelations Subcomponents: DOCUMENT_STRUCTURE_DR_SUB
             * PointingRelations Subcomponents: DOCUMENT_STRUCTURE_PR_SUB
             *
             * Dominance relations may consist of different subcomponents since
             * there are "edge" and "secedge" types
             *
             * Since every root node has its own component, the pre and post order
             * need to be 0 for the root node. You need to handle this.
             */
            List<? extends SNode> sRelationRoots;
            Multimap<String, SNode> subComponentRoots;
            //        Map<String, List<SNode>> subComponentRoots;

            Map<SToken, Long> token2Index = calculateToken2Index(getDocument().getDocumentGraph());

            // START Step 1: map SOrderRelation
            subComponentRoots = this.getDocument().getDocumentGraph()
                    .getRootsByRelationType(SALT_TYPE.SORDER_RELATION);
            if (subComponentRoots != null) {
                if (subComponentRoots.size() > 0) {
                    for (Entry<String, SNode> entry : subComponentRoots.entries()) {
                        SRelation2ANNISMapper sOrderRelationMapper = new SOrderRelation2ANNISMapper(
                                getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node,
                                tw_nodeAnno, tw_rank, tw_edgeAnno, tw_component, this);

                        String traversionType = entry.getKey();
                        if (SaltUtil.SALT_NULL_VALUE.equals(traversionType)) {
                            traversionType = "default_seg";
                        }
                        sOrderRelationMapper.setTraversionSType(traversionType);
                        sOrderRelationMapper.mapSRelations2ANNIS(subComponentRoots.get(entry.getKey()),
                                SALT_TYPE.SORDER_RELATION, null);

                    }
                }
            }
            // END Step 1: map SOrderRelation

            // also map the timeline (by creating a virtual tokenization if necessary)
            STimelineRelation2ANNISMapper timelineMapper = new STimelineRelation2ANNISMapper(getIdManager(),
                    getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno, tw_rank, tw_edgeAnno,
                    tw_component, this, mergeTextsWithTimeline);
            timelineMapper.run();

            // START Step 2: map SText
            if (idManager.hasVirtualTokenization()) {
                Long sDocID;
                Long textId = 0l;
                String sDocumentElementId = this.getDocument().getId();

                if (sDocumentElementId == null) {
                    throw new PepperModuleException(this,
                            "SId Id of the document '" + this.getDocument().getName() + "' is NULL!");
                }
                sDocID = this.idManager.getNewCorpusTabId(sDocumentElementId);
                String textName = "sText0";
                String textContent = Strings.repeat(" ", idManager.getNumberOfVirtualToken());
                ArrayList<String> tuple = new ArrayList<>();
                tuple.add(sDocID.toString());
                tuple.add(textId.toString());
                tuple.add(textName);
                tuple.add(textContent);

                long transactionId = tw_text.beginTA();
                try {
                    tw_text.addTuple(transactionId, tuple);
                    tw_text.commitTA(transactionId);

                } catch (FileNotFoundException e) {
                    tw_text.abortTA(transactionId);
                    throw new PepperModuleException(this,
                            "Could not write to the node.tab, exception was" + e.getMessage());
                }
            } else {
                this.mapSText();
            }

            ExecutorService exec = null;
            if (mapRelationsInParallel) {
                exec = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
            }

            subComponentRoots = getDocument().getDocumentGraph()
                    .getRootsByRelationType(SALT_TYPE.SPOINTING_RELATION);
            if (subComponentRoots != null) {
                //System.out.println("The Pointing relation graphs have "+ subComponentRoots.size() + " STypes.");
                if (subComponentRoots.size() > 0) {

                    for (String key : subComponentRoots.keySet()) {
                        //System.out.println("Count of PR roots for key "+key+" : "+subComponentRoots.get(key).size());
                        //System.out.println("Mapping PointingRelation subcomponents with sType: "+key);
                        SRelation2ANNISMapper sPointingSubRelationMapper = new SPointingRelation2ANNISMapper(
                                getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node,
                                tw_nodeAnno, tw_rank, tw_edgeAnno, tw_component, this);
                        sPointingSubRelationMapper.mapSRelations2ANNIS(subComponentRoots.get(key),
                                SALT_TYPE.SPOINTING_RELATION, TRAVERSION_TYPE.DOCUMENT_STRUCTURE_PR);
                        sPointingSubRelationMapper.setTraversionSType(key);
                        if (exec != null) {
                            exec.execute(sPointingSubRelationMapper);
                        } else {
                            sPointingSubRelationMapper.run();
                        }
                    }
                } else {
                    //System.out.println("No PointingRelation components found (null map)");
                }
            } else {
                //System.out.println("No PointingRelation components found (empty map)");
            }
            // END Step 2: map SPointingRelations

            // START Step 3: map SDominanceRelations
            sRelationRoots = this.getDocument().getDocumentGraph()
                    .getRootsByRelation(SALT_TYPE.SDOMINANCE_RELATION);
            if (sRelationRoots != null) {
                if (sRelationRoots.size() > 0) {
                    SRelation2ANNISMapper sDominanceRelationMapper = new SDominanceRelation2ANNISMapper(
                            getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno,
                            tw_rank, tw_edgeAnno, tw_component, this);
                    sDominanceRelationMapper.mapSRelations2ANNIS(sRelationRoots, SALT_TYPE.SDOMINANCE_RELATION,
                            TRAVERSION_TYPE.DOCUMENT_STRUCTURE_DR);
                    if (exec != null) {
                        exec.execute(sDominanceRelationMapper);
                    } else {
                        sDominanceRelationMapper.run();
                    }
                }
            }
            // END Step 3: map SDominanceRelations

            // START Step 3.1 : map the subComponents of the SDominanceRelations
            subComponentRoots = getDocument().getDocumentGraph()
                    .getRootsByRelationType(SALT_TYPE.SDOMINANCE_RELATION);
            if (subComponentRoots != null) {
                //System.out.println("The Dominance relation graphs have "+ subComponentRoots.size() + " STypes.");
                if (subComponentRoots.size() > 0) {

                    Set<String> domComponentTypeNames = subComponentRoots.keySet();

                    // only output the named relation types if the user has not chosen
                    // to exclude single dominance types, or if there are two or more named types
                    if (!((ANNISExporterProperties) this.getProperties()).getExcludeSingleDomType()
                            || domComponentTypeNames.size() >= 2) {
                        for (String key : domComponentTypeNames) {

                            if (!SaltUtil.SALT_NULL_VALUE.equals(key)) {

                                SRelation2ANNISMapper sDominanceSubRelationMapper = new SDominanceRelation2ANNISMapper(
                                        getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node,
                                        tw_nodeAnno, tw_rank, tw_edgeAnno, tw_component, this);
                                sDominanceSubRelationMapper.setTraversionSType(key);
                                sDominanceSubRelationMapper.mapSRelations2ANNIS(subComponentRoots.get(key),
                                        SALT_TYPE.SDOMINANCE_RELATION, TRAVERSION_TYPE.DOCUMENT_STRUCTURE_DR);
                                if (exec != null) {
                                    exec.execute(sDominanceSubRelationMapper);
                                } else {
                                    sDominanceSubRelationMapper.run();
                                }
                            }
                        }
                    }
                } else {
                    //System.out.println("No DominanceRelation subcomponents found (null map)");
                }
            } else {
                //System.out.println("No DominanceRelation subcomponents found (empty map)");
            }
            // END Step 3.1 : map the subComponents of the SDominanceRelations

            // START Step 4: map SSpanningrelations
            sRelationRoots = this.getDocument().getDocumentGraph()
                    .getRootsByRelation(SALT_TYPE.SSPANNING_RELATION);
            if (sRelationRoots != null) {
                if (sRelationRoots.size() > 0) {
                    SRelation2ANNISMapper spanningRelationMapper = new SSpanningRelation2ANNISMapper(
                            getIdManager(), getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno,
                            tw_rank, tw_edgeAnno, tw_component, this);
                    spanningRelationMapper.mapSRelations2ANNIS(sRelationRoots, SALT_TYPE.SSPANNING_RELATION,
                            TRAVERSION_TYPE.DOCUMENT_STRUCTURE_CR);
                    if (exec != null) {
                        exec.execute(spanningRelationMapper);
                    } else {
                        spanningRelationMapper.run();
                    }
                }
            }
            // END Step 4: map SSpanningrelations

            // START Step 5: map SMedialRelations
            sRelationRoots = this.getDocument().getDocumentGraph().getTokens();
            if (sRelationRoots != null) {
                if (sRelationRoots.size() > 0) {
                    SRelation2ANNISMapper audioRelationMapper = new Audio2ANNISMapper(getIdManager(),
                            getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno, tw_rank,
                            tw_edgeAnno, tw_component, this);
                    audioRelationMapper.mapSRelations2ANNIS(sRelationRoots,
                            SALT_TYPE.STIME_OVERLAPPING_RELATION, TRAVERSION_TYPE.DOCUMENT_STRUCTURE_AUDIO);
                    if (exec != null) {
                        exec.execute(audioRelationMapper);
                    } else {
                        audioRelationMapper.run();
                    }
                }
            }
            // END Step 5: map SMedialRelations

            if (exec != null) {
                exec.shutdown();
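                // block in 60-second intervals until every queued mapper task has finished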
                while (!exec.awaitTermination(60, TimeUnit.SECONDS)) {
                    // wait to finish
                }
            }

            // START Step 6: map all SToken which were not mapped, yet
            SRelation2ANNISMapper mapper = new SSpanningRelation2ANNISMapper(getIdManager(),
                    getDocument().getDocumentGraph(), token2Index, tw_node, tw_nodeAnno, tw_rank, tw_edgeAnno,
                    tw_component, this);
            mapper.beginTransaction();
            for (SNode node : getDocument().getDocumentGraph().getTokens()) {
                if (this.idManager.getVirtualisedSpanId(node.getId()) == null) {
                    mapper.mapSNode(node);
                }
            }
            mapper.commitTransaction();
            // END Step 6: map all SToken which were not mapped, yet

        } catch (PepperModuleException | InterruptedException e) {
            throw new PepperModuleException(this,
                    "Some error occurred while traversing the document structure graph.", e);
        }
    } //end traversion of documentStructure

    mergeLocalStatsIntoGlobal();

    setProgress(1.0);
    return DOCUMENT_STATUS.COMPLETED;
}

From source file:mamo.vanillaVotifier.VotifierServer.java

public synchronized void start() throws IOException {
    if (isRunning()) {
        throw new IllegalStateException("Server is already running!");
    }
    notifyListeners(new ServerStartingEvent());
    serverSocket = new ServerSocket();
    serverSocket.bind(votifier.getConfig().getInetSocketAddress());
    running = true;
    notifyListeners(new ServerStartedEvent());
    new Thread(new Runnable() {
        @Override
        public void run() {
            ExecutorService executorService = Executors.newSingleThreadExecutor();
            while (isRunning()) {
                try {
                    final Socket socket = serverSocket.accept();
                    executorService.execute(new Runnable() {
                        @Override
                        public void run() {
                            try {
                                notifyListeners(new ConnectionEstablishedEvent(socket));
                                socket.setSoTimeout(SocketOptions.SO_TIMEOUT); // SocketException: handled by try/catch.
                                BufferedWriter writer = new BufferedWriter(
                                        new OutputStreamWriter(socket.getOutputStream()));
                                writer.write("VOTIFIER 2.9\n");
                                writer.flush();
                                BufferedInputStream in = new BufferedInputStream(socket.getInputStream()); // IOException: handled by try/catch.
                                byte[] request = new byte[((RSAPublicKey) votifier.getConfig().getKeyPair()
                                        .getPublic()).getModulus().bitLength() / Byte.SIZE];
                                in.read(request); // IOException: handled by try/catch.
                                notifyListeners(new EncryptedInputReceivedEvent(socket, new String(request)));
                                request = RsaUtils
                                        .getDecryptCipher(votifier.getConfig().getKeyPair().getPrivate())
                                        .doFinal(request); // IllegalBlockSizeException: can't happen.
                                String requestString = new String(request);
                                notifyListeners(new DecryptedInputReceivedEvent(socket, requestString));
                                String[] requestArray = requestString.split("\n");
                                if ((requestArray.length == 5 || requestArray.length == 6)
                                        && requestArray[0].equals("VOTE")) {
                                    notifyListeners(new VoteEventVotifier(socket, new Vote(requestArray[1],
                                            requestArray[2], requestArray[3], requestArray[4])));
                                    for (VoteAction voteAction : votifier.getConfig().getVoteActions()) {
                                        String[] params = new String[4];
                                        try {
                                            for (int i = 0; i < params.length; i++) {
                                                params[i] = SubstitutionUtils.applyRegexReplacements(
                                                        requestArray[i + 1], voteAction.getRegexReplacements());
                                            }
                                        } catch (PatternSyntaxException e) {
                                            notifyListeners(new RegularExpressionPatternErrorException(e));
                                            params = new String[] { requestArray[1], requestArray[2],
                                                    requestArray[3], requestArray[4] };
                                        }
                                        if (voteAction.getCommandSender() instanceof RconCommandSender) {
                                            RconCommandSender commandSender = (RconCommandSender) voteAction
                                                    .getCommandSender();
                                            StrSubstitutor substitutor = SubstitutionUtils.buildStrSubstitutor(
                                                    new SimpleEntry<String, Object>("service-name", params[0]),
                                                    new SimpleEntry<String, Object>("user-name", params[1]),
                                                    new SimpleEntry<String, Object>("address", params[2]),
                                                    new SimpleEntry<String, Object>("timestamp", params[3]));
                                            for (String command : voteAction.getCommands()) {
                                                String theCommand = substitutor.replace(command);
                                                notifyListeners(new SendingRconCommandEvent(
                                                        commandSender.getRconConnection(), theCommand));
                                                try {
                                                    notifyListeners(new RconCommandResponseEvent(
                                                            commandSender.getRconConnection(), commandSender
                                                                    .sendCommand(theCommand).getPayload()));
                                                } catch (Exception e) {
                                                    notifyListeners(new RconExceptionEvent(
                                                            commandSender.getRconConnection(), e));
                                                }
                                            }
                                        }
                                        if (voteAction.getCommandSender() instanceof ShellCommandSender) {
                                            ShellCommandSender commandSender = (ShellCommandSender) voteAction
                                                    .getCommandSender();
                                            HashMap<String, String> environment = new HashMap<String, String>();
                                            environment.put("voteServiceName", params[0]);
                                            environment.put("voteUserName", params[1]);
                                            environment.put("voteAddress", params[2]);
                                            environment.put("voteTimestamp", params[3]);
                                            for (String command : voteAction.getCommands()) {
                                                notifyListeners(new SendingShellCommandEvent(command));
                                                try {
                                                    commandSender.sendCommand(command, environment);
                                                    notifyListeners(new ShellCommandSentEvent());
                                                } catch (Exception e) {
                                                    notifyListeners(new ShellCommandExceptionEvent(e));
                                                }
                                            }
                                        }
                                    }
                                } else {
                                    notifyListeners(new InvalidRequestEvent(socket, requestString));
                                }
                            } catch (SocketTimeoutException e) {
                                notifyListeners(new ReadTimedOutExceptionEvent(socket, e));
                            } catch (BadPaddingException e) {
                                notifyListeners(new DecryptInputExceptionEvent(socket, e));
                            } catch (Exception e) {
                                notifyListeners(new CommunicationExceptionEvent(socket, e));
                            }
                            try {
                                socket.close();
                                notifyListeners(new ConnectionClosedEvent(socket));
                            } catch (Exception e) { // IOException: catching just in case. Continue even if socket doesn't close.
                                notifyListeners(new ConnectionCloseExceptionEvent(socket, e));
                            }
                        }
                    });
                } catch (Exception e) {
                    if (running) { // Show errors only while running, to hide error while stopping.
                        notifyListeners(new ConnectionEstablishExceptionEvent(e));
                    }
                }
            }
            executorService.shutdown();
            if (!executorService.isTerminated()) {
                notifyListeners(new ServerAwaitingTaskCompletionEvent());
                try {
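                    // effectively unbounded wait for the in-flight vote handler to finish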
                    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
                } catch (Exception e) {
                    // InterruptedException: can't happen.
                }
            }
            notifyListeners(new ServerStoppedEvent());
        }
    }).start();
}

From source file:ranktracker.crawler.youtube.YoutubeStatistics.java

@Override
public void run() {
    ExecutorService executor = Executors.newFixedThreadPool(10);
    List<ProxyData> proxylist = objProxyDao.getProxyList();

    try {
        for (Videokeywords keywords : lstVideokeywords) {

            if (checkForRecentUpdatedKeyword(keywords)) {
                continue;
            }

            executor.submit(new VideoViewStatisticsThread(appContext, lstVideokeywords, objKeywordDao, yoUrl,
                    dailyUrl, vimeoUrl, metacafeUrl, yoKeywordId, keywords, proxylist,
                    fetchSourcewithAuthentication));

        }
    } catch (Exception ex) {
        Logger.getLogger(YoutubeStatistics.class.getName()).log(Level.SEVERE, null, ex);
    }
    executor.shutdown();
    try {
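        // give the per-keyword statistics threads up to 10 minutes to finish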
        executor.awaitTermination(10, TimeUnit.MINUTES);
    } catch (InterruptedException ex) {
        Thread.currentThread().interrupt(); // restore the interrupt status
        Logger.getLogger(YoutubeStatistics.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:io.druid.data.input.impl.PrefetchableTextFilesFirehoseFactory.java

@Override
public Firehose connect(StringInputRowParser firehoseParser, File temporaryDirectory) throws IOException {
    if (maxCacheCapacityBytes == 0 && maxFetchCapacityBytes == 0) {
        return super.connect(firehoseParser, temporaryDirectory);
    }

    if (objects == null) {
        objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));
    }

    Preconditions.checkState(temporaryDirectory.exists(), "temporaryDirectory[%s] does not exist",
            temporaryDirectory);
    Preconditions.checkState(temporaryDirectory.isDirectory(), "temporaryDirectory[%s] is not a directory",
            temporaryDirectory);

    // fetchExecutor is responsible for background data fetching
    final ExecutorService fetchExecutor = createFetchExecutor();

    return new FileIteratingFirehose(new Iterator<LineIterator>() {
        // When prefetching is enabled, fetchFiles and nextFetchIndex are updated by the fetchExecutor thread, but
        // read by both the main thread (in hasNext()) and the fetchExecutor thread (in fetch()). To guarantee that
        // fetchFiles and nextFetchIndex are updated atomically, this lock must be held before updating
        // them.
        private final Object fetchLock = new Object();
        private final LinkedBlockingQueue<FetchedFile> fetchFiles = new LinkedBlockingQueue<>();

        // Number of bytes of the currently fetched files.
        // This is updated when a file is successfully fetched or a fetched file is deleted.
        private final AtomicLong fetchedBytes = new AtomicLong(0);
        private final boolean cacheInitialized;
        private final boolean prefetchEnabled;

        private Future<Void> fetchFuture;
        private int cacheIterateIndex;
        // nextFetchIndex indicates which object should be downloaded when fetch is triggered.
        private int nextFetchIndex;

        {
            cacheInitialized = totalCachedBytes > 0;
            prefetchEnabled = maxFetchCapacityBytes > 0;

            if (cacheInitialized) {
                nextFetchIndex = cacheFiles.size();
            }
            if (prefetchEnabled) {
                fetchIfNeeded(totalCachedBytes);
            }
        }

        private void fetchIfNeeded(long remainingBytes) {
            if ((fetchFuture == null || fetchFuture.isDone()) && remainingBytes <= prefetchTriggerBytes) {
                fetchFuture = fetchExecutor.submit(() -> {
                    fetch();
                    return null;
                });
            }
        }

        /**
         * Fetch objects to a local disk up to {@link PrefetchableTextFilesFirehoseFactory#maxFetchCapacityBytes}.
         * This method is not thread safe and must be called by a single thread.  Note that even if
         * {@link PrefetchableTextFilesFirehoseFactory#maxFetchCapacityBytes} is 0, at least one file is always fetched.
         * This simplifies the design, and should be improved when our client implementations for cloud storage
         * like S3 support range scans.
         */
        private void fetch() throws Exception {
            for (int i = nextFetchIndex; i < objects.size()
                    && fetchedBytes.get() <= maxFetchCapacityBytes; i++) {
                final ObjectType object = objects.get(i);
                LOG.info("Fetching object[%s], fetchedBytes[%d]", object, fetchedBytes.get());
                final File outFile = File.createTempFile(FETCH_FILE_PREFIX, null, temporaryDirectory);
                fetchedBytes.addAndGet(download(object, outFile, 0));
                synchronized (fetchLock) {
                    fetchFiles.put(new FetchedFile(object, outFile));
                    nextFetchIndex++;
                }
            }
        }

        /**
         * Downloads an object. It retries the download up to {@link PrefetchableTextFilesFirehoseFactory#maxFetchRetry}
         * times and throws the exception once the retries are exhausted.
         *
         * @param object   an object to be downloaded
         * @param outFile  a file which the object data is stored
         * @param tryCount current retry count
         *
         * @return number of downloaded bytes
         *
         * @throws IOException
         */
        private long download(ObjectType object, File outFile, int tryCount) throws IOException {
            try (final InputStream is = openObjectStream(object);
                    final CountingOutputStream cos = new CountingOutputStream(new FileOutputStream(outFile))) {
                IOUtils.copy(is, cos);
                return cos.getCount();
            } catch (IOException e) {
                final int nextTry = tryCount + 1;
                if (!Thread.currentThread().isInterrupted() && nextTry < maxFetchRetry) {
                    LOG.error(e, "Failed to download object[%s], retrying (%d of %d)", object, nextTry,
                            maxFetchRetry);
                    outFile.delete();
                    return download(object, outFile, nextTry);
                } else {
                    LOG.error(e, "Failed to download object[%s], retries exhausted, aborting", object);
                    throw e;
                }
            }
        }

        @Override
        public boolean hasNext() {
            synchronized (fetchLock) {
                return (cacheInitialized && cacheIterateIndex < cacheFiles.size()) || !fetchFiles.isEmpty()
                        || nextFetchIndex < objects.size();
            }
        }

        @Override
        public LineIterator next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }

            // If fetch() fails, hasNext() always returns true because nextFetchIndex must be smaller than the number
            // of objects, which means next() is always called. The method below checks whether fetch() threw an
            // exception and propagates it if one exists.
            checkFetchException();

            final OpenedObject openedObject;

            try {
                // Check cache first
                if (cacheInitialized && cacheIterateIndex < cacheFiles.size()) {
                    final FetchedFile fetchedFile = cacheFiles.get(cacheIterateIndex++);
                    openedObject = new OpenedObject(fetchedFile, getNoopCloser());
                } else if (prefetchEnabled) {
                    openedObject = openObjectFromLocal();
                } else {
                    openedObject = openObjectFromRemote();
                }

                final InputStream stream = wrapObjectStream(openedObject.object, openedObject.objectStream);

                return new ResourceCloseableLineIterator(new InputStreamReader(stream, Charsets.UTF_8),
                        openedObject.resourceCloser);
            } catch (IOException e) {
                throw Throwables.propagate(e);
            }
        }

        private void checkFetchException() {
            if (fetchFuture != null && fetchFuture.isDone()) {
                try {
                    fetchFuture.get();
                    fetchFuture = null;
                } catch (InterruptedException | ExecutionException e) {
                    throw Throwables.propagate(e);
                }
            }
        }

        private OpenedObject openObjectFromLocal() throws IOException {
            final FetchedFile fetchedFile;
            final Closeable resourceCloser;

            if (!fetchFiles.isEmpty()) {
                // If there are already fetched files, use them
                fetchedFile = fetchFiles.poll();
                resourceCloser = cacheIfPossibleAndGetCloser(fetchedFile, fetchedBytes);
                fetchIfNeeded(fetchedBytes.get());
            } else {
                // Otherwise, wait for fetching
                try {
                    fetchIfNeeded(fetchedBytes.get());
                    fetchedFile = fetchFiles.poll(fetchTimeout, TimeUnit.MILLISECONDS);
                    if (fetchedFile == null) {
                        // Check whether the latest fetch failed
                        checkFetchException();
                        // Otherwise, throw a timeout exception
                        throw new RuntimeException(new TimeoutException());
                    }
                    resourceCloser = cacheIfPossibleAndGetCloser(fetchedFile, fetchedBytes);
                    // trigger fetch again for subsequent next() calls
                    fetchIfNeeded(fetchedBytes.get());
                } catch (InterruptedException e) {
                    throw Throwables.propagate(e);
                }
            }
            return new OpenedObject(fetchedFile, resourceCloser);
        }

        private OpenedObject openObjectFromRemote() throws IOException {
            final OpenedObject openedObject;
            final Closeable resourceCloser = getNoopCloser();

            if (totalCachedBytes < maxCacheCapacityBytes) {
                LOG.info("Caching object[%s]", objects.get(nextFetchIndex));
                try {
                    // Since maxFetchCapacityBytes is 0, at most one file is fetched.
                    fetch();
                    FetchedFile fetchedFile = fetchFiles.poll();
                    if (fetchedFile == null) {
                        throw new ISE("Cannot fetch object[%s]", objects.get(nextFetchIndex));
                    }
                    cacheIfPossible(fetchedFile);
                    fetchedBytes.addAndGet(-fetchedFile.length());
                    openedObject = new OpenedObject(fetchedFile, resourceCloser);
                } catch (Exception e) {
                    throw Throwables.propagate(e);
                }
            } else {
                final ObjectType object = objects.get(nextFetchIndex++);
                LOG.info("Reading object[%s]", object);
                openedObject = new OpenedObject(object, openObjectStream(object), resourceCloser);
            }
            return openedObject;
        }
    }, firehoseParser, () -> {
        fetchExecutor.shutdownNow();
        try {
            Preconditions.checkState(fetchExecutor.awaitTermination(fetchTimeout, TimeUnit.MILLISECONDS));
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new ISE("Failed to shutdown fetch executor during close");
        }
    });
}
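
The closer above shuts the fetch executor down with shutdownNow() and then asserts, via Preconditions.checkState, that awaitTermination returned true within the fetch timeout. Below is a minimal, self-contained sketch of that close pattern; the single-thread executor, the FETCH_TIMEOUT_MS constant, and the submitted task are illustrative assumptions, not part of the original factory.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class FetchExecutorCloseSketch {
    // Hypothetical timeout; the real factory derives this from its fetchTimeout setting.
    private static final long FETCH_TIMEOUT_MS = 60_000L;

    public static void main(String[] args) throws InterruptedException {
        ExecutorService fetchExecutor = Executors.newSingleThreadExecutor();
        fetchExecutor.submit(() -> System.out.println("fetching..."));

        // Close pattern: interrupt any running fetch, then wait for the pool to terminate.
        fetchExecutor.shutdownNow();
        try {
            if (!fetchExecutor.awaitTermination(FETCH_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                throw new IllegalStateException("Failed to shutdown fetch executor during close");
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status before propagating, as the original closer does.
            Thread.currentThread().interrupt();
            throw e;
        }
    }
}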

From source file:org.netbeans.nbbuild.MakeJnlp2.java

private void generateFiles() throws IOException, BuildException {

    final Set<String> declaredLocales = new HashSet<String>();

    final boolean useAllLocales;

    if ("*".equals(includelocales)) {
        useAllLocales = true;
    } else if ("".equals(includelocales)) {
        useAllLocales = false;
    } else {
        useAllLocales = false;
        StringTokenizer tokenizer = new StringTokenizer(includelocales, ",");
        while (tokenizer.hasMoreElements()) {
            declaredLocales.add(tokenizer.nextToken());
        }
    }

    final Set<String> indirectFilePaths = new HashSet<String>();
    for (FileSet fs : new FileSet[] { indirectJars, indirectFiles }) {
        if (fs != null) {
            DirectoryScanner scan = fs.getDirectoryScanner(getProject());
            for (String f : scan.getIncludedFiles()) {
                indirectFilePaths.add(f.replace(File.pathSeparatorChar, '/'));
            }
        }
    }

    final ExecutorService executorService = Executors.newFixedThreadPool(nbThreads);

    final List<BuildException> exceptions = new ArrayList<BuildException>();

    for (final Iterator fileIt = files.iterator(); fileIt.hasNext();) {
        if (!exceptions.isEmpty()) {
            break;
        }

        final FileResource fr = (FileResource) fileIt.next();
        final File jar = fr.getFile();

        if (!jar.canRead()) {
            throw new BuildException("Cannot read file: " + jar);
        }

        //
        if (optimize && checkDuplicate(jar).isPresent()) {
            continue;
        }
        //

        executorService.execute(new Runnable() {
            @Override
            public void run() {
                JarFile theJar = null;
                try {
                    theJar = new JarFile(jar);

                    String codenamebase = JarWithModuleAttributes
                            .extractCodeName(theJar.getManifest().getMainAttributes());
                    if (codenamebase == null) {
                        throw new BuildException("Not a NetBeans Module: " + jar);
                    }
                    {
                        int slash = codenamebase.indexOf('/');
                        if (slash >= 0) {
                            codenamebase = codenamebase.substring(0, slash);
                        }
                    }
                    String dashcnb = codenamebase.replace('.', '-');

                    String title;
                    String oneline;
                    String shrt;
                    String osDep = null;

                    {
                        String bundle = theJar.getManifest().getMainAttributes()
                                .getValue("OpenIDE-Module-Localizing-Bundle");
                        Properties prop = new Properties();
                        if (bundle != null) {
                            ZipEntry en = theJar.getEntry(bundle);
                            if (en == null) {
                                throw new BuildException("Cannot find entry: " + bundle + " in file: " + jar);
                            }
                            InputStream is = theJar.getInputStream(en);
                            prop.load(is);
                            is.close();
                        }
                        title = prop.getProperty("OpenIDE-Module-Name", codenamebase);
                        oneline = prop.getProperty("OpenIDE-Module-Short-Description", title);
                        shrt = prop.getProperty("OpenIDE-Module-Long-Description", oneline);
                    }

                    {
                        String osMan = theJar.getManifest().getMainAttributes()
                                .getValue("OpenIDE-Module-Requires");
                        if (osMan != null) {
                            if (osMan.indexOf("org.openide.modules.os.MacOSX") >= 0) { // NOI18N
                                osDep = "Mac OS X"; // NOI18N
                            } else if (osMan.indexOf("org.openide.modules.os.Linux") >= 0) { // NOI18N
                                osDep = "Linux"; // NOI18N
                            } else if (osMan.indexOf("org.openide.modules.os.Solaris") >= 0) { // NOI18N
                                osDep = "Solaris"; // NOI18N
                            } else if (osMan.indexOf("org.openide.modules.os.Windows") >= 0) { // NOI18N
                                osDep = "Windows"; // NOI18N
                            }
                        }
                    }

                    Map<String, List<File>> localizedFiles = verifyExtensions(jar, theJar.getManifest(),
                            dashcnb, codenamebase, verify, indirectFilePaths);

                    executedLocales = localizedFiles.keySet();

                    new File(targetFile, dashcnb).mkdir();

                    File signed = new File(new File(targetFile, dashcnb), jar.getName());

                    // +p
                    final JarConfigResolved jarConfig = signOrCopy(jar, signed);

                    File jnlp = new File(targetFile, dashcnb + ".jnlp");
                    StringWriter writeJNLP = new StringWriter();
                    writeJNLP.write("<?xml version='1.0' encoding='UTF-8'?>\n");
                    writeJNLP.write(
                            "<!DOCTYPE jnlp PUBLIC \"-//Sun Microsystems, Inc//DTD JNLP Descriptor 6.0//EN\" \"http://java.sun.com/dtd/JNLP-6.0.dtd\">\n");
                    writeJNLP.write("<jnlp spec='1.0+' codebase='" + codebase + "'>\n");
                    writeJNLP.write("  <information>\n");
                    writeJNLP.write("   <title>" + XMLUtil.toElementContent(title) + "</title>\n");
                    writeJNLP.write("   <vendor>NetBeans</vendor>\n");
                    writeJNLP.write("   <description kind='one-line'>" + XMLUtil.toElementContent(oneline)
                            + "</description>\n");
                    writeJNLP.write("   <description kind='short'>" + XMLUtil.toElementContent(shrt)
                            + "</description>\n");
                    writeJNLP.write("  </information>\n");

                    String realPermissions = permissions;
                    if ((jarConfig != null) && (jarConfig.getExtraManifestAttributes() != null)) {
                        String jarPermissions = jarConfig.getExtraManifestAttributes().getValue("Permissions");

                        if (jarPermissions != null) {
                            if ("all-permissions".equals(jarPermissions)) {
                                realPermissions = "<security><all-permissions/></security>\n";
                            } else {
                                realPermissions = "";
                            }
                        }
                    }

                    writeJNLP.write(realPermissions);

                    if (osDep == null) {
                        writeJNLP.write("  <resources>\n");
                    } else {
                        writeJNLP.write("  <resources os='" + osDep + "'>\n");
                    }
                    writeJNLP.write("<property name=\"jnlp.packEnabled\" value=\"" + String.valueOf(pack200)
                            + "\"/>\n");
                    writeJNLP.write(constructJarHref(jar, dashcnb));

                    processExtensions(jar, theJar.getManifest(), writeJNLP, dashcnb, codebase, realPermissions);
                    processIndirectJars(writeJNLP, dashcnb);
                    processIndirectFiles(writeJNLP, dashcnb);

                    writeJNLP.write("  </resources>\n");

                    if (useAllLocales || !declaredLocales.isEmpty()) {

                        // write down locales
                        for (Map.Entry<String, List<File>> e : localizedFiles.entrySet()) {
                            final String locale = e.getKey();

                            if (!declaredLocales.isEmpty() && !declaredLocales.contains(locale)) {
                                continue;
                            }

                            final List<File> allFiles = e.getValue();

                            writeJNLP.write("  <resources locale='" + locale + "'>\n");

                            for (File n : allFiles) {
                                log("generating locale " + locale + " for " + n, Project.MSG_VERBOSE);
                                String name = n.getName();
                                String clusterRootPrefix = jar.getParent() + File.separatorChar;
                                String absname = n.getAbsolutePath();
                                if (absname.startsWith(clusterRootPrefix)) {
                                    name = absname.substring(clusterRootPrefix.length())
                                            .replace(File.separatorChar, '-');
                                }
                                File t = new File(new File(targetFile, dashcnb), name);
                                signOrCopy(n, t);
                                writeJNLP.write(constructJarHref(n, dashcnb, name));
                            }

                            writeJNLP.write("  </resources>\n");

                        }
                    }

                    writeJNLP.write("  <component-desc/>\n");
                    writeJNLP.write("</jnlp>\n");
                    writeJNLP.close();

                    // +p
                    Files.write(writeJNLP.toString(), jnlp, Charset.forName("UTF-8"));
                } catch (Exception e) {
                    exceptions.add(new BuildException(e));
                } finally {
                    if (theJar != null) {
                        try {
                            theJar.close();
                        } catch (IOException e) {
                            // best-effort close; failures here are deliberately ignored
                        }
                    }
                }
            }
        });
    }

    executorService.shutdown();

    try {
        executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (Exception e) {
        throw new BuildException(e);
    }

    if (!exceptions.isEmpty()) {
        throw exceptions.get(0);
    }
}
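
MakeJnlp2 above uses the unbounded-wait idiom: after submitting one task per module JAR it calls shutdown(), waits with awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS) until every task has finished, and then rethrows the first collected exception. The sketch below isolates that drain-then-rethrow pattern; the task bodies are placeholders, and it swaps the plain ArrayList used above for a CopyOnWriteArrayList because the exception list is written from multiple worker threads.

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class DrainAllTasksSketch {
    public static void main(String[] args) {
        ExecutorService executorService = Executors.newFixedThreadPool(4);
        // Thread-safe list: worker threads may add to it concurrently.
        final List<RuntimeException> exceptions = new CopyOnWriteArrayList<>();

        for (int i = 0; i < 10; i++) {
            final int task = i;
            executorService.execute(() -> {
                try {
                    System.out.println("processing task " + task);
                } catch (RuntimeException e) {
                    exceptions.add(e);
                }
            });
        }

        executorService.shutdown();
        try {
            // Long.MAX_VALUE nanoseconds is the conventional "wait forever" idiom.
            executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }

        if (!exceptions.isEmpty()) {
            throw exceptions.get(0);
        }
    }
}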

From source file:org.dllearner.algorithms.qtl.experiments.QTLEvaluation.java

public void run(int maxNrOfProcessedQueries, int maxTreeDepth, int[] exampleInterval, double[] noiseInterval,
        HeuristicType[] measures) throws Exception {
    this.maxTreeDepth = maxTreeDepth;
    queryTreeFactory.setMaxDepth(maxTreeDepth);

    if (exampleInterval != null) {
        nrOfExamplesIntervals = exampleInterval;
    }
    if (noiseInterval != null) {
        this.noiseIntervals = noiseInterval;
    }
    if (measures != null) {
        this.measures = measures;
    }

    logger.info("Started QTL evaluation...");
    long t1 = System.currentTimeMillis();

    List<String> queries = dataset.getSparqlQueries().values().stream().map(q -> q.toString())
            .collect(Collectors.toList());
    logger.info("#loaded queries: " + queries.size());

    // filter for debugging purposes
    queries = queries.stream().filter(q -> tokens.stream().allMatch(q::contains))
            .collect(Collectors.toList());

    if (maxNrOfProcessedQueries == -1) {
        maxNrOfProcessedQueries = queries.size();
    }

    //      queries = filter(queries, (int) Math.ceil((double) maxNrOfProcessedQueries / maxTreeDepth));
    //      queries = queries.subList(0, Math.min(queries.size(), maxNrOfProcessedQueries));
    logger.info("#queries to process: " + queries.size());

    // generate examples for each query
    logger.info("precomputing pos. and neg. examples...");
    final Map<String, ExampleCandidates> query2Examples = new HashMap<>();
    for (String query : queries) {
        // if (!query.contains("Borough_(New_York_City)")) continue; // debugging filter
        query2Examples.put(query, generateExamples(query));
    }
    logger.info("precomputing pos. and neg. examples finished.");

    // check for queries that do not return any result (should not happen, but we never know)
    Set<String> emptyQueries = query2Examples.entrySet().stream()
            .filter(e -> e.getValue().correctPosExampleCandidates.isEmpty()).map(e -> e.getKey())
            .collect(Collectors.toSet());
    logger.info("got {} empty queries.", emptyQueries.size());
    queries.removeAll(emptyQueries);

    // min. pos examples
    Set<String> lowNrOfExamplesQueries = query2Examples.entrySet().stream()
            .filter(e -> e.getValue().correctPosExampleCandidates.size() < 2).map(e -> e.getKey())
            .collect(Collectors.toSet());
    logger.info("got {} queries with < 2 pos. examples.", emptyQueries.size());
    queries.removeAll(lowNrOfExamplesQueries);

    final int totalNrOfQTLRuns = heuristics.length * this.measures.length * nrOfExamplesIntervals.length
            * noiseIntervals.length * queries.size();
    logger.info("#QTL runs: " + totalNrOfQTLRuns);

    final AtomicInteger currentNrOfFinishedRuns = new AtomicInteger(0);

    // loop over heuristics
    for (final QueryTreeHeuristic heuristic : heuristics) {
        final String heuristicName = heuristic.getClass().getAnnotation(ComponentAnn.class).shortName();

        // loop over heuristics measures
        for (HeuristicType measure : this.measures) {
            final String measureName = measure.toString();
            heuristic.setHeuristicType(measure);

            double[][] data = new double[nrOfExamplesIntervals.length][noiseIntervals.length];

            // loop over number of positive examples
            for (int i = 0; i < nrOfExamplesIntervals.length; i++) {
                final int nrOfExamples = nrOfExamplesIntervals[i];

                // loop over noise value
                for (int j = 0; j < noiseIntervals.length; j++) {
                    final double noise = noiseIntervals[j];

                    // check if not already processed
                    File logFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-"
                            + heuristicName + "-" + measureName + ".log");
                    File statsFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-"
                            + heuristicName + "-" + measureName + ".stats");

                    if (!override && logFile.exists() && statsFile.exists()) {
                        logger.info(
                                "Eval config already processed. For re-running please remove corresponding output files.");
                        continue;
                    }

                    FileAppender appender = null;
                    try {
                        appender = new FileAppender(new SimpleLayout(), logFile.getPath(), false);
                        Logger.getRootLogger().addAppender(appender);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }

                    logger.info("#examples: " + nrOfExamples + " noise: " + noise);

                    final DescriptiveStatistics nrOfReturnedSolutionsStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics baselinePrecisionStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics baselineRecallStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics baselineFMeasureStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics baselinePredAccStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics baselineMathCorrStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics bestReturnedSolutionPrecisionStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestReturnedSolutionRecallStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestReturnedSolutionFMeasureStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestReturnedSolutionPredAccStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestReturnedSolutionMathCorrStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics bestReturnedSolutionRuntimeStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics bestSolutionPrecisionStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestSolutionRecallStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestSolutionFMeasureStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestSolutionPredAccStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestSolutionMathCorrStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics bestSolutionPositionStats = new SynchronizedDescriptiveStatistics();

                    MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).reset();
                    MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).reset();

                    ExecutorService tp = Executors.newFixedThreadPool(nrOfThreads);

                    // indicates whether execution failed for any of the queries
                    final AtomicBoolean failed = new AtomicBoolean(false);

                    // loop over SPARQL queries
                    for (final String sparqlQuery : queries) {

                        tp.submit(() -> {

                            logger.info("##############################################################");
                            logger.info("Processing query\n" + sparqlQuery);

                            try {
                                ExamplesWrapper examples = query2Examples.get(sparqlQuery).get(nrOfExamples,
                                        nrOfExamples, noise);
                                logger.info(
                                        "pos. examples:\n" + Joiner.on("\n").join(examples.correctPosExamples));
                                logger.info(
                                        "neg. examples:\n" + Joiner.on("\n").join(examples.correctNegExamples));

                                // write examples to disk
                                File dir = new File(benchmarkDirectory, "data/" + hash(sparqlQuery));
                                dir.mkdirs();
                                Files.write(Joiner.on("\n").join(examples.correctPosExamples),
                                        new File(dir, "examples_" + nrOfExamples + "_" + noise + ".tp"),
                                        Charsets.UTF_8);
                                Files.write(Joiner.on("\n").join(examples.correctNegExamples),
                                        new File(dir, "examples_" + nrOfExamples + "_" + noise + ".tn"),
                                        Charsets.UTF_8);
                                Files.write(Joiner.on("\n").join(examples.falsePosExamples),
                                        new File(dir, "examples_" + nrOfExamples + "_" + noise + ".fp"),
                                        Charsets.UTF_8);

                                // compute baseline
                                logger.info("Computing baseline...");
                                RDFResourceTree baselineSolution = applyBaseLine(examples,
                                        Baseline.MOST_INFORMATIVE_EDGE_IN_EXAMPLES);
                                logger.info("Baseline solution:\n" + owlRenderer
                                        .render(QueryTreeUtils.toOWLClassExpression(baselineSolution)));
                                logger.info("Evaluating baseline...");
                                Score baselineScore = computeScore(sparqlQuery, baselineSolution, noise);
                                logger.info("Baseline score:\n" + baselineScore);
                                String baseLineQuery = QueryTreeUtils.toSPARQLQueryString(baselineSolution,
                                        dataset.getBaseIRI(), dataset.getPrefixMapping());
                                baselinePrecisionStats.addValue(baselineScore.precision);
                                baselineRecallStats.addValue(baselineScore.recall);
                                baselineFMeasureStats.addValue(baselineScore.fmeasure);
                                baselinePredAccStats.addValue(baselineScore.predAcc);
                                baselineMathCorrStats.addValue(baselineScore.mathCorr);

                                // run QTL
                                PosNegLPStandard lp = new PosNegLPStandard();
                                lp.setPositiveExamples(examples.posExamplesMapping.keySet());
                                lp.setNegativeExamples(examples.negExamplesMapping.keySet());
                                QTL2Disjunctive la = new QTL2Disjunctive(lp, qef);
                                la.setRenderer(new org.dllearner.utilities.owl.DLSyntaxObjectRenderer());
                                la.setReasoner(dataset.getReasoner());
                                la.setEntailment(Entailment.SIMPLE);
                                la.setTreeFactory(queryTreeFactory);
                                la.setPositiveExampleTrees(examples.posExamplesMapping);
                                la.setNegativeExampleTrees(examples.negExamplesMapping);
                                la.setNoise(noise);
                                la.setHeuristic(heuristic);
                                la.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds);
                                la.setMaxTreeComputationTimeInSeconds(maxExecutionTimeInSeconds);
                                la.init();
                                la.start();
                                List<EvaluatedRDFResourceTree> solutions = new ArrayList<>(la.getSolutions());

                                //                              List<EvaluatedRDFResourceTree> solutions = generateSolutions(examples, noise, heuristic);
                                nrOfReturnedSolutionsStats.addValue(solutions.size());

                                // the best returned solution by QTL
                                EvaluatedRDFResourceTree bestSolution = solutions.get(0);
                                logger.info("Got " + solutions.size() + " query trees.");
                                logger.info("Best computed solution:\n"
                                        + render(bestSolution.asEvaluatedDescription()));
                                logger.info("QTL Score:\n" + bestSolution.getTreeScore());
                                long runtimeBestSolution = la.getTimeBestSolutionFound();
                                bestReturnedSolutionRuntimeStats.addValue(runtimeBestSolution);

                                // convert to SPARQL query
                                RDFResourceTree tree = bestSolution.getTree();
                                //                  filter.filter(tree);
                                String learnedSPARQLQuery = QueryTreeUtils.toSPARQLQueryString(tree,
                                        dataset.getBaseIRI(), dataset.getPrefixMapping());

                                // compute score
                                Score score = computeScore(sparqlQuery, tree, noise);
                                bestReturnedSolutionPrecisionStats.addValue(score.precision);
                                bestReturnedSolutionRecallStats.addValue(score.recall);
                                bestReturnedSolutionFMeasureStats.addValue(score.fmeasure);
                                bestReturnedSolutionPredAccStats.addValue(score.predAcc);
                                bestReturnedSolutionMathCorrStats.addValue(score.mathCorr);
                                logger.info(score.toString());

                                // find the extensionally best matching tree in the list
                                Pair<EvaluatedRDFResourceTree, Score> bestMatchingTreeWithScore = findBestMatchingTreeFast(
                                        solutions, sparqlQuery, noise, examples);
                                EvaluatedRDFResourceTree bestMatchingTree = bestMatchingTreeWithScore
                                        .getFirst();
                                Score bestMatchingScore = bestMatchingTreeWithScore.getSecond();

                                // position of best tree in list of solutions
                                int positionBestScore = solutions.indexOf(bestMatchingTree);
                                bestSolutionPositionStats.addValue(positionBestScore);

                                Score bestScore = score;
                                if (positionBestScore > 0) {
                                    logger.info("Position of best covering tree in list: " + positionBestScore);
                                    logger.info("Best covering solution:\n"
                                            + render(bestMatchingTree.asEvaluatedDescription()));
                                    logger.info("Tree score: " + bestMatchingTree.getTreeScore());
                                    bestScore = bestMatchingScore;
                                    logger.info(bestMatchingScore.toString());
                                } else {
                                    logger.info("Best returned solution was also the best covering solution.");
                                }
                                bestSolutionRecallStats.addValue(bestScore.recall);
                                bestSolutionPrecisionStats.addValue(bestScore.precision);
                                bestSolutionFMeasureStats.addValue(bestScore.fmeasure);
                                bestSolutionPredAccStats.addValue(bestScore.predAcc);
                                bestSolutionMathCorrStats.addValue(bestScore.mathCorr);

                                for (RDFResourceTree negTree : examples.negExamplesMapping.values()) {
                                    if (QueryTreeUtils.isSubsumedBy(negTree, bestMatchingTree.getTree())) {
                                        Files.append(sparqlQuery + "\n", new File("/tmp/negCovered.txt"),
                                                Charsets.UTF_8);
                                        break;
                                    }
                                }

                                String bestQuery = QueryFactory.create(QueryTreeUtils.toSPARQLQueryString(
                                        filter.apply(bestMatchingTree.getTree()), dataset.getBaseIRI(),
                                        dataset.getPrefixMapping())).toString();

                                if (write2DB) {
                                    write2DB(sparqlQuery, nrOfExamples, examples, noise, baseLineQuery,
                                            baselineScore, heuristicName, measureName,
                                            QueryFactory.create(learnedSPARQLQuery).toString(), score,
                                            runtimeBestSolution, bestQuery, positionBestScore, bestScore);
                                }

                            } catch (Exception e) {
                                failed.set(true);
                                logger.error("Error occured for query\n" + sparqlQuery, e);
                                try {
                                    StringWriter sw = new StringWriter();
                                    PrintWriter pw = new PrintWriter(sw);
                                    e.printStackTrace(pw);
                                    Files.append(sparqlQuery + "\n" + sw.toString(),
                                            new File(benchmarkDirectory, "failed-" + nrOfExamples + "-" + noise
                                                    + "-" + heuristicName + "-" + measureName + ".txt"),
                                            Charsets.UTF_8);
                                } catch (IOException e1) {
                                    e1.printStackTrace();
                                }
                            } finally {
                                int cnt = currentNrOfFinishedRuns.incrementAndGet();
                                logger.info("***********Evaluation Progress:"
                                        + NumberFormat.getPercentInstance()
                                                .format((double) cnt / totalNrOfQTLRuns)
                                        + "(" + cnt + "/" + totalNrOfQTLRuns + ")" + "***********");
                            }
                        });

                    }

                    tp.shutdown();
                    tp.awaitTermination(12, TimeUnit.HOURS);

                    Logger.getRootLogger().removeAppender(appender);

                    if (!failed.get()) {
                        String result = "";
                        result += "\nBaseline Precision:\n" + baselinePrecisionStats;
                        result += "\nBaseline Recall:\n" + baselineRecallStats;
                        result += "\nBaseline F-measure:\n" + baselineFMeasureStats;
                        result += "\nBaseline PredAcc:\n" + baselinePredAccStats;
                        result += "\nBaseline MathCorr:\n" + baselineMathCorrStats;

                        result += "#Returned solutions:\n" + nrOfReturnedSolutionsStats;

                        result += "\nOverall Precision:\n" + bestReturnedSolutionPrecisionStats;
                        result += "\nOverall Recall:\n" + bestReturnedSolutionRecallStats;
                        result += "\nOverall F-measure:\n" + bestReturnedSolutionFMeasureStats;
                        result += "\nOverall PredAcc:\n" + bestReturnedSolutionPredAccStats;
                        result += "\nOverall MathCorr:\n" + bestReturnedSolutionMathCorrStats;

                        result += "\nTime until best returned solution found:\n"
                                + bestReturnedSolutionRuntimeStats;

                        result += "\nPositions of best solution:\n"
                                + Arrays.toString(bestSolutionPositionStats.getValues());
                        result += "\nPosition of best solution stats:\n" + bestSolutionPositionStats;
                        result += "\nOverall Precision of best solution:\n" + bestSolutionPrecisionStats;
                        result += "\nOverall Recall of best solution:\n" + bestSolutionRecallStats;
                        result += "\nOverall F-measure of best solution:\n" + bestSolutionFMeasureStats;

                        result += "\nCBD generation time(total):\t"
                                + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getTotal()
                                + "\n";
                        result += "CBD generation time(avg):\t"
                                + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getAvg()
                                + "\n";
                        result += "Tree generation time(total):\t"
                                + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getTotal()
                                + "\n";
                        result += "Tree generation time(avg):\t"
                                + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getAvg()
                                + "\n";
                        result += "Tree size(avg):\t" + treeSizeStats.getMean() + "\n";

                        logger.info(result);

                        try {
                            Files.write(result, statsFile, Charsets.UTF_8);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }

                        data[i][j] = bestReturnedSolutionFMeasureStats.getMean();

                        if (write2DB) {
                            write2DB(heuristicName, measureName, nrOfExamples, noise,
                                    bestReturnedSolutionFMeasureStats.getMean(),
                                    bestReturnedSolutionPrecisionStats.getMean(),
                                    bestReturnedSolutionRecallStats.getMean(),
                                    bestReturnedSolutionPredAccStats.getMean(),
                                    bestReturnedSolutionMathCorrStats.getMean(),
                                    bestSolutionPositionStats.getMean(), bestSolutionFMeasureStats.getMean(),
                                    bestSolutionPrecisionStats.getMean(), bestSolutionRecallStats.getMean(),
                                    bestSolutionPredAccStats.getMean(), bestSolutionMathCorrStats.getMean(),
                                    baselineFMeasureStats.getMean(), baselinePrecisionStats.getMean(),
                                    baselineRecallStats.getMean(), baselinePredAccStats.getMean(),
                                    baselineMathCorrStats.getMean(),
                                    bestReturnedSolutionRuntimeStats.getMean());
                        }
                    }
                }
            }

            String content = "###";
            String separator = "\t";
            for (double noiseInterval1 : noiseIntervals) {
                content += separator + noiseInterval1;
            }
            content += "\n";
            for (int i = 0; i < nrOfExamplesIntervals.length; i++) {
                content += nrOfExamplesIntervals[i];
                for (int j = 0; j < noiseIntervals.length; j++) {
                    content += separator + data[i][j];
                }
                content += "\n";
            }

            File examplesVsNoise = new File(benchmarkDirectory,
                    "examplesVsNoise-" + heuristicName + "-" + measureName + ".tsv");
            try {
                Files.write(content, examplesVsNoise, Charsets.UTF_8);
            } catch (IOException e) {
                logger.error("failed to write stats to file", e);
            }
        }
    }

    if (write2DB) {
        conn.close();
    }

    if (useEmailNotification) {
        sendFinishedMail();
    }
    long t2 = System.currentTimeMillis();
    long duration = t2 - t1;
    logger.info("QTL evaluation finished in " + DurationFormatUtils.formatDurationHMS(duration) + "ms.");
}
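
Both QTL experiments wait for their worker pool with tp.awaitTermination(12, TimeUnit.HOURS) but ignore the boolean result, so a run that outlives the deadline would silently fall through to the statistics code. The sketch below shows the same bounded wait with the result checked; the pool size, the submitted task, and the error handling are illustrative assumptions.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class BoundedAwaitSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService tp = Executors.newFixedThreadPool(4);
        tp.submit(() -> System.out.println("evaluation run"));

        tp.shutdown();
        // awaitTermination returns false if the timeout elapses before all tasks finish;
        // checking the result avoids proceeding silently with partial results.
        if (!tp.awaitTermination(12, TimeUnit.HOURS)) {
            tp.shutdownNow(); // cancel anything still running
            throw new IllegalStateException("worker pool did not terminate within 12 hours");
        }
        System.out.println("all runs finished");
    }
}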

From source file:org.dllearner.algorithms.qtl.experiments.PRConvergenceExperiment.java

public void run(int maxNrOfProcessedQueries, int maxTreeDepth, int[] exampleInterval, double[] noiseInterval,
        HeuristicType[] measures) throws Exception {
    this.maxTreeDepth = maxTreeDepth;
    queryTreeFactory.setMaxDepth(maxTreeDepth);

    if (exampleInterval != null) {
        nrOfExamplesIntervals = exampleInterval;
    }
    if (noiseInterval != null) {
        this.noiseIntervals = noiseInterval;
    }
    if (measures != null) {
        this.measures = measures;
    }

    boolean noiseEnabled = noiseIntervals.length > 1 || noiseIntervals[0] > 0;
    boolean posOnly = !noiseEnabled;

    logger.info("Started QTL evaluation...");
    long t1 = System.currentTimeMillis();

    List<String> queries = dataset.getSparqlQueries().values().stream().map(q -> q.toString())
            .collect(Collectors.toList());
    logger.info("#loaded queries: " + queries.size());

    // filter for debugging purposes
    queries = queries.stream().filter(q -> queriesToProcessTokens.stream().allMatch(q::contains))
            .collect(Collectors.toList());
    queries = queries.stream().filter(q -> queriesToOmitTokens.stream().noneMatch(t -> q.contains(t)))
            .collect(Collectors.toList());

    if (maxNrOfProcessedQueries == -1) {
        maxNrOfProcessedQueries = queries.size();
    }

    //      queries = filter(queries, (int) Math.ceil((double) maxNrOfProcessedQueries / maxTreeDepth));
    //      queries = queries.subList(0, Math.min(queries.size(), maxNrOfProcessedQueries));
    logger.info("#queries to process: " + queries.size());

    // generate examples for each query
    logger.info("precomputing pos. and neg. examples...");
    for (String query : queries) {
        // if (!query.contains("Borough_(New_York_City)")) continue; // debugging filter
        query2Examples.put(query, generateExamples(query, posOnly, noiseEnabled));
    }
    logger.info("precomputing pos. and neg. examples finished.");

    // check for queries that do not return any result (should not happen, but we never know)
    Set<String> emptyQueries = query2Examples.entrySet().stream()
            .filter(e -> e.getValue().correctPosExampleCandidates.isEmpty()).map(e -> e.getKey())
            .collect(Collectors.toSet());
    logger.info("got {} empty queries.", emptyQueries.size());
    queries.removeAll(emptyQueries);

    // min. pos examples
    int min = 3;
    Set<String> lowNrOfExamplesQueries = query2Examples.entrySet().stream()
            .filter(e -> e.getValue().correctPosExampleCandidates.size() < min).map(e -> e.getKey())
            .collect(Collectors.toSet());
    logger.info("got {} queries with < {} pos. examples.", emptyQueries.size(), min);
    queries.removeAll(lowNrOfExamplesQueries);
    queries = queries.subList(0, Math.min(80, queries.size()));

    final int totalNrOfQTLRuns = heuristics.length * this.measures.length * nrOfExamplesIntervals.length
            * noiseIntervals.length * queries.size();
    logger.info("#QTL runs: " + totalNrOfQTLRuns);

    final AtomicInteger currentNrOfFinishedRuns = new AtomicInteger(0);

    // loop over heuristics
    for (final QueryTreeHeuristic heuristic : heuristics) {
        final String heuristicName = heuristic.getClass().getAnnotation(ComponentAnn.class).shortName();

        // loop over heuristics measures
        for (HeuristicType measure : this.measures) {
            final String measureName = measure.toString();
            heuristic.setHeuristicType(measure);

            double[][] data = new double[nrOfExamplesIntervals.length][noiseIntervals.length];

            // loop over number of positive examples
            for (int i = 0; i < nrOfExamplesIntervals.length; i++) {
                final int nrOfExamples = nrOfExamplesIntervals[i];

                // loop over noise value
                for (int j = 0; j < noiseIntervals.length; j++) {
                    final double noise = noiseIntervals[j];

                    // check if not already processed
                    File logFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-"
                            + heuristicName + "-" + measureName + ".log");
                    File statsFile = new File(benchmarkDirectory, "qtl2-" + nrOfExamples + "-" + noise + "-"
                            + heuristicName + "-" + measureName + ".stats");

                    if (!override && logFile.exists() && statsFile.exists()) {
                        logger.info(
                                "Eval config already processed. For re-running please remove corresponding output files.");
                        continue;
                    }

                    FileAppender appender = null;
                    try {
                        appender = new FileAppender(new SimpleLayout(), logFile.getPath(), false);
                        Logger.getRootLogger().addAppender(appender);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }

                    logger.info("#examples: " + nrOfExamples + " noise: " + noise);

                    final DescriptiveStatistics nrOfReturnedSolutionsStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics baselinePrecisionStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics baselineRecallStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics baselineFMeasureStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics baselinePredAccStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics baselineMathCorrStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics bestReturnedSolutionPrecisionStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestReturnedSolutionRecallStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestReturnedSolutionFMeasureStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestReturnedSolutionPredAccStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestReturnedSolutionMathCorrStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics bestReturnedSolutionRuntimeStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics bestSolutionPrecisionStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestSolutionRecallStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestSolutionFMeasureStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestSolutionPredAccStats = new SynchronizedDescriptiveStatistics();
                    final DescriptiveStatistics bestSolutionMathCorrStats = new SynchronizedDescriptiveStatistics();

                    final DescriptiveStatistics bestSolutionPositionStats = new SynchronizedDescriptiveStatistics();

                    MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).reset();
                    MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).reset();

                    ExecutorService tp = Executors.newFixedThreadPool(nrOfThreads);

                    // indicates whether execution failed for any of the queries
                    final AtomicBoolean failed = new AtomicBoolean(false);

                    Set<String> queriesToProcess = new TreeSet<>(queries);
                    queriesToProcess.retainAll(query2Examples.entrySet().stream()
                            .filter(e -> e.getValue().correctPosExampleCandidates.size() >= nrOfExamples)
                            .map(e -> e.getKey()).collect(Collectors.toSet()));

                    // loop over SPARQL queries
                    for (final String sparqlQuery : queriesToProcess) {
                        CBDStructureTree cbdStructure = cbdStructureTree != null ? cbdStructureTree
                                : QueryUtils.getOptimalCBDStructure(QueryFactory.create(sparqlQuery));

                        tp.submit(() -> {
                            logger.info("CBD tree:" + cbdStructure.toStringVerbose());

                            // update max tree depth
                            this.maxTreeDepth = QueryTreeUtils.getDepth(cbdStructure);
                            logger.info("##############################################################");
                            logger.info("Processing query\n" + sparqlQuery);

                            // we repeat it n times with different permutations of examples
                            int nrOfPermutations = 1;

                            if (nrOfExamples >= query2Examples.get(sparqlQuery).correctPosExampleCandidates
                                    .size()) {
                                nrOfPermutations = 1;
                            }
                            for (int perm = 1; perm <= nrOfPermutations; perm++) {
                                logger.info("Run {}/{}", perm, nrOfPermutations);
                                try {
                                    ExamplesWrapper examples = getExamples(sparqlQuery, nrOfExamples,
                                            nrOfExamples, noise, cbdStructure);
                                    logger.info("pos. examples:\n"
                                            + Joiner.on("\n").join(examples.correctPosExamples));
                                    logger.info("neg. examples:\n"
                                            + Joiner.on("\n").join(examples.correctNegExamples));

                                    // write examples to disk
                                    File dir = new File(benchmarkDirectory, "data/" + hash(sparqlQuery));
                                    dir.mkdirs();
                                    Files.write(Joiner.on("\n").join(examples.correctPosExamples), new File(dir,
                                            "examples" + perm + "_" + nrOfExamples + "_" + noise + ".tp"),
                                            Charsets.UTF_8);
                                    Files.write(Joiner.on("\n").join(examples.correctNegExamples), new File(dir,
                                            "examples" + perm + "_" + nrOfExamples + "_" + noise + ".tn"),
                                            Charsets.UTF_8);
                                    Files.write(Joiner.on("\n").join(examples.falsePosExamples), new File(dir,
                                            "examples" + perm + "_" + nrOfExamples + "_" + noise + ".fp"),
                                            Charsets.UTF_8);

                                    // compute baseline
                                    RDFResourceTree baselineSolution = applyBaseLine(examples,
                                            Baseline.MOST_INFORMATIVE_EDGE_IN_EXAMPLES);
                                    logger.info("Evaluating baseline...");
                                    Score baselineScore = computeScore(sparqlQuery, baselineSolution, noise);
                                    logger.info("Baseline score:\n" + baselineScore);
                                    String baseLineQuery = QueryTreeUtils.toSPARQLQueryString(baselineSolution,
                                            dataset.getBaseIRI(), dataset.getPrefixMapping());
                                    baselinePrecisionStats.addValue(baselineScore.precision);
                                    baselineRecallStats.addValue(baselineScore.recall);
                                    baselineFMeasureStats.addValue(baselineScore.fmeasure);
                                    baselinePredAccStats.addValue(baselineScore.predAcc);
                                    baselineMathCorrStats.addValue(baselineScore.mathCorr);

                                    // run QTL
                                    PosNegLPStandard lp = new PosNegLPStandard();
                                    lp.setPositiveExamples(examples.posExamplesMapping.keySet());
                                    lp.setNegativeExamples(examples.negExamplesMapping.keySet());
                                    //                                 QTL2Disjunctive la = new QTL2Disjunctive(lp, qef);
                                    QTL2DisjunctiveMultiThreaded la = new QTL2DisjunctiveMultiThreaded(lp, qef);
                                    la.setRenderer(new org.dllearner.utilities.owl.DLSyntaxObjectRenderer());
                                    la.setReasoner(dataset.getReasoner());
                                    la.setEntailment(Entailment.SIMPLE);
                                    la.setTreeFactory(queryTreeFactory);
                                    la.setPositiveExampleTrees(examples.posExamplesMapping);
                                    la.setNegativeExampleTrees(examples.negExamplesMapping);
                                    la.setNoise(noise);
                                    la.setHeuristic(heuristic);
                                    la.setMaxExecutionTimeInSeconds(maxExecutionTimeInSeconds);
                                    la.setMaxTreeComputationTimeInSeconds(maxExecutionTimeInSeconds);
                                    la.init();
                                    la.start();
                                    List<EvaluatedRDFResourceTree> solutions = new ArrayList<>(
                                            la.getSolutions());

                                    //                              List<EvaluatedRDFResourceTree> solutions = generateSolutions(examples, noise, heuristic);
                                    nrOfReturnedSolutionsStats.addValue(solutions.size());

                                    // the best returned solution by QTL
                                    EvaluatedRDFResourceTree bestSolution = solutions.get(0);
                                    logger.info("Got " + solutions.size() + " query trees.");
                                    //                                 logger.info("Best computed solution:\n" + render(bestSolution.asEvaluatedDescription()));
                                    logger.info("QTL Score:\n" + bestSolution.getTreeScore());
                                    long runtimeBestSolution = la.getTimeBestSolutionFound();
                                    bestReturnedSolutionRuntimeStats.addValue(runtimeBestSolution);

                                    // convert to SPARQL query
                                    RDFResourceTree tree = bestSolution.getTree();
                                    tree = filter.apply(tree);
                                    String learnedSPARQLQuery = QueryTreeUtils.toSPARQLQueryString(tree,
                                            dataset.getBaseIRI(), dataset.getPrefixMapping());

                                    // compute score
                                    Score score = computeScore(sparqlQuery, tree, noise);
                                    bestReturnedSolutionPrecisionStats.addValue(score.precision);
                                    bestReturnedSolutionRecallStats.addValue(score.recall);
                                    bestReturnedSolutionFMeasureStats.addValue(score.fmeasure);
                                    bestReturnedSolutionPredAccStats.addValue(score.predAcc);
                                    bestReturnedSolutionMathCorrStats.addValue(score.mathCorr);
                                    logger.info(score.toString());

                                    // find the extensionally best matching tree in the list
                                    Pair<EvaluatedRDFResourceTree, Score> bestMatchingTreeWithScore = findBestMatchingTreeFast(
                                            solutions, sparqlQuery, noise, examples);
                                    EvaluatedRDFResourceTree bestMatchingTree = bestMatchingTreeWithScore
                                            .getFirst();
                                    Score bestMatchingScore = bestMatchingTreeWithScore.getSecond();

                                    // position of best tree in list of solutions
                                    int positionBestScore = solutions.indexOf(bestMatchingTree);
                                    bestSolutionPositionStats.addValue(positionBestScore);

                                    Score bestScore = score;
                                    if (positionBestScore > 0) {
                                        logger.info(
                                                "Position of best covering tree in list: " + positionBestScore);
                                        logger.info("Best covering solution:\n"
                                                + render(bestMatchingTree.asEvaluatedDescription()));
                                        logger.info("Tree score: " + bestMatchingTree.getTreeScore());
                                        bestScore = bestMatchingScore;
                                        logger.info(bestMatchingScore.toString());
                                    } else {
                                        logger.info(
                                                "Best returned solution was also the best covering solution.");
                                    }
                                    bestSolutionRecallStats.addValue(bestScore.recall);
                                    bestSolutionPrecisionStats.addValue(bestScore.precision);
                                    bestSolutionFMeasureStats.addValue(bestScore.fmeasure);
                                    bestSolutionPredAccStats.addValue(bestScore.predAcc);
                                    bestSolutionMathCorrStats.addValue(bestScore.mathCorr);

                                    for (RDFResourceTree negTree : examples.negExamplesMapping.values()) {
                                        if (QueryTreeUtils.isSubsumedBy(negTree, bestMatchingTree.getTree())) {
                                            Files.append(sparqlQuery + "\n", new File("/tmp/negCovered.txt"),
                                                    Charsets.UTF_8);
                                            break;
                                        }
                                    }

                                    String bestQuery = QueryFactory
                                            .create(QueryTreeUtils.toSPARQLQueryString(
                                                    filter.apply(bestMatchingTree.getTree()),
                                                    dataset.getBaseIRI(), dataset.getPrefixMapping()))
                                            .toString();

                                    if (write2DB) {
                                        write2DB(sparqlQuery, nrOfExamples, examples, noise, baseLineQuery,
                                                baselineScore, heuristicName, measureName,
                                                QueryFactory.create(learnedSPARQLQuery).toString(), score,
                                                runtimeBestSolution, bestQuery, positionBestScore, bestScore);
                                    }

                                } catch (Exception e) {
                                    failed.set(true);
                                    logger.error("Error occured for query\n" + sparqlQuery, e);
                                    try {
                                        StringWriter sw = new StringWriter();
                                        PrintWriter pw = new PrintWriter(sw);
                                        e.printStackTrace(pw);
                                        Files.append(sparqlQuery + "\n" + sw.toString(),
                                                new File(benchmarkDirectory,
                                                        "failed-" + nrOfExamples + "-" + noise + "-"
                                                                + heuristicName + "-" + measureName + ".txt"),
                                                Charsets.UTF_8);
                                    } catch (IOException e1) {
                                        e1.printStackTrace();
                                    }
                                } finally {
                                    int cnt = currentNrOfFinishedRuns.incrementAndGet();
                                    logger.info("***********Evaluation Progress:"
                                            + NumberFormat.getPercentInstance()
                                                    .format((double) cnt / totalNrOfQTLRuns)
                                            + "(" + cnt + "/" + totalNrOfQTLRuns + ")" + "***********");
                                }
                            }
                        });
                    }

                    tp.shutdown();
                    tp.awaitTermination(12, TimeUnit.HOURS);
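                    // Note: awaitTermination(12, TimeUnit.HOURS) blocks until all submitted
                    // evaluation tasks have completed after the shutdown() request, until the
                    // 12-hour timeout elapses, or until the calling thread is interrupted,
                    // whichever happens first. The boolean result (true only if the pool
                    // actually terminated in time) is ignored here, so a timeout would pass
                    // silently and the statistics below could be computed from partial runs.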

                    Logger.getRootLogger().removeAppender(appender);

                    if (!failed.get()) {
                        String result = "";
                        result += "\nBaseline Precision:\n" + baselinePrecisionStats;
                        result += "\nBaseline Recall:\n" + baselineRecallStats;
                        result += "\nBaseline F-measure:\n" + baselineFMeasureStats;
                        result += "\nBaseline PredAcc:\n" + baselinePredAccStats;
                        result += "\nBaseline MathCorr:\n" + baselineMathCorrStats;

                        result += "#Returned solutions:\n" + nrOfReturnedSolutionsStats;

                        result += "\nOverall Precision:\n" + bestReturnedSolutionPrecisionStats;
                        result += "\nOverall Recall:\n" + bestReturnedSolutionRecallStats;
                        result += "\nOverall F-measure:\n" + bestReturnedSolutionFMeasureStats;
                        result += "\nOverall PredAcc:\n" + bestReturnedSolutionPredAccStats;
                        result += "\nOverall MathCorr:\n" + bestReturnedSolutionMathCorrStats;

                        result += "\nTime until best returned solution found:\n"
                                + bestReturnedSolutionRuntimeStats;

                        result += "\nPositions of best solution:\n"
                                + Arrays.toString(bestSolutionPositionStats.getValues());
                        result += "\nPosition of best solution stats:\n" + bestSolutionPositionStats;
                        result += "\nOverall Precision of best solution:\n" + bestSolutionPrecisionStats;
                        result += "\nOverall Recall of best solution:\n" + bestSolutionRecallStats;
                        result += "\nOverall F-measure of best solution:\n" + bestSolutionFMeasureStats;

                        result += "\nCBD generation time(total):\t"
                                + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getTotal()
                                + "\n";
                        result += "CBD generation time(avg):\t"
                                + MonitorFactory.getTimeMonitor(TimeMonitors.CBD_RETRIEVAL.name()).getAvg()
                                + "\n";
                        result += "Tree generation time(total):\t"
                                + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getTotal()
                                + "\n";
                        result += "Tree generation time(avg):\t"
                                + MonitorFactory.getTimeMonitor(TimeMonitors.TREE_GENERATION.name()).getAvg()
                                + "\n";
                        result += "Tree size(avg):\t" + treeSizeStats.getMean() + "\n";

                        logger.info(result);

                        try {
                            Files.write(result, statsFile, Charsets.UTF_8);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }

                        data[i][j] = bestReturnedSolutionFMeasureStats.getMean();

                        if (write2DB) {
                            write2DB(heuristicName, measureName, nrOfExamples, noise,
                                    bestReturnedSolutionFMeasureStats.getMean(),
                                    bestReturnedSolutionPrecisionStats.getMean(),
                                    bestReturnedSolutionRecallStats.getMean(),
                                    bestReturnedSolutionPredAccStats.getMean(),
                                    bestReturnedSolutionMathCorrStats.getMean(),
                                    bestSolutionPositionStats.getMean(), bestSolutionFMeasureStats.getMean(),
                                    bestSolutionPrecisionStats.getMean(), bestSolutionRecallStats.getMean(),
                                    bestSolutionPredAccStats.getMean(), bestSolutionMathCorrStats.getMean(),
                                    baselineFMeasureStats.getMean(), baselinePrecisionStats.getMean(),
                                    baselineRecallStats.getMean(), baselinePredAccStats.getMean(),
                                    baselineMathCorrStats.getMean(),
                                    bestReturnedSolutionRuntimeStats.getMean());
                        }
                    }
                }
            }

            String content = "###";
            String separator = "\t";
            for (double noiseInterval1 : noiseIntervals) {
                content += separator + noiseInterval1;
            }
            content += "\n";
            for (int i = 0; i < nrOfExamplesIntervals.length; i++) {
                content += nrOfExamplesIntervals[i];
                for (int j = 0; j < noiseIntervals.length; j++) {
                    content += separator + data[i][j];
                }
                content += "\n";
            }

            File examplesVsNoise = new File(benchmarkDirectory,
                    "examplesVsNoise-" + heuristicName + "-" + measureName + ".tsv");
            try {
                Files.write(content, examplesVsNoise, Charsets.UTF_8);
            } catch (IOException e) {
                logger.error("failed to write stats to file", e);
            }
        }
    }

    if (write2DB) {
        conn.close();
    }

    if (useEmailNotification) {
        sendFinishedMail();
    }
    long t2 = System.currentTimeMillis();
    long duration = t2 - t1;
    logger.info("QTL evaluation finished in " + DurationFormatUtils.formatDurationHMS(duration) + "ms.");
}
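
The example above hands the whole benchmark to a fixed thread pool, calls shutdown() to stop accepting new tasks, and then blocks in awaitTermination for up to 12 hours. Below is a minimal, self-contained sketch of the same shutdown/awaitTermination idiom; the class name and task bodies are illustrative placeholders, not part of the source above, and unlike the original it checks the boolean returned by awaitTermination and falls back to shutdownNow() when the timeout elapses:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AwaitTerminationSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int task = i; // copy the loop variable so the lambda can capture it
            pool.submit(() -> System.out.println("task " + task + " done"));
        }
        pool.shutdown(); // no new tasks accepted; already-queued tasks still run
        // Block up to 30 seconds for the queued tasks to finish. awaitTermination
        // returns false if the timeout elapsed first, in which case we cancel the
        // remaining tasks by interrupting the worker threads.
        if (!pool.awaitTermination(30, TimeUnit.SECONDS)) {
            pool.shutdownNow();
        }
    }
}

Checking the return value matters: awaitTermination only waits and never shuts the pool down by itself, so ignoring a false result (as the benchmark above does) would leave any unfinished tasks running.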