Example usage for java.util.concurrent ExecutorService execute

Introduction

On this page you can find example usage for java.util.concurrent ExecutorService execute.

Prototype

void execute(Runnable command);

Document

Executes the given command at some time in the future.
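
For orientation, here is a minimal, self-contained sketch of the method in use (the pool size and task body are arbitrary choices for illustration):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ExecuteDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int taskId = i;
            // execute() is fire-and-forget: unlike submit(), it returns no Future
            pool.execute(new Runnable() {
                @Override
                public void run() {
                    System.out.println("task " + taskId + " on " + Thread.currentThread().getName());
                }
            });
        }
        pool.shutdown();                             // stop accepting new tasks
        pool.awaitTermination(10, TimeUnit.SECONDS); // wait for queued tasks to finish
    }
}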

Usage

From source file: org.apache.accumulo.server.gc.SimpleGarbageCollector.java

/**
 * This method makes a best effort to remove files from the filesystem that have been confirmed for deletion.
 */
private void deleteFiles(SortedSet<String> confirmedDeletes) {
    // create a batchwriter to remove the delete flags for successful
    // deletes
    BatchWriter writer = null;
    if (!offline) {
        Connector c;
        try {
            c = instance.getConnector(SecurityConstants.getSystemCredentials());
            writer = c.createBatchWriter(Constants.METADATA_TABLE_NAME, 10000000, 60000L, 3);
        } catch (Exception e) {
            log.error("Unable to create writer to remove file from the !METADATA table", e);
        }
    }

    // when deleting a dir and all files in that dir, only need to delete the dir
    // the dir will sort right before the files... so remove the files in this case
    // to minimize namenode ops
    Iterator<String> cdIter = confirmedDeletes.iterator();
    String lastDir = null;
    while (cdIter.hasNext()) {
        String delete = cdIter.next();
        if (isDir(delete)) {
            lastDir = delete;
        } else if (lastDir != null) {
            if (delete.startsWith(lastDir)) {
                log.debug("Ignoring " + delete + " because " + lastDir + " exist");
                Mutation m = new Mutation(new Text(Constants.METADATA_DELETE_FLAG_PREFIX + delete));
                m.putDelete(EMPTY_TEXT, EMPTY_TEXT);
                try {
                    writer.addMutation(m);
                } catch (MutationsRejectedException e) {
                    throw new RuntimeException(e);
                }
                cdIter.remove();
            } else {
                lastDir = null;
            }

        }
    }

    final BatchWriter finalWriter = writer;

    ExecutorService deleteThreadPool = Executors.newFixedThreadPool(numDeleteThreads);

    for (final String delete : confirmedDeletes) {

        Runnable deleteTask = new Runnable() {
            @Override
            public void run() {
                boolean removeFlag;

                log.debug("Deleting " + ServerConstants.getTablesDir() + delete);
                try {

                    Path p = new Path(ServerConstants.getTablesDir() + delete);

                    if (fs.delete(p, true)) {
                        // delete succeeded, still want to delete
                        removeFlag = true;
                        synchronized (SimpleGarbageCollector.this) {
                            ++status.current.deleted;
                        }
                    } else if (fs.exists(p)) {
                        // leave the entry in the METADATA table; we'll try again
                        // later
                        removeFlag = false;
                        synchronized (SimpleGarbageCollector.this) {
                            ++status.current.errors;
                        }
                        log.warn("File exists, but was not deleted for an unknown reason: " + p);
                    } else {
                        // despite this failure, we still want to remove the
                        // METADATA table entry
                        removeFlag = true;
                        synchronized (SimpleGarbageCollector.this) {
                            ++status.current.errors;
                        }
                        String[] parts = delete.split("/");
                        if (parts.length > 1) {
                            String tableId = parts[1];
                            TableManager.getInstance().updateTableStateCache(tableId);
                            TableState tableState = TableManager.getInstance().getTableState(tableId);
                            if (tableState != null && tableState != TableState.DELETING)
                                log.warn("File doesn't exist: " + p);
                        } else {
                            log.warn("Very strange path name: " + delete);
                        }
                    }

                    // proceed to clearing out the flags for successful deletes and
                    // non-existent files
                    if (removeFlag && finalWriter != null) {
                        Mutation m = new Mutation(new Text(Constants.METADATA_DELETE_FLAG_PREFIX + delete));
                        m.putDelete(EMPTY_TEXT, EMPTY_TEXT);
                        finalWriter.addMutation(m);
                    }
                } catch (Exception e) {
                    log.error(e, e);
                }

            }
        };

        deleteThreadPool.execute(deleteTask);
    }

    deleteThreadPool.shutdown();

    try {
        while (!deleteThreadPool.awaitTermination(1000, TimeUnit.MILLISECONDS)) {
        }
    } catch (InterruptedException e1) {
        log.error(e1, e1);
    }

    if (writer != null) {
        try {
            writer.close();
        } catch (MutationsRejectedException e) {
            log.error("Problem removing entries from the metadata table: ", e);
        }
    }
}
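
A small detail worth noting in the example above: writer is assigned conditionally earlier in the method, so the anonymous Runnable cannot capture it directly; anonymous classes may only reference final (or, since Java 8, effectively final) locals, hence the finalWriter snapshot. A minimal illustration of the same idiom (all names here are made up):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CaptureDemo {
    public static void main(String[] args) {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        String message = null;
        message = "hello";                   // reassigned, so not (effectively) final
        final String finalMessage = message; // snapshot the reference so it can be captured
        pool.execute(new Runnable() {
            @Override
            public void run() {
                System.out.println(finalMessage); // only (effectively) final locals may appear here
            }
        });
        pool.shutdown();
    }
}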

From source file: de.huberlin.cuneiform.compiler.local.LocalDispatcher.java

protected Set<JsonReportEntry> dispatch(Invocation invocation)
        throws IOException, InterruptedException, NotDerivableException, JSONException {

    File scriptFile;
    Process process;
    int exitValue;
    Set<JsonReportEntry> report;
    String line;
    String[] arg;
    String value;
    int i;
    StringBuffer buf;
    File location;
    File reportFile;
    StreamConsumer stdoutConsumer, errConsumer;
    ExecutorService executor;
    String signature;
    Path srcPath, destPath;
    File successMarker;

    if (invocation == null)
        throw new NullPointerException("Invocation must not be null.");

    if (!invocation.isReady())
        throw new RuntimeException("Cannot dispatch invocation that is not ready.");

    location = new File(buildDir.getAbsolutePath() + "/" + invocation.getSignature());
    successMarker = new File(location.getAbsolutePath() + "/" + SUCCESS_FILENAME);
    reportFile = new File(location.getAbsolutePath() + "/" + Invocation.REPORT_FILENAME);

    if (!successMarker.exists()) {

        if (location.exists())
            FileUtils.deleteDirectory(location);

        if (!location.mkdirs())
            throw new IOException("Could not create invocation location.");

        scriptFile = new File(location.getAbsolutePath() + "/" + SCRIPT_FILENAME);

        try (BufferedWriter writer = new BufferedWriter(new FileWriter(scriptFile, false))) {

            // write out the script
            writer.write(invocation.toScript());

        }

        scriptFile.setExecutable(true);

        for (String filename : invocation.getStageInList()) {

            if (filename.charAt(0) != '/' && filename.indexOf('_') >= 0) {

                signature = filename.substring(0, filename.indexOf('_'));

                srcPath = FileSystems.getDefault()
                        .getPath(buildDir.getAbsolutePath() + "/" + signature + "/" + filename);
                destPath = FileSystems.getDefault()
                        .getPath(buildDir.getAbsolutePath() + "/" + invocation.getSignature() + "/" + filename);
                Files.createSymbolicLink(destPath, srcPath);
            }
        }

        arg = new String[] { "/usr/bin/time", "-a", "-o",
                location.getAbsolutePath() + "/" + Invocation.REPORT_FILENAME, "-f",
                "{" + JsonReportEntry.ATT_TIMESTAMP + ":" + System.currentTimeMillis() + ","
                        + JsonReportEntry.ATT_RUNID + ":\"" + invocation.getDagId() + "\","
                        + JsonReportEntry.ATT_TASKID + ":" + invocation.getTaskNodeId() + ","
                        + JsonReportEntry.ATT_TASKNAME + ":\"" + invocation.getTaskName() + "\","
                        + JsonReportEntry.ATT_LANG + ":\"" + invocation.getLangLabel() + "\","
                        + JsonReportEntry.ATT_INVOCID + ":" + invocation.getSignature() + ","
                        + JsonReportEntry.ATT_KEY + ":\"" + JsonReportEntry.KEY_INVOC_TIME + "\","
                        + JsonReportEntry.ATT_VALUE + ":" + "{\"realTime\":%e,\"userTime\":%U,\"sysTime\":%S,"
                        + "\"maxResidentSetSize\":%M,\"avgResidentSetSize\":%t,"
                        + "\"avgDataSize\":%D,\"avgStackSize\":%p,\"avgTextSize\":%X,"
                        + "\"nMajPageFault\":%F,\"nMinPageFault\":%R,"
                        + "\"nSwapOutMainMem\":%W,\"nForcedContextSwitch\":%c,"
                        + "\"nWaitContextSwitch\":%w,\"nIoRead\":%I,\"nIoWrite\":%O,"
                        + "\"nSocketRead\":%r,\"nSocketWrite\":%s,\"nSignal\":%k}}",
                scriptFile.getAbsolutePath() };

        // run script
        process = Runtime.getRuntime().exec(arg, null, location);

        executor = Executors.newCachedThreadPool();

        stdoutConsumer = new StreamConsumer(process.getInputStream());
        executor.execute(stdoutConsumer);

        errConsumer = new StreamConsumer(process.getErrorStream());
        executor.execute(errConsumer);

        executor.shutdown();

        exitValue = process.waitFor();
        if (!executor.awaitTermination(4, TimeUnit.SECONDS))
            throw new RuntimeException("Consumer threads did not finish orderly.");

        try (BufferedWriter reportWriter = new BufferedWriter(new FileWriter(reportFile, true))) {

            if (exitValue != 0) {

                System.err.println("[script]");

                try (BufferedReader reader = new BufferedReader(new StringReader(invocation.toScript()))) {

                    i = 0;
                    while ((line = reader.readLine()) != null)
                        System.err.println(String.format("%02d  %s", ++i, line));
                }

                System.err.println("[out]");
                try (BufferedReader reader = new BufferedReader(
                        new StringReader(stdoutConsumer.getContent()))) {

                    while ((line = reader.readLine()) != null)
                        System.err.println(line);
                }

                System.err.println("[err]");
                try (BufferedReader reader = new BufferedReader(new StringReader(errConsumer.getContent()))) {

                    while ((line = reader.readLine()) != null)
                        System.err.println(line);
                }

                System.err.println("[end]");

                throw new RuntimeException("Invocation of task '" + invocation.getTaskName()
                        + "' with signature " + invocation.getSignature()
                        + " terminated with non-zero exit value. Exit value was " + exitValue + ".");
            }

            try (BufferedReader reader = new BufferedReader(new StringReader(stdoutConsumer.getContent()))) {

                buf = new StringBuffer();
                while ((line = reader.readLine()) != null)
                    buf.append(line.replaceAll("\\\\", "\\\\\\\\").replaceAll("\"", "\\\\\"")).append('\n');

                value = buf.toString();
                if (!value.isEmpty())

                    reportWriter.write(new JsonReportEntry(invocation, JsonReportEntry.KEY_INVOC_STDOUT, value)
                            .toString());
            }
            try (BufferedReader reader = new BufferedReader(new StringReader(errConsumer.getContent()))) {

                buf = new StringBuffer();
                while ((line = reader.readLine()) != null)
                    buf.append(line.replaceAll("\\\\", "\\\\\\\\").replaceAll("\"", "\\\\\"")).append('\n');

                value = buf.toString();
                if (!value.isEmpty())

                    reportWriter.write(new JsonReportEntry(invocation, JsonReportEntry.KEY_INVOC_STDERR, value)
                            .toString());
            }

        }
    }

    // gather report
    report = new HashSet<>();
    try (BufferedReader reader = new BufferedReader(new FileReader(reportFile))) {

        while ((line = reader.readLine()) != null) {

            line = line.trim();

            if (line.isEmpty())
                continue;

            report.add(new JsonReportEntry(line));
        }

    }

    invocation.evalReport(report);

    if (!successMarker.exists())
        if (!successMarker.createNewFile())
            throw new IOException("Could not create success marker.");

    return report;
}
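
The StreamConsumer tasks above matter for correctness: if a child process's stdout and stderr are not drained concurrently, the OS pipe buffers can fill up and process.waitFor() can deadlock. The class itself is not shown in this excerpt; the following is only a plausible sketch of such a consumer (its actual fields and buffering strategy may differ):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

// Drains an InputStream on a pool thread so the child process never blocks on a full pipe.
class StreamConsumer implements Runnable {
    private final InputStream in;
    private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();

    StreamConsumer(InputStream in) {
        this.in = in;
    }

    @Override
    public void run() {
        byte[] chunk = new byte[4096];
        int n;
        try {
            while ((n = in.read(chunk)) != -1) {
                buffer.write(chunk, 0, n);
            }
        } catch (IOException e) {
            // keep whatever was captured; the dispatcher only reads the content
        }
    }

    // Called after the executor has terminated, so no extra synchronization is needed here.
    String getContent() {
        return buffer.toString();
    }
}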

From source file: MSUmpire.DIA.DIAPack.java

public void TargetedExtractionQuant(boolean export, FragmentLibManager libManager, float ReSearchProb,
        float RTWindow) throws IOException, SQLException, XmlPullParserException {
    if (IDsummary.GetMappedPepIonList().isEmpty()) {
        Logger.getRootLogger().error("There is no peptide ion for targeted re-extraction.");
        return;
    }
    parameter.RT_window_Targeted = RTWindow;
    GenerateClusterScanNomapping();
    ExecutorService executorPool = null;

    //Targeted re-extraction scoring
    TScoring = new TargetMatchScoring(Filename, libManager.LibID);

    if (parameter.UseOldVersion) {
        TScoring.SetUseOldVersion();
    }
    Logger.getRootLogger().info("No. of identified peptide ions: " + IDsummary.GetPepIonList().size());
    Logger.getRootLogger().info("No. of mapped peptide ions: " + IDsummary.GetMappedPepIonList().size());
    ArrayList<PepIonID> SearchList = new ArrayList<>();
    //For each peptide ion in targeted re-extraction, determine whether to re-search it given a re-search probability threshold
    for (PepIonID pepIonID : IDsummary.GetMappedPepIonList().values()) {
        if (libManager.PeptideFragmentLib.containsKey(pepIonID.GetKey())
                && libManager.GetFragmentLib(pepIonID.GetKey()).FragmentGroups.size() >= 3
                && pepIonID.TargetedProbability() < ReSearchProb) {
            pepIonID.CreateQuantInstance(parameter.MaxNoPeakCluster);
            pepIonID.MS1PeakClusters = new ArrayList<>();
            pepIonID.MS2UnfragPeakClusters = new ArrayList<>();
            pepIonID.UScoreProbability_MS1 = 0f;
            pepIonID.MS1AlignmentProbability = 0f;
            pepIonID.UScoreProbability_MS2 = 0f;
            pepIonID.MS2AlignmentProbability = 0f;
            pepIonID.TPPModSeq = "Ext";
            SearchList.add(pepIonID);
        }
    }
    Logger.getRootLogger().info("No. of searchable peptide ions: " + SearchList.size());

    for (LCMSPeakDIAMS2 DIAWindow : DIAWindows) {
        Logger.getRootLogger().info("Assigning clusters for peak groups in MS2 isolation window:"
                + FilenameUtils.getBaseName(DIAWindow.ScanCollectionName));

        if (!DIAWindow.ReadPeakCluster() || !DIAWindow.ReadPrecursorFragmentClu2Cur()) {
            Logger.getRootLogger().warn("Reading results for " + DIAWindow.ScanCollectionName + " failed");
            continue;
        }

        executorPool = Executors.newFixedThreadPool(NoCPUs);
        //For each target peptide ion
        for (PepIonID pepIonID : SearchList) {
            if (DIAWindow.DIA_MZ_Range.getX() <= pepIonID.NeutralPrecursorMz()
                    && DIAWindow.DIA_MZ_Range.getY() >= pepIonID.NeutralPrecursorMz()) {
                //If the peptide ion's spectrum in the spectral library has at least three fragment peaks
                if (libManager.GetFragmentLib(pepIonID.GetKey()).FragmentGroups.size() >= 3) {
                    //U-score spectral matching
                    UmpireSpecLibMatch matchunit = new UmpireSpecLibMatch(MS1FeatureMap, DIAWindow, pepIonID,
                            libManager.GetFragmentLib(pepIonID.GetKey()),
                            libManager.GetDecoyFragmentLib(pepIonID.GetKey()), parameter);
                    executorPool.execute(matchunit);
                    TScoring.libTargetMatches.add(matchunit);
                } else {
                    Logger.getRootLogger()
                            .warn("skipping " + pepIonID.GetKey() + ", it has only "
                                    + libManager.GetFragmentLib(pepIonID.GetKey()).FragmentGroups.size()
                                    + " matched fragments");
                }
            }
        }

        //For each identified peptide ion, calculate its U-score for LDA training
        for (PepIonID pepIonID : IDsummary.GetPepIonList().values()) {
            if (libManager.PeptideFragmentLib.containsKey(pepIonID.GetKey())
                    && DIAWindow.DIA_MZ_Range.getX() <= pepIonID.NeutralPrecursorMz()
                    && DIAWindow.DIA_MZ_Range.getY() >= pepIonID.NeutralPrecursorMz()) {
                //If the peptide ion's spectrum in the spectral library has at least three fragment peaks
                if (libManager.GetFragmentLib(pepIonID.GetKey()).FragmentGroups.size() >= 3) {
                    //U-score spectral matching
                    UmpireSpecLibMatch matchunit = new UmpireSpecLibMatch(MS1FeatureMap, DIAWindow, pepIonID,
                            libManager.GetFragmentLib(pepIonID.GetKey()),
                            libManager.GetDecoyFragmentLib(pepIonID.GetKey()), parameter);
                    matchunit.IdentifiedPeptideIon = true;
                    executorPool.execute(matchunit);
                    TScoring.libIDMatches.add(matchunit);
                } else {
                    Logger.getRootLogger()
                            .warn("skipping " + pepIonID.GetKey() + ", it has only "
                                    + libManager.GetFragmentLib(pepIonID.GetKey()).FragmentGroups.size()
                                    + " matched fragments");
                }
            }
        }
        executorPool.shutdown();

        try {
            executorPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        } catch (InterruptedException e) {
            Logger.getRootLogger().info("interrupted..");
        }
        DIAWindow.ClearAllPeaks();
    }

    Logger.getRootLogger().info("Removing entries with no precursor signal hits: total target entries: "
            + TScoring.libTargetMatches.size());
    ArrayList<UmpireSpecLibMatch> newlist = new ArrayList<>();
    for (UmpireSpecLibMatch match : TScoring.libTargetMatches) {
        if (!match.DecoyHits.isEmpty() || !match.TargetHits.isEmpty()) {
            newlist.add(match);
        }
    }
    TScoring.libTargetMatches = newlist;
    Logger.getRootLogger().info("Remaining entries: " + TScoring.libTargetMatches.size());

    //U-score and probability calculation
    TScoring.Process();
    TargetHitPepXMLWriter pepxml = new TargetHitPepXMLWriter(GetiProphExtPepxml(libManager.LibID),
            IDsummary.FastaPath, IDsummary.DecoyTag, TScoring);
    TScoring = null;
    executorPool = Executors.newFixedThreadPool(NoCPUs);

    //Assign precursor peak cluster, extract fragments and do quantification
    for (PepIonID pepIonID : IDsummary.GetMappedPepIonList().values()) {
        DIAAssignQuantUnit quantunit = new DIAAssignQuantUnit(pepIonID, MS1FeatureMap, parameter);
        executorPool.execute(quantunit);
    }
    for (PepIonID pepIonID : IDsummary.GetPepIonList().values()) {
        DIAAssignQuantUnit quantunit = new DIAAssignQuantUnit(pepIonID, MS1FeatureMap, parameter);
        executorPool.execute(quantunit);
    }
    executorPool.shutdown();

    try {
        executorPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        Logger.getRootLogger().info("interrupted..");
    }

    if (export) {
        ExportID();
    }
}

From source file: org.apache.hadoop.hive.ql.exec.StatsNoJobTask.java

private int aggregateStats(ExecutorService threadPool) {
    int ret = 0;

    try {
        Collection<Partition> partitions = null;
        if (work.getPrunedPartitionList() == null) {
            partitions = getPartitionsList();
        } else {
            partitions = work.getPrunedPartitionList().getPartitions();
        }

        // non-partitioned table
        if (partitions == null) {
            org.apache.hadoop.hive.metastore.api.Table tTable = table.getTTable();
            Map<String, String> parameters = tTable.getParameters();
            try {
                Path dir = new Path(tTable.getSd().getLocation());
                long numRows = 0;
                long rawDataSize = 0;
                long fileSize = 0;
                long numFiles = 0;
                FileSystem fs = dir.getFileSystem(conf);
                List<FileStatus> fileList = ShimLoader.getHadoopShims().listLocatedStatus(fs, dir,
                        hiddenFileFilter);
                boolean statsAvailable = false;
                for (FileStatus file : fileList) {
                    if (!file.isDir()) {
                        InputFormat<?, ?> inputFormat = (InputFormat<?, ?>) ReflectionUtil
                                .newInstance(table.getInputFormatClass(), jc);
                        InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0,
                                new String[] { table.getDataLocation().toString() });
                        org.apache.hadoop.mapred.RecordReader<?, ?> recordReader = (org.apache.hadoop.mapred.RecordReader<?, ?>) inputFormat
                                .getRecordReader(dummySplit, jc, Reporter.NULL);
                        StatsProvidingRecordReader statsRR;
                        if (recordReader instanceof StatsProvidingRecordReader) {
                            statsRR = (StatsProvidingRecordReader) recordReader;
                            numRows += statsRR.getStats().getRowCount();
                            rawDataSize += statsRR.getStats().getRawDataSize();
                            fileSize += file.getLen();
                            numFiles += 1;
                            statsAvailable = true;
                        }
                        recordReader.close();
                    }
                }

                if (statsAvailable) {
                    parameters.put(StatsSetupConst.ROW_COUNT, String.valueOf(numRows));
                    parameters.put(StatsSetupConst.RAW_DATA_SIZE, String.valueOf(rawDataSize));
                    parameters.put(StatsSetupConst.TOTAL_SIZE, String.valueOf(fileSize));
                    parameters.put(StatsSetupConst.NUM_FILES, String.valueOf(numFiles));
                    parameters.put(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK, StatsSetupConst.TRUE);

                    db.alterTable(tableFullName, new Table(tTable));

                    String msg = "Table " + tableFullName + " stats: [" + toString(parameters) + ']';
                    LOG.debug(msg);
                    console.printInfo(msg);
                } else {
                    String msg = "Table " + tableFullName + " does not provide stats.";
                    LOG.debug(msg);
                }
            } catch (Exception e) {
                console.printInfo("[Warning] could not update stats for " + tableFullName + ".",
                        "Failed with exception " + e.getMessage() + "\n" + StringUtils.stringifyException(e));
            }
        } else {

            // Partitioned table
            for (Partition partn : partitions) {
                threadPool.execute(new StatsCollection(partn));
            }

            LOG.debug("Stats collection waiting for threadpool to shutdown..");
            shutdownAndAwaitTermination(threadPool);
            LOG.debug("Stats collection threadpool shutdown successful.");

            ret = updatePartitions();
        }

    } catch (Exception e) {
        // Fail the query if the stats are supposed to be reliable
        if (work.isStatsReliable()) {
            ret = -1;
        }
    }

    // The return value of 0 indicates success,
    // anything else indicates failure
    return ret;
}
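
The shutdownAndAwaitTermination(threadPool) helper is not shown in this excerpt. A plausible sketch of it, following the two-phase shutdown pattern from the ExecutorService Javadoc (the 60-second timeouts are placeholders, not necessarily Hive's actual values):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;

final class PoolUtil {
    static void shutdownAndAwaitTermination(ExecutorService pool) {
        pool.shutdown(); // reject new tasks, but let queued ones finish
        try {
            if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
                pool.shutdownNow(); // cancel tasks that are still running
                if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
                    System.err.println("Pool did not terminate");
                }
            }
        } catch (InterruptedException ie) {
            pool.shutdownNow();
            Thread.currentThread().interrupt(); // preserve the interrupt status
        }
    }
}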

From source file: org.fao.geonet.kernel.DataManager.java

/**
 * TODO javadoc.
 *
 * @param context
 * @param ids
 */
public void batchRebuild(ServiceContext context, List<String> ids) {

    // split the reindexing task according to the number of processors we can assign
    int threadCount = ThreadUtils.getNumberOfThreads();
    ExecutorService executor = Executors.newFixedThreadPool(threadCount);

    int perThread;
    if (ids.size() < threadCount)
        perThread = ids.size();
    else
        perThread = ids.size() / threadCount;
    int index = 0;

    while (index < ids.size()) {
        int start = index;
        int count = Math.min(perThread, ids.size() - start);
        // create threads to process this chunk of ids
        Runnable worker = new IndexMetadataTask(context, ids, start, count);
        executor.execute(worker);
        index += count;
    }

    executor.shutdown();
}

From source file: be.agiv.security.demo.Main.java

private void invokeClaimsAwareService() {
    GridBagLayout gridBagLayout = new GridBagLayout();
    GridBagConstraints gridBagConstraints = new GridBagConstraints();
    JPanel contentPanel = new JPanel(gridBagLayout);

    final JLabel ipStsLabel = new JLabel("IP-STS:");
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.anchor = GridBagConstraints.WEST;
    gridBagConstraints.ipadx = 5;
    gridBagLayout.setConstraints(ipStsLabel, gridBagConstraints);
    contentPanel.add(ipStsLabel);

    final JTextField ipStsTextField = new JTextField(
            "https://auth.beta.agiv.be/ipsts/Services/DaliSecurityTokenServiceConfiguration.svc/IWSTrust13",
            60);
    gridBagConstraints.gridx++;
    gridBagLayout.setConstraints(ipStsTextField, gridBagConstraints);
    contentPanel.add(ipStsTextField);

    JLabel realmLabel = new JLabel("Realm:");
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagLayout.setConstraints(realmLabel, gridBagConstraints);
    contentPanel.add(realmLabel);

    JTextField realmTextField = new JTextField(AGIVSecurity.BETA_REALM, 30);
    gridBagConstraints.gridx++;
    gridBagLayout.setConstraints(realmTextField, gridBagConstraints);
    contentPanel.add(realmTextField);

    final CredentialPanel credentialPanel = new CredentialPanel();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagConstraints.gridwidth = GridBagConstraints.REMAINDER;
    gridBagLayout.setConstraints(credentialPanel, gridBagConstraints);
    contentPanel.add(credentialPanel);

    final JLabel rStsLabel = new JLabel("R-STS:");
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagConstraints.gridwidth = 1;
    gridBagLayout.setConstraints(rStsLabel, gridBagConstraints);
    contentPanel.add(rStsLabel);

    final JTextField rStsTextField = new JTextField(
            "https://auth.beta.agiv.be/sts/Services/SalvadorSecurityTokenServiceConfiguration.svc/IWSTrust13",
            60);
    gridBagConstraints.gridx++;
    gridBagLayout.setConstraints(rStsTextField, gridBagConstraints);
    contentPanel.add(rStsTextField);

    JLabel serviceRealmLabel = new JLabel("Service realm:");
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagLayout.setConstraints(serviceRealmLabel, gridBagConstraints);
    contentPanel.add(serviceRealmLabel);

    JTextField serviceRealmTextField = new JTextField(ClaimsAwareServiceFactory.SERVICE_REALM, 60);
    gridBagConstraints.gridx++;
    gridBagLayout.setConstraints(serviceRealmTextField, gridBagConstraints);
    contentPanel.add(serviceRealmTextField);

    JLabel urlLabel = new JLabel("Service URL:");
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagLayout.setConstraints(urlLabel, gridBagConstraints);
    contentPanel.add(urlLabel);

    JTextField urlTextField = new JTextField(ClaimsAwareServiceFactory.SERVICE_LOCATION, 60);
    gridBagConstraints.gridx++;
    gridBagLayout.setConstraints(urlTextField, gridBagConstraints);
    contentPanel.add(urlTextField);

    final JCheckBox noWsPolicyCheckBox = new JCheckBox("WSDL without WS-Policy");
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagConstraints.gridwidth = GridBagConstraints.REMAINDER;
    gridBagLayout.setConstraints(noWsPolicyCheckBox, gridBagConstraints);
    contentPanel.add(noWsPolicyCheckBox);

    final JCheckBox useWsSecureConversationCheckBox = new JCheckBox("Use WS-SecureConversation");
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagConstraints.gridwidth = GridBagConstraints.REMAINDER;
    gridBagLayout.setConstraints(useWsSecureConversationCheckBox, gridBagConstraints);
    contentPanel.add(useWsSecureConversationCheckBox);

    final JCheckBox usePreviousSecurityCheckBox = new JCheckBox("Use previous AGIV Security");
    final JCheckBox cancelPreviousSecureConversationToken = new JCheckBox("Cancel previous conversation token");
    usePreviousSecurityCheckBox.setEnabled(null != this.agivSecurity);
    cancelPreviousSecureConversationToken.setEnabled(false);
    usePreviousSecurityCheckBox.addActionListener(new ActionListener() {

        public void actionPerformed(ActionEvent e) {
            LOG.debug("use previous security: " + usePreviousSecurityCheckBox.isSelected());
            boolean newSecurity = !usePreviousSecurityCheckBox.isSelected();
            ipStsLabel.setEnabled(newSecurity);
            ipStsTextField.setEditable(newSecurity);
            credentialPanel.setEnabled(newSecurity);
            rStsLabel.setEnabled(newSecurity);
            rStsTextField.setEnabled(newSecurity);
            cancelPreviousSecureConversationToken.setEnabled(!newSecurity);
        }
    });
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagConstraints.gridwidth = GridBagConstraints.REMAINDER;
    gridBagLayout.setConstraints(usePreviousSecurityCheckBox, gridBagConstraints);
    contentPanel.add(usePreviousSecurityCheckBox);

    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagConstraints.gridwidth = GridBagConstraints.REMAINDER;
    gridBagLayout.setConstraints(cancelPreviousSecureConversationToken, gridBagConstraints);
    contentPanel.add(cancelPreviousSecureConversationToken);

    JPanel expiresPanel = new JPanel();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy++;
    gridBagConstraints.gridwidth = 2;
    gridBagLayout.setConstraints(expiresPanel, gridBagConstraints);
    contentPanel.add(expiresPanel);

    JLabel expiresLabelLabel = new JLabel("Secure conversation token expires:");
    expiresLabelLabel.setEnabled(null != this.agivSecurity);
    expiresPanel.add(expiresLabelLabel);

    JLabel expiresLabel = new JLabel();
    expiresLabel.setEnabled(null != this.agivSecurity);
    expiresPanel.add(expiresLabel);
    if (null != this.agivSecurity) {
        if (false == this.agivSecurity.getSecureConversationTokens().isEmpty()) {
            SecurityToken secureConversationToken = this.agivSecurity.getSecureConversationTokens().values()
                    .iterator().next();
            expiresLabel.setText(secureConversationToken.getExpires().toString());
        }
    }

    int dialogResult = JOptionPane.showConfirmDialog(this, contentPanel, "Claims Aware Service",
            JOptionPane.OK_CANCEL_OPTION);
    if (dialogResult == JOptionPane.CANCEL_OPTION) {
        return;
    }

    final String location = urlTextField.getText();
    final String serviceRealm = serviceRealmTextField.getText();
    final String ipStsLocation = ipStsTextField.getText();
    final String rStsLocation = rStsTextField.getText();
    final String username = credentialPanel.getUsername();
    final String password = credentialPanel.getPassword();
    final File pkcs12File = credentialPanel.getPKCS12File();
    final String realm = realmTextField.getText();

    ExecutorService executor = Executors.newFixedThreadPool(1);
    FutureTask<ArrayOfClaimInfo> futureTask = new FutureTask<ArrayOfClaimInfo>(
            new Callable<ArrayOfClaimInfo>() {

                public ArrayOfClaimInfo call() throws Exception {
                    Service service;
                    if (noWsPolicyCheckBox.isSelected()) {
                        service = ClaimsAwareServiceFactory.getInstanceNoWSPolicy();
                    } else {
                        service = ClaimsAwareServiceFactory.getInstance();
                    }
                    IService iservice = service.getWS2007FederationHttpBindingIService(new AddressingFeature());
                    BindingProvider bindingProvider = (BindingProvider) iservice;

                    if (false == usePreviousSecurityCheckBox.isSelected()) {
                        if (null != username) {
                            Main.this.agivSecurity = new AGIVSecurity(ipStsLocation, rStsLocation, realm,
                                    username, password);
                        } else {
                            Main.this.agivSecurity = new AGIVSecurity(ipStsLocation, rStsLocation, realm,
                                    pkcs12File, password);
                        }
                        Main.this.agivSecurity.addSTSListener(Main.this);
                        if (Main.this.proxyEnable) {
                            agivSecurity.setProxy(Main.this.proxyHost, Main.this.proxyPort,
                                    Main.this.proxyType);
                        }
                    }
                    if (cancelPreviousSecureConversationToken.isSelected()) {
                        Main.this.agivSecurity.cancelSecureConversationTokens();
                    }
                    Main.this.agivSecurity.enable(bindingProvider, location,
                            useWsSecureConversationCheckBox.isSelected(), serviceRealm);

                    ArrayOfClaimInfo result = iservice.getData(0);
                    return result;
                }
            }) {

        @Override
        protected void done() {
            try {
                ArrayOfClaimInfo result = get();
                List<ClaimInfo> claims = result.getClaimInfo();
                StringBuffer message = new StringBuffer();
                for (ClaimInfo claim : claims) {
                    message.append(claim.getName());
                    message.append(" = ");
                    message.append(claim.getValue());
                    message.append("\n");
                }

                JOptionPane.showMessageDialog(Main.this, message.toString(), "Claims Aware Service Result",
                        JOptionPane.INFORMATION_MESSAGE);
            } catch (final Exception e) {
                try {
                    SwingUtilities.invokeAndWait(new Runnable() {

                        public void run() {
                            Main.this.statusBar.setErrorStatus(e.getMessage());
                        }
                    });
                } catch (Exception e1) {
                }
                showException(e);
            }
        }
    };
    executor.execute(futureTask);
}
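
The pattern above is worth isolating: a Callable is wrapped in a FutureTask, done() is overridden as a completion callback, and the task is handed to execute(), which accepts it because FutureTask implements Runnable. A stripped-down sketch without the Swing-specific dialog and error handling:

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;

public class FutureTaskDemo {
    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(1);
        FutureTask<String> task = new FutureTask<String>(new Callable<String>() {
            public String call() throws Exception {
                return "result computed off the caller's thread";
            }
        }) {
            @Override
            protected void done() {
                // invoked on the pool thread once call() has completed or failed
                try {
                    System.out.println(get());
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        executor.execute(task); // FutureTask implements Runnable, so execute() accepts it
        executor.shutdown();
    }
}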

From source file: org.dllearner.scripts.evaluation.EnrichmentEvaluationMultithreaded.java

private void evaluateClasses(final SparqlEndpointKS ks)
        throws IllegalArgumentException, SecurityException, InstantiationException, IllegalAccessException,
        InvocationTargetException, NoSuchMethodException, ComponentInitException, InterruptedException {
    Set<NamedClass> classes = new SPARQLTasks(ks.getEndpoint()).getAllClasses();
    logger.info("Evaluating " + classes.size() + " classes...");
    for (final Class<? extends LearningAlgorithm> algorithmClass : classAlgorithms) {
        ExecutorService threadPool = null;
        if (algorithmClass == CELOE.class) {
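            // CELOE is applied synchronously on the caller's thread below,
            // so no thread pool is created for it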

        } else {
            threadPool = Executors.newFixedThreadPool(maxNrOfThreads);
        }
        int classesCnt = 0;
        Thread.sleep(5000);

        Set<OWLAxiom> axioms = new HashSet<OWLAxiom>();
        algorithm2Ontology.put(algorithmClass, axioms);

        for (final NamedClass cls : classes) {
            try {
                String algName = "";
                if (algorithmClass == CELOE.class) {
                    algName = CELOE.class.getAnnotation(ComponentAnn.class).name();
                } else {
                    LearningAlgorithm learner = algorithmClass.getConstructor(SparqlEndpointKS.class)
                            .newInstance(ks);
                    algName = AnnComponentManager.getName(learner);
                }
                List<EvaluatedAxiom> learnedAxioms = new ArrayList<EvaluatedAxiom>();
                boolean emptyEntity = sparqlReasoner.getPopularity(cls) == 0;
                if (emptyEntity) {
                    logger.warn("Empty entity: " + cls);
                    writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "EMPTY_ENTITY", 0, 0,
                            false);
                } else {
                    long startTime = System.currentTimeMillis();
                    boolean timeout = false;
                    if (algorithmClass == CELOE.class) {
                        logger.info("Applying " + algName + " on " + cls + " ... ");
                        learnedAxioms = applyCELOE(ks, cls, false);
                        long runTime = System.currentTimeMillis() - startTime;
                        if (timeout && learnedAxioms.isEmpty()) {
                            writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "TIMEOUT", 0,
                                    runTime, false);
                        } else if (learnedAxioms == null || learnedAxioms.isEmpty()) {
                            writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes), algName, "NULL", 0,
                                    runTime, false);
                        } else {
                            for (EvaluatedAxiom learnedAxiom : learnedAxioms) {
                                double score = learnedAxiom.getScore().getAccuracy();
                                if (Double.isNaN(score)) {
                                    score = -1;
                                }
                                writeToDB(cls.toManchesterSyntaxString(baseURI, prefixes).toString(), algName,
                                        learnedAxiom.getAxiom().toManchesterSyntaxString(baseURI, prefixes),
                                        score, runTime, isEntailed(learnedAxiom));
                            }
                        }
                    } else {
                        threadPool.execute(new Runnable() {

                            @Override
                            public void run() {
                                String algName = "";
                                try {
                                    LearningAlgorithm learner = algorithmClass
                                            .getConstructor(SparqlEndpointKS.class).newInstance(ks);
                                    algName = AnnComponentManager.getName(learner);
                                    ((AbstractAxiomLearningAlgorithm) learner).setReasoner(sparqlReasoner);
                                    ConfigHelper.configure(learner, "classToDescribe", cls.toString());
                                    ConfigHelper.configure(learner, "maxExecutionTimeInSeconds",
                                            maxExecutionTimeInSeconds);
                                    learner.init();
                                    applyLearningAlgorithm((AxiomLearningAlgorithm) learner, cls);
                                } catch (Exception e) {
                                    logger.error("Error occured for class " + cls.getName() + " with algorithm "
                                            + algName, e);
                                }
                            }
                        });

                    }

                }

                classesCnt++;
                if (maxClasses != 0 && classesCnt == maxClasses) {
                    break;
                }

            } catch (Exception e) {
                logger.error("Error occured for class " + cls.getName(), e);
            }
        }
        if (algorithmClass != CELOE.class) {
            threadPool.shutdown();
            while (!threadPool.isTerminated()) {

            }
        }
    }
}
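
One caveat about the wait at the end of evaluateClasses: while (!threadPool.isTerminated()) {} spins a CPU core at full speed until the pool drains (the same spin-wait reappears in the last example on this page). Since shutdown() has already been called, a blocking wait does the same job without the spin; a minimal replacement, assuming java.util.concurrent.TimeUnit is imported and InterruptedException is handled by the caller:

        if (algorithmClass != CELOE.class) {
            threadPool.shutdown();
            // blocks instead of burning a core on isTerminated()
            threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        }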

From source file: org.apache.hadoop.raid.RaidShell.java

/**
 * Checks the RAIDed file system and prints a list of corrupt files to
 * this.out, along with the number of corrupt files.
 * Also prints the total number of files with at least one missing block.
 * When called with '-retNumStrpsMissingBlks', also prints the number of stripes
 * with a given number of missing blocks for files using the 'RS' codec.
 */
public void fsck(String cmd, String[] args, int startIndex) throws IOException {
    final int numFsckArgs = args.length - startIndex;
    int numThreads = 16;
    String path = "/";
    boolean argsOk = false;
    boolean countOnly = false;
    boolean cntMissingBlksPerStrp = false;
    boolean listRecoverableFile = false;
    if (numFsckArgs >= 1) {
        argsOk = true;
        path = args[startIndex];
    }
    for (int i = startIndex + 1; i < args.length; i++) {
        if (args[i].equals("-threads")) {
            numThreads = Integer.parseInt(args[++i]);
        } else if (args[i].equals("-count")) {
            countOnly = true;
        } else if (args[i].equals("-retNumStrpsMissingBlks")) {
            cntMissingBlksPerStrp = true;
        } else if (args[i].equals("-listrecoverablefiles")) {
            listRecoverableFile = true;
        }
    }
    if (!argsOk) {
        printUsage(cmd);
        return;
    }
    final String dateString = dateFormat.format(new Date());
    System.err
            .println("Running RAID FSCK with " + numThreads + " threads on " + path + " at time " + dateString);

    FileSystem fs = (new Path(path)).getFileSystem(conf);

    // if we got a raid fs, get the underlying fs 
    if (fs instanceof DistributedRaidFileSystem) {
        fs = ((DistributedRaidFileSystem) fs).getFileSystem();
    }

    // check that we have a distributed fs
    if (!(fs instanceof DistributedFileSystem)) {
        throw new IOException("expected DistributedFileSystem but got " + fs.getClass().getName());
    }
    final DistributedFileSystem dfs = (DistributedFileSystem) fs;

    // get a list of corrupted files (not considering parity blocks just yet)
    // from the name node
    // these are the only files we need to consider:
    // if a file has no corrupted data blocks, it is OK even if some
    // of its parity blocks are corrupted, so no further checking is
    // necessary
    System.err.println("Querying NameNode for list of corrupt files under " + path);
    final String[] files = DFSUtil.getCorruptFiles(dfs, path);
    final List<String> corruptFileCandidates = new LinkedList<String>();
    for (final String f : files) {
        // if this file is a parity file
        // or if it does not start with the specified path,
        // ignore it
        boolean matched = false;
        for (Codec c : Codec.getCodecs()) {
            if (f.startsWith(c.getParityPrefix())) {
                matched = true;
            }
        }
        if (!matched) {
            corruptFileCandidates.add(f);
        }
    }
    // filter files marked for deletion
    RaidUtils.filterTrash(conf, corruptFileCandidates);

    //clear numStrpMissingBlks if missing blocks per stripe is to be counted
    if (cntMissingBlksPerStrp) {
        for (AtomicLongArray numStrpMissingBlks : numStrpMissingBlksMap.values()) {
            for (int i = 0; i < numStrpMissingBlks.length(); i++) {
                numStrpMissingBlks.set(i, 0);
            }
        }
    }
    System.err.println("Processing " + corruptFileCandidates.size() + " possibly corrupt files using "
            + numThreads + " threads");
    ExecutorService executor = null;
    ThreadFactory factory = new ThreadFactory() {
        final AtomicInteger tnum = new AtomicInteger();

        public Thread newThread(Runnable r) {
            Thread t = new Thread(r);
            t.setName("Raidfsck-" + dateString + "-" + tnum.incrementAndGet());
            return t;
        }
    };
    if (numThreads > 1) {
        executor = Executors.newFixedThreadPool(numThreads, factory);
    } else {
        numThreads = 1;
    }
    final List<String> unRecoverableFiles = Collections.synchronizedList(new LinkedList<String>());
    final List<String> recoverableFiles = Collections.synchronizedList(new LinkedList<String>());
    final boolean finalCountOnly = countOnly;
    final boolean finalMissingBlksPerStrpCnt = cntMissingBlksPerStrp;
    final boolean finalListRecoverableFile = listRecoverableFile;
    final int step = numThreads;
    final AtomicInteger finishNum = new AtomicInteger(0);
    for (int i = 0; i < numThreads; i++) {
        if (!dfs.getClient().isOpen()) {
            throw new IOException("Filesystem closed.");
        }
        final int startIdx = i;
        Runnable work = new Runnable() {
            public void run() {
                try {
                    for (int idx = startIdx; idx < corruptFileCandidates.size(); idx += step) {
                        String corruptFileCandidate = corruptFileCandidates.get(idx);
                        boolean corrupt = false;
                        try {
                            FileStatus corruptStat;
                            try {
                                corruptStat = dfs.getFileStatus(new Path(corruptFileCandidate));
                            } catch (FileNotFoundException fnfe) {
                                continue;
                            }
                            if (!dfs.getClient().isOpen()) {
                                LOG.warn("Filesystem closed.");
                                return;
                            }
                            corrupt = isFileCorrupt(dfs, corruptStat, finalMissingBlksPerStrpCnt);
                            if (corrupt) {
                                incrCorruptCount();
                                if (!finalCountOnly && !finalListRecoverableFile) {
                                    unRecoverableFiles.add(corruptFileCandidate);
                                }
                            } else {
                                if (!finalCountOnly && finalListRecoverableFile) {
                                    recoverableFiles.add(corruptFileCandidate);
                                }
                            }
                        } catch (Throwable e) {
                            LOG.error("Error in processing " + corruptFileCandidate, e);
                        }
                    }
                } finally {
                    finishNum.incrementAndGet();
                }
            }
        };
        if (executor != null) {
            executor.execute(work);
        } else {
            work.run();
        }
    }
    if (executor != null) {
        try {
            while (finishNum.get() < numThreads) {
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException ie) {
                    LOG.warn("Raidfsck get exception ", ie);
                    throw new IOException(ie);
                }
            }
        } finally {
            executor.shutdown(); // initiates orderly shutdown; does not block on running tasks
        }
    }

    // If client is closed, fail the fsck check.
    if (!dfs.getClient().isOpen()) {
        throw new IOException("Filesystem closed.");
    }

    if (countOnly) {
        //Number of corrupt files (which cannot be fixed by Raid)
        out.println(getCorruptCount());
        LOG.info("Nubmer of corrupt files:" + getCorruptCount());
        //Number of files with at least one missing block
        out.println(corruptFileCandidates.size());
        LOG.info("Number of files with at least one block missing/corrupt: " + corruptFileCandidates.size());
    } else {
        if (listRecoverableFile) {
            for (String file : recoverableFiles) {
                out.println(file);
            }
        } else {
            for (String file : unRecoverableFiles) {
                out.println(file);
            }
        }
    }

    /* Number of stripes with missing blocks, as one array per code id:
     * Number of missing blocks found from non-raided files.
     * codeId1
     * index 0: Number of stripes found with one block missing in this fsck
     * index 1: Number of stripes found with two blocks missing in this fsck
     * and so on
     * codeId2
     * index 0: Number of stripes found with one block missing in this fsck
     * index 1: Number of stripes found with two blocks missing in this fsck
     * and so on
     */
    if (cntMissingBlksPerStrp) {
        out.println(this.numNonRaidedMissingBlks);
        for (String codecId : numStrpMissingBlksMap.keySet()) {
            out.println(codecId);
            AtomicLongArray numStrpMissingBlks = numStrpMissingBlksMap.get(codecId);
            for (int j = 0; j < numStrpMissingBlks.length(); j++) {
                long temp = numStrpMissingBlks.get(j);
                out.println(temp);
                LOG.info("Number of stripes with missing blocks at index " + j + " is " + temp);
            }
        }
    }
}

From source file: edu.cmu.tetrad.search.TestIndTestConditionalCorrelation.java

public void test8() {
    int NTHREADS = 100;
    long start = System.currentTimeMillis();

    ExecutorService executor = Executors.newFixedThreadPool(NTHREADS);
    for (int i = 0; i < 5000; i++) {
        Runnable worker = new MyRunnable(10000000L + i);
        executor.execute(worker);
    }
    // This will make the executor accept no new tasks
    // and finish all existing tasks in the queue
    executor.shutdown();
    try {
        // Wait until all tasks have finished
        executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
        System.out.println("Finished all threads");
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    long stop = System.currentTimeMillis();

    System.out.println((stop - start) + " ms");
}
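
A note on the wait above: awaitTermination returns true only after shutdown() has been initiated and all tasks have finished, so the shutdown() call is essential; without it the call would simply block for the full timeout, and Long.MAX_VALUE nanoseconds is roughly 292 years. Where a bounded wait is preferable, the return value should be checked, for example:

    executor.shutdown();
    if (!executor.awaitTermination(10, TimeUnit.MINUTES)) {
        System.err.println("Pool did not terminate in time");
    }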

From source file: structuredPredictionNLG.SFX.java

/**
 * Infers the feature/cost vectors for the content and word actions.
 * @return The feature/cost vectors for the content and word actions
 */
public Object[] inferFeatureAndCostVectors() {
    ConcurrentHashMap<DatasetInstance, HashMap<String, ArrayList<Instance>>> contentTrainingData = new ConcurrentHashMap<>();
    ConcurrentHashMap<DatasetInstance, HashMap<String, HashMap<String, ArrayList<Instance>>>> wordTrainingData = new ConcurrentHashMap<>();

    if (!getAvailableWordActions().isEmpty() && !getPredicates().isEmpty()) {
        // Initialize collections
        getTrainingData().stream().map((di) -> {
            contentTrainingData.put(di, new HashMap<String, ArrayList<Instance>>());
            return di;
        }).map((di) -> {
            wordTrainingData.put(di, new HashMap<String, HashMap<String, ArrayList<Instance>>>());
            return di;
        }).forEachOrdered((di) -> {
            getPredicates().stream().map((predicate) -> {
                contentTrainingData.get(di).put(predicate, new ArrayList<Instance>());
                return predicate;
            }).map((predicate) -> {
                wordTrainingData.get(di).put(predicate, new HashMap<String, ArrayList<Instance>>());
                return predicate;
            }).forEachOrdered((predicate) -> {
                getAttributes().get(predicate).stream().filter(
                        (attribute) -> (!wordTrainingData.get(di).get(predicate).containsKey(attribute)))
                        .forEachOrdered((attribute) -> {
                            wordTrainingData.get(di).get(predicate).put(attribute, new ArrayList<Instance>());
                        });
            });
        });

        // Infer the vectors in parallel threads to save time
        ExecutorService executor = Executors.newFixedThreadPool(THREAD_COUNT);
        getTrainingData().forEach((di) -> {
            executor.execute(new InferSFXVectorsThread(di, this, contentTrainingData, wordTrainingData));
        });
        executor.shutdown();
        while (!executor.isTerminated()) {
        }
    }
    Object[] results = new Object[2];
    results[0] = contentTrainingData;
    results[1] = wordTrainingData;
    return results;
}