Example usage for java.util.concurrent ThreadPoolExecutor execute

List of usage examples for java.util.concurrent ThreadPoolExecutor execute

Introduction

On this page you can find example usage of java.util.concurrent ThreadPoolExecutor execute.

Prototype

public void execute(Runnable command) 

Source Link

Document

Executes the given task sometime in the future.

Usage

From source file:org.trnltk.apps.morphology.contextless.parser.FolderContextlessMorphologicParsingApp.java

/**
 * Parses every "*_tokenized.txt" file in the corpus folder concurrently and
 * writes each result next to its source as "*_parsed.txt".
 *
 * @throws Exception if the pool wait is interrupted or parsing setup fails
 */
@App
public void parse8MWords_withOfflineAnalysis() throws Exception {
    final File folder = new File("D:\\devl\\data\\1MSentences");

    final List<File> files = new ArrayList<File>();

    // listFiles() returns null when the folder is missing or unreadable;
    // the original iterated it directly and would have thrown an opaque NPE
    final File[] children = folder.listFiles();
    if (children == null)
        throw new IllegalStateException("Cannot list folder " + folder);

    for (File file : children) {
        if (file.getName().endsWith("_tokenized.txt"))
            files.add(file);
    }

    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    final StopWatch stopWatch = new StopWatch();
    stopWatch.start();

    for (File file : files) {
        // "x_tokenized.txt" -> "x_parsed.txt", in the same directory
        final File targetFile = new File(file.getParent(),
                file.getName().substring(0, file.getName().length() - "_tokenized.txt".length())
                        + "_parsed.txt");
        final FileParseCommand fileParseCommand = new FileParseCommand(contextlessMorphologicParser, file,
                targetFile, false);
        pool.execute(fileParseCommand);
    }

    pool.shutdown();
    // Block on awaitTermination's return value instead of busy-polling isTerminated()
    while (!pool.awaitTermination(1, TimeUnit.MINUTES)) {
        System.out.println("Waiting pool to be terminated!");
    }

    stopWatch.stop();

    System.out.println("Total time :" + stopWatch.toString());
}

From source file:org.trnltk.apps.tokenizer.TextTokenizerCorpusApp.java

/**
 * Tokenizes all split source files concurrently, writing token output and
 * tokenization errors to sibling folders, and prints throughput statistics.
 *
 * @throws IOException          if a source file cannot be read for line counting
 * @throws InterruptedException if waiting for the pool is interrupted
 */
@App("Creates tokenized files")
public void tokenizeBig_files_onSource() throws IOException, InterruptedException {
    final StopWatch taskStopWatch = new StopWatch();
    taskStopWatch.start();

    final File parentFolder = new File("D:\\devl\\data\\aakindan");
    final File sourceFolder = new File(parentFolder, "src_split");
    final File targetFolder = new File(parentFolder, "src_split_tokenized");
    final File errorFolder = new File(parentFolder, "src_split_tokenization_error");
    final File[] files = sourceFolder.listFiles();
    Validate.notNull(files);

    final List<File> filesToTokenize = new ArrayList<File>();
    for (File file : files) {
        if (file.isDirectory())
            continue;

        filesToTokenize.add(file);
    }

    // Pre-count lines of all inputs so the callback can report overall progress
    int lineCountOfAllFiles = 0;
    for (File file : filesToTokenize) {
        lineCountOfAllFiles += Utilities.lineCount(file);
    }

    System.out.println("Total lines in all files " + lineCountOfAllFiles);

    final StopWatch callbackStopWatch = new StopWatch();
    final TokenizationCommandCallback callback = new TokenizationCommandCallback(lineCountOfAllFiles,
            callbackStopWatch);

    final int NUMBER_OF_THREADS = 8;
    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    callbackStopWatch.start();
    for (File sourceFile : filesToTokenize) {
        // Inputs are named "<base>.txt.<index>"; keep the numeric extension so
        // output and error files pair up with their source chunk
        final String fileBaseName = sourceFile.getName().substring(0,
                sourceFile.getName().length() - ".txt.0000".length());
        final String index = FilenameUtils.getExtension(sourceFile.getName());
        final File targetFile = new File(targetFolder, fileBaseName + "_tokenized.txt." + index);
        final File errorFile = new File(errorFolder, fileBaseName + "_tokenization_error.txt." + index);

        pool.execute(
                new TokenizationCommand(callback, fastRelaxedTokenizer, sourceFile, targetFile, errorFile));
    }

    pool.shutdown();
    // Use awaitTermination's return value instead of busy-polling isTerminated()
    while (!pool.awaitTermination(3000, TimeUnit.MILLISECONDS)) {
        // tasks still running; keep waiting
    }

    callbackStopWatch.stop();
    taskStopWatch.stop();
    System.out.println("Total time :" + taskStopWatch.toString());
    System.out.println("Nr of tokens : " + callback.getNumberOfTokens());
    System.out.println(
            "Avg time : " + (taskStopWatch.getTime() * 1.0d) / (callback.getNumberOfTokens() * 1.0d) + " ms");
}

From source file:org.trnltk.apps.tokenizer.TextTokenizerCorpusApp.java

/**
 * Converts tokenized files (space-separated tokens per line) into files with
 * one token per line, processing source files concurrently.
 *
 * @throws IOException          declared for signature compatibility; per-file
 *                              I/O errors are reported by the worker tasks
 * @throws InterruptedException if waiting for the pool is interrupted
 */
@App("Creates tokenized files")
public void convertTokensToLines_Big_files_onSource() throws IOException, InterruptedException {
    final StopWatch taskStopWatch = new StopWatch();
    taskStopWatch.start();

    final File parentFolder = new File("D:\\devl\\data\\aakindan");
    final File sourceFolder = new File(parentFolder, "src_split_tokenized");
    final File targetFolder = new File(parentFolder, "src_split_tokenized_lines");
    final File[] files = sourceFolder.listFiles();
    Validate.notNull(files);

    final List<File> filesToTokenize = new ArrayList<File>();
    for (File file : files) {
        if (file.isDirectory())
            continue;

        filesToTokenize.add(file);
    }

    final StopWatch callbackStopWatch = new StopWatch();

    final int NUMBER_OF_THREADS = 8;
    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    callbackStopWatch.start();
    for (final File sourceFile : filesToTokenize) {
        final File targetFile = new File(targetFolder, sourceFile.getName());
        pool.execute(new Runnable() {
            @Override
            public void run() {
                System.out.println("Processing file " + sourceFile);
                BufferedWriter writer = null;
                try {
                    final List<String> lines = Files.readLines(sourceFile, Charsets.UTF_8);
                    writer = Files.newWriter(targetFile, Charsets.UTF_8);
                    for (String line : lines) {
                        // Split on single spaces, dropping empty and whitespace-only tokens
                        final Iterable<String> tokens = Splitter.on(' ').omitEmptyStrings().trimResults()
                                .split(line);
                        for (String token : tokens) {
                            writer.write(token);
                            writer.write("\n");
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    if (writer != null)
                        try {
                            writer.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                }
            }
        });
    }

    pool.shutdown();
    // Use awaitTermination's return value instead of busy-polling isTerminated()
    while (!pool.awaitTermination(3000, TimeUnit.MILLISECONDS)) {
        // tasks still running; keep waiting
    }

    callbackStopWatch.stop();
    taskStopWatch.stop();
    System.out.println("Total time :" + taskStopWatch.toString());
}

From source file:org.trnltk.apps.tokenizer.TextTokenizerCorpusApp.java

/**
 * Scans all tokenized-line files concurrently, records which char values occur
 * anywhere in the corpus, and writes the distinct characters (one per line) to
 * an output file.
 *
 * @throws IOException          if the output file cannot be written
 * @throws InterruptedException if waiting for the pool is interrupted
 */
@App("Creates tokenized files")
public void findUniqueChars_Big_files_onSource() throws IOException, InterruptedException {
    final StopWatch taskStopWatch = new StopWatch();
    taskStopWatch.start();

    final File parentFolder = new File("D:\\devl\\data\\aakindan");
    final File targetFile = new File(parentFolder, "chars_with_occurrence.txt");
    final File sourceFolder = new File(parentFolder, "src_split_tokenized_lines");
    final File[] files = sourceFolder.listFiles();
    Validate.notNull(files);

    final List<File> filesToInvestigate = new ArrayList<File>();
    for (File file : files) {
        if (file.isDirectory())
            continue;

        filesToInvestigate.add(file);
    }

    final StopWatch callbackStopWatch = new StopWatch();

    final int NUMBER_OF_THREADS = 8;
    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);

    // One flag per possible char value. A char can never exceed
    // Character.MAX_VALUE (65535), so the original 65536 * 5 allocation
    // wasted 4/5 of the array.
    final boolean[] seen = new boolean[Character.MAX_VALUE + 1];

    callbackStopWatch.start();
    for (final File sourceFile : filesToInvestigate) {
        pool.execute(new Runnable() {
            @Override
            public void run() {
                System.out.println("Processing file " + sourceFile);
                try {
                    final List<String> lines = Files.readLines(sourceFile, Charsets.UTF_8);
                    for (String token : lines) {
                        // Concurrent writes of 'true' to the same cells are benign;
                        // awaitTermination below establishes visibility before reading
                        for (int i = 0; i < token.length(); i++) {
                            seen[token.charAt(i)] = true;
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    pool.shutdown();
    // Use awaitTermination's return value instead of busy-polling isTerminated()
    while (!pool.awaitTermination(3000, TimeUnit.MILLISECONDS)) {
        // tasks still running; keep waiting
    }

    // Close the writer even if a write fails partway through
    final BufferedWriter writer = Files.newWriter(targetFile, Charsets.UTF_8);
    try {
        for (int i = 0; i < seen.length; i++) {
            if (seen[i]) {
                writer.write((char) i);
                writer.write("\n");
            }
        }
    } finally {
        writer.close();
    }

    callbackStopWatch.stop();
    taskStopWatch.stop();
    System.out.println("Total time :" + taskStopWatch.toString());
}

From source file:org.trnltk.apps.tokenizer.UniqueWordFinderApp.java

@App("Goes thru tokenized files, finds unique words")
public void findWordHistogram() throws InterruptedException {
    final StopWatch taskStopWatch = new StopWatch();
    taskStopWatch.start();/*from  w  w w  . ja  v  a  2 s.c  om*/

    final File parentFolder = new File("D:\\devl\\data\\aakindan");
    final File sourceFolder = new File(parentFolder, "src_split_tokenized");
    final File[] files = sourceFolder.listFiles();
    Validate.notNull(files);

    final List<File> filesToRead = new ArrayList<File>();
    for (File file : files) {
        if (file.isDirectory())
            continue;

        filesToRead.add(file);
    }

    int NUMBER_OF_THREADS = 8;
    final ThreadPoolExecutor pool = (ThreadPoolExecutor) Executors.newFixedThreadPool(NUMBER_OF_THREADS);
    Map[] countMaps = new Map[NUMBER_OF_THREADS];
    for (int i = 0; i < countMaps.length; i++) {
        countMaps[i] = new HashMap(1000000);
    }

    for (int i = 0; i < filesToRead.size(); i++) {
        File file = filesToRead.get(i);
        //noinspection unchecked
        pool.execute(new HistogramCommand(countMaps[i % NUMBER_OF_THREADS], file));
    }

    pool.shutdown();
    while (!pool.isTerminated()) {
        //System.out.println("Waiting pool to be terminated!");
        pool.awaitTermination(3000, TimeUnit.MILLISECONDS);
    }

    System.out.println("Merging countMaps");
    final HashMap<String, Integer> mergeMap = new HashMap<String, Integer>(
            countMaps[0].size() * NUMBER_OF_THREADS); //approx
    for (Map<String, Integer> countMap : countMaps) {
        for (Map.Entry<String, Integer> stringIntegerEntry : countMap.entrySet()) {
            final String surface = stringIntegerEntry.getKey();
            final Integer newCount = stringIntegerEntry.getValue();
            final Integer existingCount = mergeMap.get(surface);
            if (existingCount == null)
                mergeMap.put(surface, newCount);
            else
                mergeMap.put(surface, existingCount + newCount);
        }
    }

    System.out.println("Sorting mergeMaps");
    final Map<String, Integer> sortedMergeMap = new TreeMap<String, Integer>(new Comparator<String>() {
        @Override
        public int compare(String a, String b) {
            Integer x = mergeMap.get(a);
            Integer y = mergeMap.get(b);
            if (x.equals(y)) {
                return a.compareTo(b);
            }
            return y.compareTo(x);
        }
    });

    sortedMergeMap.putAll(mergeMap);

    System.out.println("Writing to file");
    int numberOfTokens = 0;
    final File outputFile = new File(parentFolder, "wordHistogram.txt");
    BufferedWriter bufferedWriter = null;
    try {
        bufferedWriter = Files.newWriter(outputFile, Charsets.UTF_8);
        for (Map.Entry<String, Integer> entry : sortedMergeMap.entrySet()) {
            numberOfTokens += entry.getValue();
            bufferedWriter.write(entry.getKey() + " " + entry.getValue() + "\n");
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (bufferedWriter != null)
            try {
                bufferedWriter.close();
            } catch (IOException e) {
                System.err.println("Unable to close file ");
                e.printStackTrace();
            }
    }

    taskStopWatch.stop();

    System.out.println("Total time :" + taskStopWatch.toString());
    System.out.println("Nr of tokens : " + numberOfTokens);
    System.out.println("Nr of unique tokens : " + sortedMergeMap.size());
}

From source file:test.other.T_encrypt_password.java

/**
 * Micro-benchmark: each task repeatedly fills a batch of random strings and
 * times {@code EncryptionService.transformPassword} over the whole batch,
 * printing the total elapsed nanoseconds to stdout.
 */
@Test
public void testxx() {

    final Random random = new Random();

    final int pow = 15;
    final int loopNum = 30;
    final int taskNum = 10;

    class MyTask implements Runnable {

        public void run() {

            final Long[] elapsed = new Long[loopNum];

            for (int i = 0; i != loopNum; ++i) {

                String[] safes = new String[1 << pow];
                elapsed[i] = 0L;

                // BUG FIX: the original wrote safes[i] here, so only one slot
                // was ever filled and the rest stayed null
                for (int k = 0; k < 1 << pow; ++k) {
                    safes[k] = getRandomString(random.nextInt(50));
                }

                // BUG FIX: the original indexed safes[i] here too, timing the
                // same element (or null) instead of iterating the batch
                long t1 = System.nanoTime();
                for (int j = 0; j < 1 << pow; j++) {
                    safes[j] = EncryptionService.transformPassword(safes[j]);
                }
                long t2 = System.nanoTime();

                elapsed[i] += t2 - t1;
            }
            long total = 0;
            for (Long long1 : elapsed) {
                total += long1;
            }
            System.out.println(total); //  return value by stdOut!
        }
    }

    ThreadPoolExecutor pool = new ThreadPoolExecutor(2, 16, 10, TimeUnit.SECONDS,
            new SynchronousQueue<Runnable>(), new ThreadPoolExecutor.CallerRunsPolicy());

    for (int i = 0; i < taskNum; ++i) {
        pool.execute(new MyTask());
    }

    // Without shutdown + awaitTermination the test method (and the JVM) could
    // exit before the pooled tasks ever ran to completion
    pool.shutdown();
    try {
        pool.awaitTermination(10, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
}