Example usage for java.util.concurrent ForkJoinPool execute

Introduction

On this page you can find usage examples for java.util.concurrent.ForkJoinPool.execute.

Prototype

public void execute(Runnable task) 
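
A minimal, self-contained sketch of the call (the class name and task body are illustrative, not taken from the sources below): execute(Runnable) schedules the task for asynchronous execution and returns immediately, so the pool must be shut down and awaited if the program should not exit before the task runs.

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

public class ExecuteSketch {
    public static void main(String[] args) throws InterruptedException {
        // By default the pool is sized to the number of available processors.
        ForkJoinPool pool = new ForkJoinPool();

        // execute(Runnable) schedules the task and returns immediately.
        pool.execute(() -> System.out.println("ran on " + Thread.currentThread().getName()));

        pool.shutdown();                            // stop accepting new tasks
        pool.awaitTermination(5, TimeUnit.SECONDS); // wait for the task to finish
    }
}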

Usage

From source file: com.hygenics.parser.SpecifiedDump.java

/**
 * Runs the Dump
 */
public void run() {

    if (archive) {
        if (tables.keySet().size() > 0) {

            Archiver zip = new Archiver();
            String basefile = tables.keySet().iterator().next().split("\\|")[1];

            if (basefile.trim().length() > 0) {
                zip.setBasedirectory(basefile);
                zip.setZipDirectory(basefile + "archive.zip");
                zip.setAvoidanceString(".zip|archive");
                zip.setDelFiles(true);
                zip.run();
            }
        }
    }

    int dumped = 0;
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    boolean checkedTables = (this.tablesMustHave == null);
    for (String tf : tables.keySet()) {
        String[] split = tf.split("\\|");
        log.info("Dumping for " + split[0]);
        String schema = null;
        try {
            schema = split[0].split("\\.")[0];

            if (!checkedTables) {
                ArrayList<String> mustHaveTemp = (ArrayList<String>) this.tablesMustHave.clone();
                ArrayList<String> existingTables = this.template.getJsonData(
                        "SELECT table_name FROM information_schema.tables WHERE table_schema ILIKE '%" + schema
                                + "%'");
                for (String tdict : existingTables) {
                    String table = JsonObject.readFrom(tdict).get("table_name").asString();
                    if (mustHaveTemp.contains(table)) {
                        mustHaveTemp.remove(table);

                        // get count
                        if (this.template.getCount(schema + "." + table) == 0) {
                            try {
                                throw new MissingData(
                                        "Data Missing from Required Table: " + schema + "." + table);
                            } catch (MissingData e) {
                                e.printStackTrace();
                            }
                        }
                    }
                }

                if (mustHaveTemp.size() > 0) {
                    log.error("Drop Schema " + schema + "  is missing the following tables:\n");
                    for (String table : mustHaveTemp) {
                        log.error(table + "\n");
                    }

                    try {
                        throw new TableMissingException();
                    } catch (TableMissingException e) {
                        e.printStackTrace();
                        System.exit(-1);
                    }
                }

            }

        } catch (IndexOutOfBoundsException e) {
            try {
                throw new SQLMalformedException("FATAL ERROR: Table name " + split[0] + " malformed");
            } catch (SQLMalformedException e2) {
                e2.printStackTrace();
                System.exit(-1);
            }
        }

        log.info("Checking  table: " + split[0] + "&& schema: " + schema);
        if (template.checkTable(split[0], schema)) {
            if (template.getCount(schema + "." + split[0].replace(schema + ".", "")) > 0) {
                Set<String> keys = tables.get(tf).keySet();
                String sql;
                String select = "SELECT ";
                String distinct = null;
                String attrs = null;
                String where = null;
                String group = null;
                String order = null;

                /**
                 * Set the attributes, which can be specified with:
                 *   distinct - concatenated into the DISTINCT ON part of the query
                 *   not0     - requires length > 0 in the WHERE clause
                 *   group    - groups by the attribute
                 *   notnull  - requires that the attribute is NOT NULL
                 *   orderby  - specifies the single ORDER BY attribute
                 */
                for (String k : keys) {
                    if (k.toLowerCase().contains("distinct")) {
                        distinct = (distinct == null)
                                ? "distinct on(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : distinct + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("group")) {
                        group = (group == null) ? "GROUP BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : group + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("not0")) {
                        if (k.contains("not0OR")) {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "OR length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        } else {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "AND length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        }
                    }

                    if (k.toLowerCase().contains("notnull")) {
                        if (k.toLowerCase().contains("notnullor")) {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " OR " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        } else {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " AND " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        }
                    }

                    if (k.toLowerCase().contains("order")) {
                        if (k.toLowerCase().contains("orderdesc")) {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " ASC"
                                    : order;
                        } else {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " DESC"
                                    : order;
                        }
                    }

                    String field = tables.get(tf).get(k);
                    if (k.toLowerCase().contains("attr")) {
                        if (unicoderemove == true) {
                            field = "trim(replace(regexp_replace(" + field
                                    + ",'[^\\u0020-\\u007e,\\(\\);\\-\\[\\]]+',' '),'" + this.delimiter + "','"
                                    + this.replacedel + "')) as " + field;
                        } else {
                            field = "trim(replace(" + field + ",'" + this.delimiter + "','" + this.replacedel
                                    + "'))";
                        }

                        attrs = (attrs == null) ? field : attrs + "," + field;
                    }
                }

                select = (distinct == null) ? select : select.trim() + " " + distinct.trim() + ")";
                select += " " + attrs.trim();
                select += " FROM " + split[0].trim();
                select = (where == null) ? select : select.trim() + " " + where.trim();
                select = (group == null) ? select : select.trim() + " " + group.trim();
                select = (order == null) ? select : select.trim() + " " + order.trim();

                if (extracondition != null) {
                    select += (select.contains(" WHERE ") == true) ? " AND" + extracondition
                            : " WHERE " + extracondition;
                }

                select = select.trim();

                log.info("Dump Select Command: " + select);

                sql = "COPY  (" + select + ") TO STDOUT WITH DELIMITER '" + delimiter.trim()
                        + "' NULL as '' CSV HEADER";
                fjp.execute(new ToFile(sql, split[1].trim()));

                select = "SELECT ";
                distinct = null;
                attrs = null;
                where = null;
                group = null;
                order = null;
                dumped += 1;
            } else {
                try {
                    throw new NoDataException("No Data found in " + split[0]);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        } else {
            try {
                throw new SQLMalformedException("WARNING: Table " + split[0] + " is missing");
            } catch (SQLMalformedException e) {
                e.printStackTrace();
            }
        }
    }

    try {
        // shut down first so awaitTermination can observe the pool terminating
        fjp.shutdown();
        fjp.awaitTermination(60000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    if (dumped == 0) {
        log.error("No Date found in any tables");
        System.exit(-1);
    }

}
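
The dump above queues one ToFile task per table and then waits on the pool. A minimal sketch of the orderly-shutdown idiom it relies on (the Runnable body is a stand-in for ToFile, which is not shown in this listing):

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

public class DumpShutdownSketch {
    public static void main(String[] args) throws InterruptedException {
        ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
        fjp.execute(() -> System.out.println("dumping one table"));  // stand-in for new ToFile(sql, path)

        // Shut down before awaiting: awaitTermination only returns early once
        // shutdown has been requested and all queued tasks have completed.
        fjp.shutdown();
        if (!fjp.awaitTermination(60000, TimeUnit.MILLISECONDS)) {
            fjp.shutdownNow();  // give up on stragglers after the timeout
        }
    }
}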

From source file: com.hygenics.parser.SpecDumpWithReference.java

/**
 * Runs the Dump
 */
public void run() {

    if (archive) {
        Archiver zip = new Archiver();
        String[] barr = baseFile.split("\\/");
        String basefile = "";
        for (int i = 0; i < barr.length - 1; i++) {
            basefile += (i == 0) ? barr[i] : "/" + barr[i];
        }
        if (basefile.trim().length() > 0) {
            zip.setBasedirectory(basefile);
            zip.setZipDirectory(basefile + "archive.zip");
            zip.setAvoidanceString(".zip|archive");
            zip.setDelFiles(true);
            zip.run();
        }
    }

    int dumped = 0;
    log.info("Tables Found: " + tables.size());
    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
    boolean checkedTables = (this.tablesMustHave == null);
    for (String tf : tables.keySet()) {
        String[] split = (this.baseschema + "." + tf + "|" + this.baseFile + tf).split("\\|");
        log.info("Dumping for " + split[0]);
        String schema = null;
        try {
            schema = split[0].split("\\.")[0];

            if (!checkedTables) {
                ArrayList<String> mustHaveTemp = (ArrayList<String>) this.tablesMustHave.clone();
                ArrayList<String> existingTables = this.template.getJsonData(
                        "SELECT table_name FROM information_schema.tables WHERE table_schema ILIKE '%" + schema
                                + "%'");
                for (String tdict : existingTables) {

                    String table = Json.parse(tdict).asObject().get("table_name").asString();
                    if (mustHaveTemp.contains(table)) {
                        mustHaveTemp.remove(table);

                        // get count
                        if (this.template.getCount(schema + "." + table) == 0) {
                            try {
                                throw new MissingData(
                                        "Data Missing from Required Table: " + schema + "." + table);
                            } catch (MissingData e) {
                                e.printStackTrace();
                                if (tablesMustHave.contains(table)) {
                                    log.error("Critical Table Missing Data! Terminating!");
                                    System.exit(-1);
                                }
                            }
                        }

                    }
                }

                if (mustHaveTemp.size() > 0) {
                    log.error("Drop Schema " + schema + "  is missing the following tables:\n");
                    for (String table : mustHaveTemp) {
                        log.error(table + "\n");
                    }

                    try {
                        throw new TableMissingException();
                    } catch (TableMissingException e) {
                        e.printStackTrace();
                        System.exit(-1);
                    }
                }
            }

        } catch (IndexOutOfBoundsException e) {
            try {
                throw new SQLMalformedException("FATAL ERROR: Table name " + split[0] + " malformed");
            } catch (SQLMalformedException e2) {
                e2.printStackTrace();
                System.exit(-1);
            }
        }

        log.info("Checking  table: " + split[0] + "&& schema: " + schema);

        if (template.checkTable(split[0], schema)) {
            // check if there are records

            if (template.getCount(schema + "." + split[0].replace(schema + ".", "")) > 0) {
                dumped += 1;
                Set<String> keys = tables.get(tf).keySet();
                String sql;
                String select = "SELECT ";
                String distinct = null;
                String attrs = null;
                String where = null;
                String group = null;
                String order = null;

                /**
                 * Set the attributes, which can be specified with:
                 *   distinct - concatenated into the DISTINCT ON part of the query
                 *   not0     - requires length > 0 in the WHERE clause
                 *   group    - groups by the attribute
                 *   notnull  - requires that the attribute is NOT NULL
                 *   orderby  - specifies the single ORDER BY attribute
                 */
                for (String k : keys) {
                    if (k.toLowerCase().contains("distinct")) {
                        distinct = (distinct == null)
                                ? "distinct on(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : distinct + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("group")) {
                        group = (group == null) ? "GROUP BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                : group + "," + tables.get(tf).get(k).replaceAll("\\sas.*", "");
                    }

                    if (k.toLowerCase().contains("not0")) {
                        if (k.contains("not0OR")) {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "OR length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        } else {
                            where = (where == null)
                                    ? "WHERE length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ") >0 "
                                    : where + "AND length(" + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + ")";
                        }
                    }

                    if (k.toLowerCase().contains("notnull")) {
                        if (k.toLowerCase().contains("notnullor")) {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " OR " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        } else {
                            where = (where == null)
                                    ? "WHERE " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL"
                                    : where + " AND " + tables.get(tf).get(k).replaceAll("\\sas.*", "")
                                            + " IS NOT NULL";
                        }
                    }

                    if (k.toLowerCase().contains("order")) {
                        if (k.toLowerCase().contains("orderdesc")) {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " ASC"
                                    : order;
                        } else {
                            order = (order == null)
                                    ? "ORDER BY " + tables.get(tf).get(k).replaceAll("\\sas.*", "") + " DESC"
                                    : order;
                        }
                    }

                    String field = tables.get(tf).get(k);
                    if (k.toLowerCase().contains("attr")) {
                        if (unicoderemove == true) {
                            field = "regexp_replace(trim(replace(regexp_replace(cast(" + field + " as text)"
                                    + ",'[^\\u0020-\\u007e,\\(\\);\\-\\[\\]]+',' '),'" + this.delimiter + "','"
                                    + this.replacedel + "')),'[\\r|\\n]+','   ','gm') as " + field;
                        } else {
                            field = "regexp_replace(trim(replace(cast(" + field + " as text),'" + this.delimiter
                                    + "','" + this.replacedel + "')),'[\\r|\\n]+','   ','gm')";
                        }

                        attrs = (attrs == null) ? field : attrs + "," + field;
                    }
                }

                select = (distinct == null) ? select : select.trim() + " " + distinct.trim() + ")";
                select += " " + attrs.trim();
                select += " FROM " + split[0].trim();
                select = (where == null) ? select : select.trim() + " " + where.trim();
                select = (group == null) ? select : select.trim() + " " + group.trim();
                select = (order == null) ? select : select.trim() + " " + order.trim();

                if (extracondition != null) {
                    select += (select.contains(" WHERE ") == true) ? " AND" + extracondition
                            : " WHERE " + extracondition;
                }

                select = select.trim();

                log.info("Dump Select Command: " + select);

                sql = "COPY  (" + select + ") TO STDOUT WITH DELIMITER '" + delimiter.trim()
                        + "' NULL as '' CSV HEADER";
                fjp.execute(new ToFile(sql, split[1].trim()));

                select = "SELECT ";
                distinct = null;
                attrs = null;
                where = null;
                group = null;
                order = null;
            } else {
                try {

                    throw new NoDataException("WARNING: Table " + split[0] + " has no Data");

                } catch (NoDataException e) {
                    e.printStackTrace();
                    if (tablesMustHave != null && tablesMustHave.contains(split[0])) {
                        log.error("Table is a Must Have Table by has not Data. Terminating!");
                        System.exit(-1);
                    }
                }
            }
        } else {
            try {
                throw new SQLMalformedException("WARNING: Table " + split[0] + " is missing");
            } catch (SQLMalformedException e) {
                e.printStackTrace();
            }
        }
    }

    try {
        // shut down first so awaitTermination can observe the pool terminating
        fjp.shutdown();
        fjp.awaitTermination(60000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    if (dumped == 0) {
        log.info("No Data Found in any Table");
        System.exit(-1);
    }

}

From source file: com.hygenics.parser.ParseJSoup.java

/**
 * Runs the Program
 */
public void run() {
    int its = 0;

    this.select = Properties.getProperty(this.select);
    this.extracondition = Properties.getProperty(this.extracondition);
    this.column = Properties.getProperty(this.column);

    createTables();
    log.info("Starting Parse via JSoup @ " + Calendar.getInstance().getTime().toString());

    ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors() * procs);
    Set<Callable<ArrayList<String>>> collection;
    List<Future<ArrayList<String>>> futures;
    ArrayList<String> data = new ArrayList<String>((commitsize + 10));
    ArrayList<String> outdata = new ArrayList<String>(((commitsize + 10) * 3));
    int offenderhash = offset;

    boolean run = true;
    int iteration = 0;

    int currpos = 0;
    do {
        collection = new HashSet<Callable<ArrayList<String>>>(qnums);
        log.info("Getting Data");
        // get data
        currpos = iteration * commitsize + offset;
        iteration += 1;
        String query = select;

        if (extracondition != null) {
            query += " WHERE " + extracondition + " AND ";
        } else {
            query += " WHERE ";
        }

        for (int i = 0; i < qnums; i++) {

            if (currpos + (Math.round(commitsize / qnums * (i + 1))) < currpos + commitsize) {
                collection.add(new SplitQuery((query + pullid + " >= "
                        + Integer.toString(currpos + (Math.round(commitsize / qnums * (i)))) + " AND " + pullid
                        + " < " + Integer.toString(currpos + (Math.round(commitsize / qnums * (i + 1)))))));
            } else {
                collection.add(new SplitQuery((query + pullid + " >= "
                        + Integer.toString(currpos + (Math.round(commitsize / qnums * (i)))) + " AND " + pullid
                        + " < " + Integer.toString(currpos + commitsize))));
            }
        }

        if (collection.size() > 0) {

            futures = fjp.invokeAll(collection);

            int w = 0;

            while (fjp.isQuiescent() == false && fjp.getActiveThreadCount() > 0) {
                w++;
            }

            for (Future<ArrayList<String>> f : futures) {
                try {
                    // TODO Get Pages to Parse
                    data.addAll(f.get());
                } catch (NullPointerException e) {
                    log.info("Some Data Returned Null");
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }
            }

        }

        collection = new HashSet<Callable<ArrayList<String>>>(data.size());
        // checkstring
        if (data.size() == 0 && checkstring != null && its <= maxchecks) {
            its++;
            collection.add(new SplitQuery(checkstring));

            futures = fjp.invokeAll(collection);

            int w = 0;
            while (fjp.isQuiescent() == false && fjp.getActiveThreadCount() > 0) {
                w++;
            }

            for (Future<ArrayList<String>> f : futures) {
                try {
                    // TODO Get Pages to Parse
                    data.addAll(f.get());
                } catch (NullPointerException e) {
                    log.info("Some Data Returned Null");
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }
            }

        }

        if (data.size() == 0) {
            // stop when there are no pages left to parse
            log.info("No Pages to Parse. Will Terminate");
            run = false;
        } else {
            // parse
            log.info("Starting JSoup Parse @ " + Calendar.getInstance().getTime().toString());
            for (String json : data) {
                // the faster JSON reader is minimal-json, but the faster
                // parser is Simple JSON
                Map<String, Json> jMap = Json.read(json).asJsonMap();

                if (jMap.containsKey("offenderhash")) {
                    // string to int in case it is a string and has some
                    // extra space
                    offenderhash = Integer.parseInt(jMap.get("offenderhash").asString().trim());
                }

                boolean allow = true;

                if (mustcontain != null) {
                    if (jMap.get(column).asString().contains(mustcontain) == false) {
                        allow = false;
                    }
                }

                if (cannotcontain != null) {
                    if (jMap.get(column).asString().contains(cannotcontain)) {
                        allow = false;
                    }
                }

                // this is the fastest way. I was learning before and will
                // rewrite when time permits.
                if (allow == true) {
                    if (jMap.containsKey("offenderhash")) {
                        if (this.singlepaths != null) {
                            collection.add(new ParseSingle(Integer.toString(offenderhash), header, footer,
                                    pagenarrow, singlepaths,
                                    StringEscapeUtils.unescapeXml(jMap.get(column).asString()), replace,
                                    replaceSequence));
                        }

                        if (this.multipaths != null) {
                            collection.add(new ParseRows(Integer.toString(offenderhash), header, footer,
                                    pagenarrow, multipaths,
                                    StringEscapeUtils.unescapeXml(jMap.get(column).asString()), replace,
                                    replaceSequence));
                        }

                        if (this.recordpaths != null) {
                            collection.add(new ParseLoop(Integer.toString(offenderhash), header, footer,
                                    pagenarrow, recordpaths,
                                    StringEscapeUtils.unescapeXml(jMap.get(column).asString()), replace,
                                    replaceSequence));
                        }
                    }
                }
                offenderhash += 1;

            }

            // complete parse
            log.info("Waiting for Parsing to Complete.");
            if (collection.size() > 0) {
                futures = fjp.invokeAll(collection);

                int w = 0;
                while (fjp.isQuiescent() == false && fjp.getActiveThreadCount() > 0) {
                    w++;
                }

                log.info("Waited for " + Integer.toString(w) + " Cycles!");
                for (Future<ArrayList<String>> f : futures) {
                    try {
                        outdata.addAll(f.get());
                    } catch (InterruptedException | ExecutionException e) {
                        e.printStackTrace();
                    }
                }

            }
            log.info("Finished Parsing @ " + Calendar.getInstance().getTime().toString());

            int cp = 0;
            // post data
            log.info("Posting Data @ " + Calendar.getInstance().getTime().toString());
            if (outdata.size() > 0) {

                for (int i = 0; i < qnums; i++) {

                    ArrayList<String> od = new ArrayList<String>(
                            ((cp + (Math.round(outdata.size() / qnums) - cp))));

                    if (cp + (Math.round(outdata.size() / qnums)) < outdata.size()) {
                        od.addAll(outdata.subList(cp, (cp + (Math.round(outdata.size() / qnums)))));
                    } else {
                        od.addAll(outdata.subList(cp, outdata.size()));
                    }
                    fjp.execute(new SplitPost(template, od));
                    cp += Math.round(outdata.size() / qnums);
                }

                int w = 0;
                while (fjp.getActiveThreadCount() > 0 && fjp.isQuiescent() == false) {
                    w++;
                }
                log.info("Waited for " + Integer.toString(w) + " cycles!");

            }
            log.info("Finished Posting to DB @ " + Calendar.getInstance().getTime().toString());

            // size should remain same with 10 slot buffer room
            data.clear();
            outdata.clear();
        }

        // my favorite really desperate attempt to actually invoke garbage
        // collection because of MASSIVE STRINGS
        System.gc();
        Runtime.getRuntime().gc();

    } while (run);

    log.info("Shutting Down FJP");
    // shutdown fjp
    if (fjp.isShutdown() == false) {
        fjp.shutdownNow();
    }

    log.info("Finished Parsing @ " + Calendar.getInstance().getTime().toString());

}
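
The parser above polls isQuiescent() in spin loops while submitted work drains. A sketch of ForkJoinPool.awaitQuiescence, which blocks (and even helps run queued tasks) instead of burning a core, is shown below; the task bodies are illustrative:

import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;

public class QuiescenceSketch {
    public static void main(String[] args) {
        ForkJoinPool fjp = new ForkJoinPool(Runtime.getRuntime().availableProcessors());
        for (int i = 0; i < 8; i++) {
            final int n = i;
            fjp.execute(() -> System.out.println("parsed batch " + n));
        }
        // Replaces the spin loop: int w = 0; while (!fjp.isQuiescent()) { w++; }
        fjp.awaitQuiescence(30, TimeUnit.SECONDS);
        fjp.shutdown();
    }
}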

From source file: org.cryptomator.frontend.webdav.WebDavServerTest.java

@Test
public void testMultipleGetWithRangeAsync() throws IOException, URISyntaxException, InterruptedException {
    final String testResourceUrl = servletRoot + "/foo.txt";

    // prepare 8MiB test data:
    final byte[] plaintextData = new byte[2097152 * Integer.BYTES];
    final ByteBuffer plaintextDataByteBuffer = ByteBuffer.wrap(plaintextData);
    for (int i = 0; i < 2097152; i++) {
        plaintextDataByteBuffer.putInt(i);
    }
    try (WritableFile w = fs.file("foo.txt").openWritable()) {
        plaintextDataByteBuffer.flip();
        w.write(plaintextDataByteBuffer);
    }

    final MultiThreadedHttpConnectionManager cm = new MultiThreadedHttpConnectionManager();
    cm.getParams().setDefaultMaxConnectionsPerHost(50);
    final HttpClient client = new HttpClient(cm);

    // multiple async range requests:
    final List<ForkJoinTask<?>> tasks = new ArrayList<>();
    final Random generator = new Random(System.currentTimeMillis());

    final AtomicBoolean success = new AtomicBoolean(true);

    // 10 full interrupted requests:
    for (int i = 0; i < 10; i++) {
        final ForkJoinTask<?> task = ForkJoinTask.adapt(() -> {
            try {
                final HttpMethod getMethod = new GetMethod(testResourceUrl);
                final int statusCode = client.executeMethod(getMethod);
                if (statusCode != 200) {
                    LOG.error("Invalid status code for interrupted full request");
                    success.set(false);
                }
                getMethod.getResponseBodyAsStream().read();
                getMethod.getResponseBodyAsStream().close();
                getMethod.releaseConnection();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
        tasks.add(task);
    }

    // 50 crappy interrupted range requests:
    for (int i = 0; i < 50; i++) {
        final int lower = generator.nextInt(plaintextData.length);
        final ForkJoinTask<?> task = ForkJoinTask.adapt(() -> {
            try {
                final HttpMethod getMethod = new GetMethod(testResourceUrl);
                getMethod.addRequestHeader("Range", "bytes=" + lower + "-");
                final int statusCode = client.executeMethod(getMethod);
                if (statusCode != 206) {
                    LOG.error("Invalid status code for interrupted range request");
                    success.set(false);
                }
                getMethod.getResponseBodyAsStream().read();
                getMethod.getResponseBodyAsStream().close();
                getMethod.releaseConnection();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
        tasks.add(task);
    }

    // 50 normal open range requests:
    for (int i = 0; i < 50; i++) {
        final int lower = generator.nextInt(plaintextData.length - 512);
        final int upper = plaintextData.length - 1;
        final ForkJoinTask<?> task = ForkJoinTask.adapt(() -> {
            try {
                final HttpMethod getMethod = new GetMethod(testResourceUrl);
                getMethod.addRequestHeader("Range", "bytes=" + lower + "-");
                final byte[] expected = Arrays.copyOfRange(plaintextData, lower, upper + 1);
                final int statusCode = client.executeMethod(getMethod);
                final byte[] responseBody = new byte[upper - lower + 10];
                final int bytesRead = IOUtils.read(getMethod.getResponseBodyAsStream(), responseBody);
                getMethod.releaseConnection();
                if (statusCode != 206) {
                    LOG.error("Invalid status code for open range request");
                    success.set(false);
                } else if (upper - lower + 1 != bytesRead) {
                    LOG.error("Invalid response length for open range request");
                    success.set(false);
                } else if (!Arrays.equals(expected, Arrays.copyOfRange(responseBody, 0, bytesRead))) {
                    LOG.error("Invalid response body for open range request");
                    success.set(false);
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
        tasks.add(task);
    }

    // 200 normal closed range requests:
    for (int i = 0; i < 200; i++) {
        final int pos1 = generator.nextInt(plaintextData.length - 512);
        final int pos2 = pos1 + 512;
        final ForkJoinTask<?> task = ForkJoinTask.adapt(() -> {
            try {
                final int lower = Math.min(pos1, pos2);
                final int upper = Math.max(pos1, pos2);
                final HttpMethod getMethod = new GetMethod(testResourceUrl);
                getMethod.addRequestHeader("Range", "bytes=" + lower + "-" + upper);
                final byte[] expected = Arrays.copyOfRange(plaintextData, lower, upper + 1);
                final int statusCode = client.executeMethod(getMethod);
                final byte[] responseBody = new byte[upper - lower + 1];
                final int bytesRead = IOUtils.read(getMethod.getResponseBodyAsStream(), responseBody);
                getMethod.releaseConnection();
                if (statusCode != 206) {
                    LOG.error("Invalid status code for closed range request");
                    success.set(false);
                } else if (upper - lower + 1 != bytesRead) {
                    LOG.error("Invalid response length for closed range request");
                    success.set(false);
                } else if (!Arrays.equals(expected, Arrays.copyOfRange(responseBody, 0, bytesRead))) {
                    LOG.error("Invalid response body for closed range request");
                    success.set(false);
                }
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        });
        tasks.add(task);
    }

    Collections.shuffle(tasks, generator);

    final ForkJoinPool pool = new ForkJoinPool(4);
    for (ForkJoinTask<?> task : tasks) {
        pool.execute(task);
    }
    for (ForkJoinTask<?> task : tasks) {
        task.join();
    }
    pool.shutdown();
    cm.shutdown();

    Assert.assertTrue(success.get());
}
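
Reduced to its core, the test exercises the execute-then-join pattern: ForkJoinTask.adapt wraps a Runnable so it can be submitted with execute and later joined. A minimal sketch (task bodies illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinTask;

public class ExecuteJoinSketch {
    public static void main(String[] args) {
        ForkJoinPool pool = new ForkJoinPool(4);
        List<ForkJoinTask<?>> tasks = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            final int n = i;
            tasks.add(ForkJoinTask.adapt(() -> System.out.println("request " + n)));
        }
        for (ForkJoinTask<?> task : tasks) {
            pool.execute(task);  // schedule asynchronously
        }
        for (ForkJoinTask<?> task : tasks) {
            task.join();         // block until each task completes
        }
        pool.shutdown();
    }
}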