Example usage for java.util BitSet nextClearBit

List of usage examples for java.util BitSet nextClearBit

Introduction

On this page you can find example usages of java.util.BitSet.nextClearBit.

Prototype

public int nextClearBit(int fromIndex) 

Document

Returns the index of the first bit that is set to false that occurs on or after the specified starting index.
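
As a quick orientation, here is a minimal, self-contained sketch (class name and values are illustrative, not taken from the projects below) showing that nextClearBit scans forward from the given index and may return an index beyond the set's current logical size; it never returns -1:

import java.util.BitSet;

public class NextClearBitSketch {
    public static void main(String[] args) {
        BitSet bits = new BitSet();
        bits.set(0, 3); // bits 0, 1 and 2 are set

        System.out.println(bits.nextClearBit(0));   // 3: first false bit at or after index 0
        System.out.println(bits.nextClearBit(3));   // 3: the starting index itself is clear
        System.out.println(bits.nextClearBit(100)); // 100: may exceed length(); never -1
    }
}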

Usage

From source file:Main.java

public static void main(String[] args) {

    BitSet bitset1 = new BitSet(8);
    BitSet bitset2 = new BitSet(8);

    // assign values to bitset1
    bitset1.set(0);
    bitset1.set(1);
    bitset1.set(2);

    // assign values to bitset2
    bitset2.set(2);
    bitset2.set(4);

    // print the sets
    System.out.println("Bitset1:" + bitset1);
    System.out.println("Bitset2:" + bitset2);

    // print the first clear bit of bitset1
    System.out.println(bitset1.nextClearBit(0));

    // print the first clear bit of bitset2 at or after index 5
    System.out.println(bitset2.nextClearBit(5));
}
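
For reference, running this example prints the two sets followed by the two clear-bit indices:

Bitset1:{0, 1, 2}
Bitset2:{2, 4}
3
5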

From source file:hivemall.ftvec.ranking.PopulateNotInUDTF.java

private void populateItems(@Nonnull BitSet bits) throws HiveException {
    for (int i = bits.nextClearBit(0); i <= maxItemId; i = bits.nextClearBit(i + 1)) {
        populatedItemId.set(i);
        forward(forwardObjs);
    }
}
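
The loop above is the standard idiom for visiting every clear bit up to an upper bound. Here is a self-contained sketch of the same idiom without the Hive-specific forwarding (the item IDs and the bound are illustrative):

import java.util.BitSet;

public class ClearBitIteration {
    public static void main(String[] args) {
        final int maxItemId = 10;
        BitSet seen = new BitSet();
        seen.set(2);
        seen.set(5);
        seen.set(7);

        // Visit every id in [0, maxItemId] that is NOT present in the BitSet.
        for (int i = seen.nextClearBit(0); i <= maxItemId; i = seen.nextClearBit(i + 1)) {
            System.out.println("missing id: " + i);
        }
    }
}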

From source file:hivemall.dataset.LogisticRegressionDataGeneratorUDTF.java

private void generateSparseData() throws HiveException {
    float label = rnd1.nextFloat();
    float sign = (label <= prob_one) ? 1.f : 0.f;
    labels[position] = classification ? sign : label;
    String[] features = featuresArray[position];
    assert (features != null);
    final BitSet used = new BitSet(n_dimensions);
    int searchClearBitsFrom = 0;
    for (int i = 0, retry = 0; i < n_features; i++) {
        int f = rnd2.nextInt(n_dimensions);
        if (used.get(f)) {
            if (retry < 3) {
                --i;
                ++retry;
                continue;
            }
            searchClearBitsFrom = used.nextClearBit(searchClearBitsFrom);
            f = searchClearBitsFrom;
        }
        used.set(f);
        float w = (float) rnd2.nextGaussian() + (sign * eps);
        String y = f + ":" + w;
        features[i] = y;
        retry = 0;
    }
    if (sort) {
        Arrays.sort(features, new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                int i1 = Integer.parseInt(o1.split(":")[0]);
                int i2 = Integer.parseInt(o2.split(":")[0]);
                return Primitives.compare(i1, i2);
            }
        });
    }
}
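
In the generator above, nextClearBit acts as a deterministic fallback: after a few colliding random draws it hands back the lowest feature index not yet used. A small sketch of that fallback pattern (the dimension count, retry limit, and random seed are illustrative):

import java.util.BitSet;
import java.util.Random;

public class PickUnusedIndex {
    public static void main(String[] args) {
        final int dimensions = 8;
        final Random rnd = new Random(42L);
        final BitSet used = new BitSet(dimensions);
        int searchFrom = 0;

        for (int picked = 0; picked < dimensions; picked++) {
            int f = rnd.nextInt(dimensions);
            int retry = 0;
            while (used.get(f) && retry < 3) { // retry a few random draws on collision
                f = rnd.nextInt(dimensions);
                retry++;
            }
            if (used.get(f)) { // still taken: fall back to the lowest unused index
                searchFrom = used.nextClearBit(searchFrom);
                f = searchFrom;
            }
            used.set(f);
            System.out.println("picked index " + f);
        }
    }
}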

From source file:com.turn.griffin.data.GriffinUploadTask.java

private void uploadFile(FileInfo fileInfo, BitSet availableBlockBitmap) {

    String filename = fileInfo.getFilename();
    long fileVersion = fileInfo.getVersion();
    long blockCount = fileInfo.getBlockCount();
    long blockSize = fileInfo.getBlockSize();
    byte[] buffer = new byte[(int) blockSize];

    GriffinLibCacheUtil libCacheManager = dataManager.getLibCacheManager().get();
    String dataTopicNameForProducer = GriffinKafkaTopicNameUtil.getDataTopicNameForProducer(filename,
            fileVersion);
    GriffinProducer producer = null;
    try {
        String libCacheUploadFilePath = libCacheManager.getUploadFilePath(fileInfo);
        RandomAccessFile libCacheUploadFile = new RandomAccessFile(libCacheUploadFilePath, "r");
        producer = new GriffinProducer(GriffinModule.BROKERS);

        logger.info(String.format("Starting to push %s",
                fileInfo.toString().replaceAll(System.getProperty("line.separator"), " ")));

        int uploadAttempts = 0;
        while (availableBlockBitmap.nextClearBit(0) != blockCount) {

            /* If a new version has arrived abort uploading older version */
            if (!libCacheManager.isLatestGlobalVersion(fileInfo)) {
                logger.info(
                        String.format("Aborting upload for %s version %s as a newer version is now available.",
                                filename, fileVersion));
                break;
            }

            if (uploadAttempts >= maxUploadAttempts) {
                logger.warn(String.format("Unable to upload %s version %s after %s attempts", filename,
                        fileVersion, uploadAttempts));
                String subject = String.format("WARNING: GriffinUploadTask failed for blob:%s", filename);
                String body = String.format(
                        "Action: GriffinUploadTask failed for blob:%s version:%s%n"
                                + "Reason: Unable to upload after %s attempts%n",
                        filename, fileVersion, uploadAttempts);
                GriffinModule.emailAlert(subject, body);
                break;
            }

            int blockToUpload = availableBlockBitmap.nextClearBit(0);
            libCacheUploadFile.seek(blockToUpload * blockSize);
            int bytesRead = libCacheUploadFile.read(buffer);
            DataMessage msg = DataMessage.newBuilder().setBlockSeqNo(blockToUpload).setByteCount(bytesRead)
                    .setData(ByteString.copyFrom(buffer)).build();
            try {
                producer.send(dataTopicNameForProducer, DigestUtils.md5Hex(buffer), msg);
                availableBlockBitmap.set(blockToUpload);
                uploadAttempts = 0;
            } catch (FailedToSendMessageException ftsme) {
                /* Retry the same block again */
                logger.warn(String.format("Unable to send block %s for file: %s version: %s "
                        + "due to FailedToSendMessageException", blockToUpload, filename, fileVersion));
                uploadAttempts++;
            } catch (Exception e) {
                logger.warn(String.format("Unable to send block %s for file: %s version: %s", blockToUpload,
                        filename, fileVersion), e);
                logger.warn("Exception", e);
                uploadAttempts++;
            }
        }
        logger.info(String.format("Ending file upload for file %s version %s to %s", filename, fileVersion,
                dataTopicNameForProducer));
        libCacheUploadFile.close();
    } catch (IOException | RuntimeException e) {
        logger.error(String.format("Unable to upload file %s to %s", filename, dataTopicNameForProducer), e);
        String subject = String.format("WARNING: GriffinUploadTask failed for blob:%s", filename);
        String body = String.format(
                "Action: GriffinUploadTask failed for blob:%s version:%s%n"
                        + "Reason: Exception in GriffinUploadTask%n %s",
                filename, fileVersion, Throwables.getStackTraceAsString(e));
        GriffinModule.emailAlert(subject, body);
    } finally {
        if (producer != null) {
            producer.shutdown();
        }
    }

}
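
The while condition above uses nextClearBit(0) as a completeness test: once every block index below blockCount is set, the first clear bit is blockCount itself and the upload loop ends. A minimal sketch of that pattern (the block-processing step is a stand-in for the Kafka send):

import java.util.BitSet;

public class UntilAllBlocksDone {
    public static void main(String[] args) {
        final int blockCount = 5;
        BitSet done = new BitSet(blockCount);

        // Keep working until every index in [0, blockCount) is set.
        while (done.nextClearBit(0) != blockCount) {
            int next = done.nextClearBit(0); // lowest unfinished block
            System.out.println("processing block " + next);
            done.set(next); // a real task might fail here and retry the same block
        }
        System.out.println("all " + blockCount + " blocks done");
    }
}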

From source file:com.turn.griffin.data.GriffinUploadTask.java

private BitSet getAvailableBitmap(FileInfo fileInfo) {

    String filename = fileInfo.getFilename();
    long fileVersion = fileInfo.getVersion();
    long blockCount = fileInfo.getBlockCount();

    Optional<GriffinConsumer> consumer = Optional.absent();
    BitSet availableBlockBitmap = new BitSet((int) blockCount);
    try {
        BlockingQueue<byte[]> dataQueue = new ArrayBlockingQueue<>(
                GriffinDownloadTask.DOWNLOAD_CONSUMER_QUEUE_SIZE);
        Properties properties = new Properties();
        properties.put("auto.offset.reset", "smallest");

        /* The groupId should be unique to avoid conflict with other consumers running on this machine */
        String consumerGroupId = GriffinKafkaTopicNameUtil.getDataTopicConsumerGroupId(filename, fileVersion,
                new String[] { dataManager.getMyServerId(), this.getClass().getSimpleName(),
                        UUID.randomUUID().toString() });
        String dataTopicNameForConsumer = GriffinKafkaTopicNameUtil.getDataTopicNameForConsumer(filename,
                fileVersion);

        consumer = Optional.fromNullable(new GriffinConsumer(GriffinModule.ZOOKEEPER, consumerGroupId,
                dataTopicNameForConsumer, GriffinDownloadTask.DOWNLOAD_THREAD_COUNT, properties, dataQueue));

        /* TODO: Change this to a better bitmap (Check out RoaringBitmap) */
        while (availableBlockBitmap.nextClearBit(0) != blockCount) {
            Optional<byte[]> message = Optional.fromNullable(dataQueue
                    .poll(GriffinLeaderSelectionTask.LEADER_SELECTION_PERIOD_MS, TimeUnit.MILLISECONDS));
            if (!message.isPresent()) {
                /* We know how much of the file is available in Kafka */
                break;
            }
            DataMessage dataMessage = DataMessage.parseFrom(message.get());
            availableBlockBitmap.set((int) dataMessage.getBlockSeqNo());
        }
    } catch (Exception e) {
        logger.warn(String.format("Unable to download file %s to get available bitmap ", filename), e);
        /* Work with whatever information we have gathered till now */
    } finally {
        if (consumer.isPresent()) {
            consumer.get().shutdown(true);
        }
    }

    return availableBlockBitmap;
}

From source file:hivemall.smile.classification.GradientTreeBoostingClassifierUDTF.java

private void train2(@Nonnull final double[][] x, @Nonnull final int[] y) throws HiveException {
    final int numVars = SmileExtUtils.computeNumInputVars(_numVars, x);
    if (logger.isInfoEnabled()) {
        logger.info("k: " + 2 + ", numTrees: " + _numTrees + ", shirinkage: " + _eta + ", subsample: "
                + _subsample + ", numVars: " + numVars + ", maxDepth: " + _maxDepth + ", minSamplesSplit: "
                + _minSamplesSplit + ", maxLeafs: " + _maxLeafNodes + ", seed: " + _seed);
    }

    final int numInstances = x.length;
    final int numSamples = (int) Math.round(numInstances * _subsample);

    final double[] h = new double[numInstances]; // current F(x_i)
    final double[] response = new double[numInstances]; // response variable for regression tree.

    final double mu = smile.math.Math.mean(y);
    final double intercept = 0.5d * Math.log((1.d + mu) / (1.d - mu));

    for (int i = 0; i < numInstances; i++) {
        h[i] = intercept;
    }

    final int[][] order = SmileExtUtils.sort(_attributes, x);
    final RegressionTree.NodeOutput output = new L2NodeOutput(response);

    final BitSet sampled = new BitSet(numInstances);
    final int[] bag = new int[numSamples];
    final int[] perm = new int[numSamples];
    for (int i = 0; i < numSamples; i++) {
        perm[i] = i;
    }

    long s = (this._seed == -1L) ? SmileExtUtils.generateSeed() : new smile.math.Random(_seed).nextLong();
    final smile.math.Random rnd1 = new smile.math.Random(s);
    final smile.math.Random rnd2 = new smile.math.Random(rnd1.nextLong());

    for (int m = 0; m < _numTrees; m++) {
        reportProgress(_progressReporter);

        SmileExtUtils.shuffle(perm, rnd1);
        for (int i = 0; i < numSamples; i++) {
            int index = perm[i];
            bag[i] = index;
            sampled.set(index);
        }

        for (int i = 0; i < numInstances; i++) {
            response[i] = 2.0d * y[i] / (1.d + Math.exp(2.d * y[i] * h[i]));
        }

        RegressionTree tree = new RegressionTree(_attributes, x, response, numVars, _maxDepth, _maxLeafNodes,
                _minSamplesSplit, _minSamplesLeaf, order, bag, output, rnd2);

        for (int i = 0; i < numInstances; i++) {
            h[i] += _eta * tree.predict(x[i]);
        }

        // out-of-bag error estimate
        int oobTests = 0, oobErrors = 0;
        for (int i = sampled.nextClearBit(0); i < numInstances; i = sampled.nextClearBit(i + 1)) {
            oobTests++;
            final int pred = (h[i] > 0.d) ? 1 : 0;
            if (pred != y[i]) {
                oobErrors++;
            }
        }
        float oobErrorRate = 0.f;
        if (oobTests > 0) {
            oobErrorRate = ((float) oobErrors) / oobTests;
        }

        forward(m + 1, intercept, _eta, oobErrorRate, tree);
        sampled.clear();
    }
}

From source file:bes.injector.InjectorBurnTest.java

private void testPromptnessOfExecution(long intervalNanos, float loadIncrement)
        throws InterruptedException, ExecutionException, TimeoutException {
    final int executorCount = 4;
    int threadCount = 8;
    int maxQueued = 1024;
    final WeibullDistribution workTime = new WeibullDistribution(3, 200000);
    final long minWorkTime = TimeUnit.MICROSECONDS.toNanos(1);
    final long maxWorkTime = TimeUnit.MILLISECONDS.toNanos(1);

    final int[] threadCounts = new int[executorCount];
    final WeibullDistribution[] workCount = new WeibullDistribution[executorCount];
    final ExecutorService[] executors = new ExecutorService[executorCount];
    final Injector injector = new Injector("");
    for (int i = 0; i < executors.length; i++) {
        executors[i] = injector.newExecutor(threadCount, maxQueued);
        threadCounts[i] = threadCount;
        workCount[i] = new WeibullDistribution(2, maxQueued);
        threadCount *= 2;
        maxQueued *= 2;
    }

    long runs = 0;
    long events = 0;
    final TreeSet<Batch> pending = new TreeSet<Batch>();
    final BitSet executorsWithWork = new BitSet(executorCount);
    long until = 0;
    // basic idea is to go through different levels of load on the executor service; initially is all small batches
    // (mostly within max queue size) of very short operations, moving to progressively larger batches
    // (beyond max queued size), and longer operations
    for (float multiplier = 0f; multiplier < 2.01f;) {
        if (System.nanoTime() > until) {
            System.out.println(String.format("Completed %.0fK batches with %.1fM events", runs * 0.001f,
                    events * 0.000001f));
            events = 0;
            until = System.nanoTime() + intervalNanos;
            multiplier += loadIncrement;
            System.out.println(String.format("Running for %ds with load multiplier %.1f",
                    TimeUnit.NANOSECONDS.toSeconds(intervalNanos), multiplier));
        }

        // wait a random amount of time so we submit new tasks in various stages of
        long timeout;
        if (pending.isEmpty())
            timeout = 0;
        else if (Math.random() > 0.98)
            timeout = Long.MAX_VALUE;
        else if (pending.size() == executorCount)
            timeout = pending.first().timeout;
        else
            timeout = (long) (Math.random() * pending.last().timeout);

        while (!pending.isEmpty() && timeout > System.nanoTime()) {
            Batch first = pending.first();
            boolean complete = false;
            try {
                for (Result result : first.results.descendingSet())
                    result.future.get(timeout - System.nanoTime(), TimeUnit.NANOSECONDS);
                complete = true;
            } catch (TimeoutException e) {
            }
            if (!complete && System.nanoTime() > first.timeout) {
                for (Result result : first.results)
                    if (!result.future.isDone())
                        throw new AssertionError();
                complete = true;
            }
            if (complete) {
                pending.pollFirst();
                executorsWithWork.clear(first.executorIndex);
            }
        }

        // if we've emptied the executors, give all our threads an opportunity to spin down
        if (timeout == Long.MAX_VALUE) {
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
            }
        }

        // submit a random batch to the first free executor service
        int executorIndex = executorsWithWork.nextClearBit(0);
        if (executorIndex >= executorCount)
            continue;
        executorsWithWork.set(executorIndex);
        ExecutorService executor = executors[executorIndex];
        TreeSet<Result> results = new TreeSet<Result>();
        int count = (int) (workCount[executorIndex].sample() * multiplier);
        long targetTotalElapsed = 0;
        long start = System.nanoTime();
        long baseTime;
        if (Math.random() > 0.5)
            baseTime = 2 * (long) (workTime.sample() * multiplier);
        else
            baseTime = 0;
        for (int j = 0; j < count; j++) {
            long time;
            if (baseTime == 0)
                time = (long) (workTime.sample() * multiplier);
            else
                time = (long) (baseTime * Math.random());
            if (time < minWorkTime)
                time = minWorkTime;
            if (time > maxWorkTime)
                time = maxWorkTime;
            targetTotalElapsed += time;
            Future<?> future = executor.submit(new WaitTask(time));
            results.add(new Result(future, System.nanoTime() + time));
        }
        long end = start + (long) Math.ceil(targetTotalElapsed / (double) threadCounts[executorIndex])
                + TimeUnit.MILLISECONDS.toNanos(100L);
        long now = System.nanoTime();
        if (runs++ > executorCount && now > end)
            throw new AssertionError();
        events += results.size();
        pending.add(new Batch(results, end, executorIndex));
        //            System.out.println(String.format("Submitted batch to executor %d with %d items and %d permitted millis", executorIndex, count, TimeUnit.NANOSECONDS.toMillis(end - start)));
    }
}
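
This test uses the BitSet as a pool of slots: nextClearBit(0) returns the lowest-numbered idle executor, and a result at or beyond executorCount means every executor is busy. A stripped-down sketch of that free-slot pattern (the capacity and the acquire/release sequence are illustrative):

import java.util.BitSet;

public class FreeSlotAllocator {
    private final BitSet inUse;
    private final int capacity;

    FreeSlotAllocator(int capacity) {
        this.capacity = capacity;
        this.inUse = new BitSet(capacity);
    }

    /** Returns the lowest free slot index, or -1 if every slot is taken. */
    int acquire() {
        int slot = inUse.nextClearBit(0);
        if (slot >= capacity) {
            return -1;
        }
        inUse.set(slot);
        return slot;
    }

    void release(int slot) {
        inUse.clear(slot);
    }

    public static void main(String[] args) {
        FreeSlotAllocator pool = new FreeSlotAllocator(2);
        System.out.println(pool.acquire()); // 0
        System.out.println(pool.acquire()); // 1
        System.out.println(pool.acquire()); // -1: all slots busy
        pool.release(0);
        System.out.println(pool.acquire()); // 0 again
    }
}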

From source file:hivemall.smile.classification.GradientTreeBoostingClassifierUDTF.java

/**
 * Train L-k tree boost.
 */
private void traink(final double[][] x, final int[] y, final int k) throws HiveException {
    final int numVars = SmileExtUtils.computeNumInputVars(_numVars, x);
    if (logger.isInfoEnabled()) {
        logger.info("k: " + k + ", numTrees: " + _numTrees + ", shirinkage: " + _eta + ", subsample: "
                + _subsample + ", numVars: " + numVars + ", minSamplesSplit: " + _minSamplesSplit
                + ", maxDepth: " + _maxDepth + ", maxLeafs: " + _maxLeafNodes + ", seed: " + _seed);
    }

    final int numInstances = x.length;
    final int numSamples = (int) Math.round(numInstances * _subsample);

    final double[][] h = new double[k][numInstances]; // boost tree output.
    final double[][] p = new double[k][numInstances]; // posteriori probabilities.
    final double[][] response = new double[k][numInstances]; // pseudo response.

    final int[][] order = SmileExtUtils.sort(_attributes, x);
    final RegressionTree.NodeOutput[] output = new LKNodeOutput[k];
    for (int i = 0; i < k; i++) {
        output[i] = new LKNodeOutput(response[i], k);
    }

    final BitSet sampled = new BitSet(numInstances);
    final int[] bag = new int[numSamples];
    final int[] perm = new int[numSamples];
    for (int i = 0; i < numSamples; i++) {
        perm[i] = i;
    }

    long s = (this._seed == -1L) ? SmileExtUtils.generateSeed() : new smile.math.Random(_seed).nextLong();
    final smile.math.Random rnd1 = new smile.math.Random(s);
    final smile.math.Random rnd2 = new smile.math.Random(rnd1.nextLong());

    // out-of-bag prediction
    final int[] prediction = new int[numInstances];

    for (int m = 0; m < _numTrees; m++) {
        for (int i = 0; i < numInstances; i++) {
            double max = Double.NEGATIVE_INFINITY;
            for (int j = 0; j < k; j++) {
                final double h_ji = h[j][i];
                if (max < h_ji) {
                    max = h_ji;
                }
            }
            double Z = 0.0d;
            for (int j = 0; j < k; j++) {
                double p_ji = Math.exp(h[j][i] - max);
                p[j][i] = p_ji;
                Z += p_ji;
            }
            for (int j = 0; j < k; j++) {
                p[j][i] /= Z;
            }
        }

        final RegressionTree[] trees = new RegressionTree[k];

        Arrays.fill(prediction, -1);
        double max_h = Double.NEGATIVE_INFINITY;
        int oobTests = 0, oobErrors = 0;

        for (int j = 0; j < k; j++) {
            reportProgress(_progressReporter);

            final double[] response_j = response[j];
            final double[] p_j = p[j];
            final double[] h_j = h[j];

            for (int i = 0; i < numInstances; i++) {
                if (y[i] == j) {
                    response_j[i] = 1.0d;
                } else {
                    response_j[i] = 0.0d;
                }
                response_j[i] -= p_j[i];
            }

            SmileExtUtils.shuffle(perm, rnd1);
            for (int i = 0; i < numSamples; i++) {
                int index = perm[i];
                bag[i] = index;
                sampled.set(i);
            }

            RegressionTree tree = new RegressionTree(_attributes, x, response[j], numVars, _maxDepth,
                    _maxLeafNodes, _minSamplesSplit, _minSamplesLeaf, order, bag, output[j], rnd2);
            trees[j] = tree;

            for (int i = 0; i < numInstances; i++) {
                double h_ji = h_j[i] + _eta * tree.predict(x[i]);
                h_j[i] += h_ji;
                if (h_ji > max_h) {
                    max_h = h_ji;
                    prediction[i] = j;
                }
            }

        } // for each k

        // out-of-bag error estimate
        for (int i = sampled.nextClearBit(0); i < numInstances; i = sampled.nextClearBit(i + 1)) {
            oobTests++;
            if (prediction[i] != y[i]) {
                oobErrors++;
            }
        }
        sampled.clear();
        float oobErrorRate = 0.f;
        if (oobTests > 0) {
            oobErrorRate = ((float) oobErrors) / oobTests;
        }

        // forward a row
        forward(m + 1, 0.d, _eta, oobErrorRate, trees);

    } // for each m
}

From source file:gov.noaa.pfel.erddap.dataset.EDDTableFromHttpGet.java

/** 
     * This is used to add insert or delete commands into a data file of this dataset. 
     * This is EDDTableFromHttpGet overwriting the default implementation.
     *
     * <p>The key should be author_secret. So keys are specific to specific people/actors.
     * The author will be kept and added to the 'author' column in the dataset.
     *
     * <p>INSERT works like SQL's INSERT and UPDATE.
     * If the info matches existing values of sortColumnSourceNames,
     * the previous data is updated/overwritten. Otherwise, it is inserted.
     *
     * <p>DELETE works like SQL's DELETE
     *
     * @param tDirStructureColumnNames the column names for the parts of the 
     *   dir and file names. All of these names must be in requiredColumnNames.
     * @param keys the valid values of author= (to authenticate the author)
     * @param columnNames the names of all of the dataset's source variables.
     *   This does not include timestamp, author, or command.
     *   The time variable must be named time.
     * @param columnUnits any of them may be null or "".
     *   All timestamp columns (in the general sense) should have UDUNITS 
     *   String time units (e.g., "yyyy-MM-dd'T'HH:mm:ss") 
     *   or numeric time units (e.g., "days since 1985-01-01").
     *   For INSERT and DELETE calls, the time values must be in that format
     *   (you can't revert to ISO 8601 format as with data requests in the rest of ERDDAP).
     * @param columnTypes the Java names for the types (e.g., double).
     *   The missing values are the default missing values for PrimitiveArrays.
     *   All timestamp columns MUST be doubles.
     *   'long' is not supported because .nc3 files don't support longs.
     * @param columnStringLengths -1 if not a string column.
     * @param requiredColumnNames the names which identify a unique row.
     *   RequiredColumnNames MUST all be in columnNames.
     *   Insert requests MUST have all of the requiredColumnNames and usually have all 
     *     columnNames + author. Missing columns will get (standard PrimitiveArray) 
     *     missing values.
     *   Delete requests MUST have all of the requiredColumnNames and, in addition,
     *     usually have just author. Other columns are irrelevant.
     *   This should be as minimal as possible, and always includes time:  
     *   For TimeSeries: stationID, time.
     *   For Trajectory: trajectoryID, time.
     *   For Profile: stationID, time, depth.
     *   For TimeSeriesProfile: stationID, time, depth.
     *   For TrajectoryProfile: trajectoryID, time, depth.
     * @param command INSERT_COMMAND or DELETE_COMMAND
     * @param userDapQuery the param string, still percent-encoded
     * @param dirTable  a copy of the dirTable  (changes may be made to it) or null.
     * @param fileTable a copy of the fileTable (changes may be made to it) or null.
     * @return the response string 
     * @throws Throwable if any kind of trouble
     */
    public static String insertOrDelete(String startDir, StringArray tDirStructureColumnNames,
            IntArray tDirStructureNs, IntArray tDirStructureCalendars, HashSet<String> keys, String columnNames[],
            String columnUnits[], String columnTypes[], int columnStringLengths[], String requiredColumnNames[],
            byte command, String userDapQuery, Table dirTable, Table fileTable) throws Throwable {

        double timestamp = System.currentTimeMillis() / 1000.0;
        if (dirTable == null || fileTable == null) { //ensure both or neither
            dirTable = null;
            fileTable = null;
        }

        //store values parallelling columnNames
        int nColumns = columnNames.length;
        PrimitiveArray columnValues[] = new PrimitiveArray[nColumns];
        Class columnClasses[] = new Class[nColumns];
        DataType columnDataTypes[] = new DataType[nColumns];
        boolean columnIsString[] = new boolean[nColumns];
        int timeColumn = -1;
        DateTimeFormatter timeFormatter = null; //used if time variable is string
        double timeBaseAndFactor[] = null; //used if time variable is numeric
        for (int col = 0; col < nColumns; col++) {
            if (!String2.isSomething(columnUnits[col]))
                columnUnits[col] = "";

            if (columnNames[col].equals(EDV.TIME_NAME)) {
                timeColumn = col;
                if (columnIsString[col]) {
                    if (columnUnits[col].toLowerCase().indexOf("yyyy") < 0) //was "yy"
                        throw new SimpleException(
                                EDStatic.queryError + "Invalid units for the string time variable. "
                                        + "Units MUST specify the format of the time values.");
                    timeFormatter = DateTimeFormat.forPattern(columnUnits[col]).withZone(ZoneId.of("UTC"));
                } else { //numeric time values
                    timeBaseAndFactor = Calendar2.getTimeBaseAndFactor(columnUnits[col]); //throws RuntimeException if trouble
                }
            }

            if (columnTypes[col].equals("String")) {
                columnClasses[col] = String.class;
                columnDataTypes[col] = DataType.STRING;
                columnIsString[col] = true;
                if (columnStringLengths[col] < 1 || columnStringLengths[col] > 64000)
                    throw new SimpleException(EDStatic.queryError + "Invalid string length="
                            + columnStringLengths[col] + " for column=" + columnNames[col] + ".");
            } else {
                columnClasses[col] = PrimitiveArray.elementStringToClass(columnTypes[col]);
                columnDataTypes[col] = NcHelper.getDataType(columnClasses[col]);
            }
        }

        //parse the userDapQuery's parts. Ensure it is valid. 
        String parts[] = String2.split(userDapQuery, '&');
        int nParts = parts.length;
        String author = null; //the part before '_'
        int arraySize = -1; //until an array is found
        BitSet requiredColumnsFound = new BitSet();
        for (int p = 0; p < nParts; p++) {
            parts[p] = SSR.percentDecode(parts[p]);
            int eqPo = parts[p].indexOf('=');
            if (eqPo <= 0 || //no '=' or no name
                    "<>~!".indexOf(parts[p].charAt(eqPo - 1)) >= 0) // <= >= != ~=
                throw new SimpleException(
                        EDStatic.queryError + "The \"" + parts[p] + "\" parameter isn't in the form name=value.");
            String tName = parts[p].substring(0, eqPo);
            String tValue = parts[p].substring(eqPo + 1);
            if (tValue.startsWith("~")) // =~
                throw new SimpleException(
                        EDStatic.queryError + "The \"" + parts[p] + "\" parameter isn't in the form name=value.");

            //catch and verify author=
            if (tName.equals(AUTHOR)) {
                if (author != null)
                    throw new SimpleException(EDStatic.queryError + "There are two parameters with name=author.");
                if (!keys.contains(tValue))
                    throw new SimpleException(EDStatic.queryError + "Invalid author_key.");
                if (p != nParts - 1)
                    throw new SimpleException(EDStatic.queryError + "name=author must be the last parameter.");
                int po = Math.max(0, tValue.indexOf('_'));
                author = tValue.substring(0, po); //may be ""

            } else {
                //is it a requiredColumn?
                int whichRC = String2.indexOf(requiredColumnNames, tName);
                if (whichRC >= 0)
                    requiredColumnsFound.set(whichRC);

                //whichColumn? 
                int whichCol = String2.indexOf(columnNames, tName);
                if (whichCol < 0)
                    throw new SimpleException(EDStatic.queryError + "Unknown columnName=" + tName);
                if (columnValues[whichCol] != null)
                    throw new SimpleException(
                            EDStatic.queryError + "There are two parameters with columnName=" + tName + ".");

                //get the values
                if (tValue.startsWith("[") && tValue.endsWith("]")) {
                    //deal with array of values: name=[valuesCSV]
                    columnValues[whichCol] = PrimitiveArray.csvFactory(columnClasses[whichCol], tValue);
                    if (arraySize < 0)
                        arraySize = columnValues[whichCol].size();
                    else if (arraySize != columnValues[whichCol].size())
                        throw new SimpleException(
                                EDStatic.queryError + "Different parameters with arrays have different sizes: "
                                        + arraySize + "!=" + columnValues[whichCol].size() + ".");

                } else {
                    //deal with single value: name=value
                    columnValues[whichCol] = PrimitiveArray.csvFactory(columnClasses[whichCol], tValue);

                    if (columnClasses[whichCol] == String.class && (tValue.length() < 2 || tValue.charAt(0) != '"'
                            || tValue.charAt(tValue.length() - 1) != '"'))
                        throw new SimpleException(EDStatic.queryError + "The String value for columnName=" + tName
                                + " must start and end with \"'s.");
                    if (columnValues[whichCol].size() != 1)
                        throw new SimpleException(
                                EDStatic.queryError + "One value (not " + columnValues[whichCol].size()
                                        + ") expected for columnName=" + tName + ". (missing [ ] ?)");
                }
            }
        }

        //ensure required parameters were specified 
        if (author == null)
            throw new SimpleException(EDStatic.queryError + "author= was not specified.");
        int notFound = requiredColumnsFound.nextClearBit(0);
        if (notFound < requiredColumnNames.length)
            throw new SimpleException(EDStatic.queryError + "requiredColumnName=" + requiredColumnNames[notFound]
                    + " wasn't specified.");

        //make all columnValues the same size
        //(timestamp, author, command are separate and have just 1 value)
        int maxSize = Math.max(1, arraySize);
        for (int col = 0; col < nColumns; col++) {
            PrimitiveArray pa = columnValues[col];
            if (pa == null) {
                //this var wasn't in the command, so use mv's
                columnValues[col] = PrimitiveArray.factory(columnClasses[col], maxSize, "");
            } else if (pa.size() == 1 && maxSize > 1) {
                columnValues[col] = PrimitiveArray.factory(columnClasses[col], maxSize, pa.getString(0));
            }
        }

        //figure out the fullFileName for each row
        StringArray fullFileNames = new StringArray(maxSize, false);
        for (int row = 0; row < maxSize; row++) {
            //figure out the epochSeconds time value
            double tTime = timeColumn < 0 ? Double.NaN : //no time column
                    timeBaseAndFactor == null
                            ? Calendar2.toEpochSeconds(columnValues[timeColumn].getString(row), timeFormatter)
                            : Calendar2.unitsSinceToEpochSeconds( //numeric time
                                    timeBaseAndFactor[0], timeBaseAndFactor[1],
                                    columnValues[timeColumn].getDouble(row));

            fullFileNames.add(whichFile(startDir, tDirStructureColumnNames, tDirStructureNs, tDirStructureCalendars,
                    columnNames, columnValues, row, tTime));
        }

        //EVERYTHING SHOULD BE VALIDATED BY NOW. NO ERRORS AFTER HERE!
        //append each input row to the appropriate file
        Array oneTimestampArray = Array.factory(new double[] { timestamp });
        //I reported to netcdf-java mailing list: this generated null pointer exception in 4.6.6:
        // String tsar[] = new String[]{author};
        // Array oneAuthorArray    = Array.factory(tsar); //new String[]{author});
        //This works:
        ArrayString.D1 oneAuthorArray = new ArrayString.D1(1);
        oneAuthorArray.set(0, author);

        Array oneCommandArray = Array.factory(new byte[] { command });
        int row = 0;
        while (row < maxSize) {
            //figure out which file
            String fullFileName = fullFileNames.get(row);

            //open the file
            NetcdfFileWriter file = null;
            boolean fileIsNew = false;
            int[] origin = new int[1];
            try {

                Group rootGroup = null;
                Dimension rowDim = null;
                Variable vars[] = new Variable[nColumns];
                Variable timestampVar = null;
                Variable authorVar = null;
                Variable commandVar = null;
                if (File2.isFile(fullFileName)) {
                    file = NetcdfFileWriter.openExisting(fullFileName);
                    rootGroup = file.addGroup(null, "");
                    rowDim = rootGroup.findDimension("row");

                    //find Variables for columnNames.   May be null, but shouldn't be.
                    StringArray columnsNotFound = new StringArray();
                    for (int col = 0; col < nColumns; col++) {
                        vars[col] = rootGroup.findVariable(columnNames[col]);
                        if (vars[col] == null)
                            columnsNotFound.add(columnNames[col]);
                    }
                    timestampVar = rootGroup.findVariable(TIMESTAMP);
                    authorVar = rootGroup.findVariable(AUTHOR);
                    commandVar = rootGroup.findVariable(COMMAND);
                    if (timestampVar == null)
                        columnsNotFound.add(TIMESTAMP);
                    if (authorVar == null)
                        columnsNotFound.add(AUTHOR);
                    if (commandVar == null)
                        columnsNotFound.add(COMMAND);
                    if (columnsNotFound.size() > 0)
                        throw new SimpleException(MustBe.InternalError + ": column(s)=" + columnsNotFound
                                + " not found in " + fullFileName);

                } else {
                    //if file doesn't exist, create it
                    fileIsNew = true; //first
                    file = NetcdfFileWriter.createNew(NetcdfFileWriter.Version.netcdf3, fullFileName);
                    rootGroup = file.addGroup(null, "");
                    rowDim = file.addUnlimitedDimension("row");
                    ArrayList rowDimAL = new ArrayList();
                    rowDimAL.add(rowDim);

                    //define Variables
                    for (int col = 0; col < nColumns; col++) {
                        String cName = columnNames[col];
                        String cType = columnTypes[col];
                        if (columnIsString[col]) {
                            vars[col] = file.addStringVariable(rootGroup, cName, rowDimAL,
                                    columnStringLengths[col]);
                        } else {
                            vars[col] = file.addVariable(rootGroup, cName, columnDataTypes[col], rowDimAL);
                        }
                    }
                    timestampVar = file.addVariable(rootGroup, TIMESTAMP, DataType.DOUBLE, rowDimAL);
                    authorVar = file.addStringVariable(rootGroup, AUTHOR, rowDimAL, AUTHOR_STRLEN);
                    commandVar = file.addVariable(rootGroup, COMMAND, DataType.BYTE, rowDimAL);

                    // create the file
                    file.create();
                }

                //append the series of commands that go to this fullFileName
                int startRow = row++;
                while (row < maxSize && fullFileNames.get(row).equals(fullFileName))
                    row++;
                int stopRow = row; //1 past end

                //which row in the file table?
                int fileTableRow = -1;
                if (fileTable != null) {
                    //already in fileTable?
                    //fileTableRow = ...

                    //add to fileTable
                }

                //write the data to the file
                origin[0] = rowDim.getLength();
                for (int col = 0; col < nColumns; col++) {
                    PrimitiveArray subsetPA = columnValues[col];
                    if (startRow > 0 || stopRow != maxSize)
                        subsetPA = subsetPA.subset(startRow, 1, stopRow - 1); //inclusive
                    file.write(vars[col], origin, Array.factory(subsetPA.toObjectArray()));

                    //adjust min/max in fileTable
                    if (fileTable != null && command == INSERT_COMMAND) {
                        if (columnIsString[col]) {
                            //fileTableRow...   
                        } else {
                            double stats[] = subsetPA.calculateStats();
                            if (stats[PrimitiveArray.STATS_N] > 0) { //has some non MVs
                                //fileTableRow... Math.min(  , stats[PrimitiveArray.STATS_MIN]));
                                //fileTableRow....Math.max(  , stats[PrimitiveArray.STATS_MAX]));
                            }
                            if (stats[PrimitiveArray.STATS_N] < stopRow - startRow) {
                                //fileTableRow... hasMV
                            }
                        }
                    }
                }
                Array timestampArray = oneTimestampArray;
                Array authorArray = oneAuthorArray;
                Array commandArray = oneCommandArray;
                if (stopRow - startRow > 1) {
                    //double timestampAr[] = new double[stopRow - startRow]; 
                    //String authorAr[]    = new String[stopRow - startRow];
                    //byte   commandAr[]   = new byte  [stopRow - startRow];
                    //Arrays.fill(timestampAr, timestamp);
                    //Arrays.fill(authorAr,    author);
                    //Arrays.fill(commandAr,   command);
                    //timestampArray = Array.factory(timestampAr);
                    //authorArray    = Array.factory(authorAr);
                    //commandArray   = Array.factory(commandAr);

                    int thisShape[] = new int[] { stopRow - startRow };
                    timestampArray = Array.factoryConstant(double.class, thisShape, new Double(timestamp));
                    authorArray = Array.factoryConstant(String.class, thisShape, author);
                    commandArray = Array.factoryConstant(byte.class, thisShape, new Byte(command));
                }
                file.write(timestampVar, origin, timestampArray);
                file.writeStringData(authorVar, origin, authorArray);
                file.write(commandVar, origin, commandArray);

                //adjust min/max in fileTable
                if (fileTable != null && command == INSERT_COMMAND) {
                    //fileTableRow... Math.min(   , timestamp));
                    //fileTableRow....Math.max(   , timestamp));

                    //fileTableRow... Math.min(   , author));
                    //fileTableRow....Math.max(   , author));

                    //fileTableRow... Math.min(   , command));
                    //fileTableRow....Math.max(   , command));
                }

                //make it so!
                file.flush(); //force file update

                //close the file
                file.close();
                file = null;

            } catch (Throwable t) {
                if (file != null) {
                    try {
                        file.close();
                    } catch (Throwable t2) {
                    }
                }
                if (fileIsNew)
                    File2.delete(fullFileName);
                String2.log(
                        String2.ERROR + " while " + (fileIsNew ? "creating" : "adding to") + " " + fullFileName);
                throw t;
            }
        }

        //Don't ever change any of this (except adding something new to the end). 
        //Clients rely on it.
        return "SUCCESS: Data received. No errors. timestamp=" + Calendar2.epochSecondsToIsoStringT3(timestamp)
                + "Z=" + timestamp + " seconds since 1970-01-01T00:00:00Z.\n";
    }
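
The requiredColumnsFound check above is a compact way to validate that every required flag was set: if nextClearBit(0) is still below the number of required names, at least one of them was never seen in the query. A self-contained sketch of that validation (the column names and the parsed request are illustrative):

import java.util.BitSet;

public class RequiredFieldsCheck {
    public static void main(String[] args) {
        String[] required = { "stationID", "time", "depth" };
        BitSet found = new BitSet(required.length);

        // Pretend the parsed request only mentioned stationID and time.
        found.set(0);
        found.set(1);

        int missing = found.nextClearBit(0);
        if (missing < required.length) {
            System.out.println("required field not specified: " + required[missing]); // here: depth
        } else {
            System.out.println("all required fields present");
        }
    }
}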

From source file:org.apache.cassandra.concurrent.LongSharedExecutorPoolTest.java

private void testPromptnessOfExecution(long intervalNanos, float loadIncrement)
        throws InterruptedException, ExecutionException {
    final int executorCount = 4;
    int threadCount = 8;
    int maxQueued = 1024;
    final WeibullDistribution workTime = new WeibullDistribution(3, 200000);
    final long minWorkTime = TimeUnit.MICROSECONDS.toNanos(1);
    final long maxWorkTime = TimeUnit.MILLISECONDS.toNanos(1);

    final int[] threadCounts = new int[executorCount];
    final WeibullDistribution[] workCount = new WeibullDistribution[executorCount];
    final ExecutorService[] executors = new ExecutorService[executorCount];
    for (int i = 0; i < executors.length; i++) {
        executors[i] = SharedExecutorPool.SHARED.newExecutor(threadCount, maxQueued, "test" + i, "test" + i);
        threadCounts[i] = threadCount;
        workCount[i] = new WeibullDistribution(2, maxQueued);
        threadCount *= 2;
        maxQueued *= 2;
    }

    long runs = 0;
    long events = 0;
    final TreeSet<Batch> pending = new TreeSet<>();
    final BitSet executorsWithWork = new BitSet(executorCount);
    long until = 0;
    // basic idea is to go through different levels of load on the executor service; initially is all small batches
    // (mostly within max queue size) of very short operations, moving to progressively larger batches
    // (beyond max queued size), and longer operations
    for (float multiplier = 0f; multiplier < 2.01f;) {
        if (System.nanoTime() > until) {
            System.out.println(String.format("Completed %.0fK batches with %.1fM events", runs * 0.001f,
                    events * 0.000001f));
            events = 0;
            until = System.nanoTime() + intervalNanos;
            multiplier += loadIncrement;
            System.out.println(String.format("Running for %ds with load multiplier %.1f",
                    TimeUnit.NANOSECONDS.toSeconds(intervalNanos), multiplier));
        }

        // wait a random amount of time so we submit new tasks in various stages of
        long timeout;
        if (pending.isEmpty())
            timeout = 0;
        else if (Math.random() > 0.98)
            timeout = Long.MAX_VALUE;
        else if (pending.size() == executorCount)
            timeout = pending.first().timeout;
        else
            timeout = (long) (Math.random() * pending.last().timeout);

        while (!pending.isEmpty() && timeout > System.nanoTime()) {
            Batch first = pending.first();
            boolean complete = false;
            try {
                for (Result result : first.results.descendingSet())
                    result.future.get(timeout - System.nanoTime(), TimeUnit.NANOSECONDS);
                complete = true;
            } catch (TimeoutException e) {
            }
            if (!complete && System.nanoTime() > first.timeout) {
                for (Result result : first.results)
                    if (!result.future.isDone())
                        throw new AssertionError();
                complete = true;
            }
            if (complete) {
                pending.pollFirst();
                executorsWithWork.clear(first.executorIndex);
            }
        }

        // if we've emptied the executors, give all our threads an opportunity to spin down
        if (timeout == Long.MAX_VALUE)
            Uninterruptibles.sleepUninterruptibly(10, TimeUnit.MILLISECONDS);

        // submit a random batch to the first free executor service
        int executorIndex = executorsWithWork.nextClearBit(0);
        if (executorIndex >= executorCount)
            continue;
        executorsWithWork.set(executorIndex);
        ExecutorService executor = executors[executorIndex];
        TreeSet<Result> results = new TreeSet<>();
        int count = (int) (workCount[executorIndex].sample() * multiplier);
        long targetTotalElapsed = 0;
        long start = System.nanoTime();
        long baseTime;
        if (Math.random() > 0.5)
            baseTime = 2 * (long) (workTime.sample() * multiplier);
        else
            baseTime = 0;
        for (int j = 0; j < count; j++) {
            long time;
            if (baseTime == 0)
                time = (long) (workTime.sample() * multiplier);
            else
                time = (long) (baseTime * Math.random());
            if (time < minWorkTime)
                time = minWorkTime;
            if (time > maxWorkTime)
                time = maxWorkTime;
            targetTotalElapsed += time;
            Future<?> future = executor.submit(new WaitTask(time));
            results.add(new Result(future, System.nanoTime() + time));
        }
        long end = start + (long) Math.ceil(targetTotalElapsed / (double) threadCounts[executorIndex])
                + TimeUnit.MILLISECONDS.toNanos(100L);
        long now = System.nanoTime();
        if (runs++ > executorCount && now > end)
            throw new AssertionError();
        events += results.size();
        pending.add(new Batch(results, end, executorIndex));
        //            System.out.println(String.format("Submitted batch to executor %d with %d items and %d permitted millis", executorIndex, count, TimeUnit.NANOSECONDS.toMillis(end - start)));
    }
}