Example usage for java.util.concurrent.atomic AtomicLong get

Introduction

On this page you can find example usages of java.util.concurrent.atomic.AtomicLong#get().

Prototype

public final long get() 

Document

Returns the current value, with memory effects as specified by VarHandle#getVolatile.
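
As a quick, self-contained sketch of that volatile read (illustrative only, not taken from the projects below): increments published by a writer thread are visible to a get() performed after joining it.

import java.util.concurrent.atomic.AtomicLong;

public class AtomicLongGetExample {
    public static void main(String[] args) throws InterruptedException {
        AtomicLong counter = new AtomicLong(0L);

        // Writer thread: publish 1000 increments.
        Thread writer = new Thread(() -> {
            for (int i = 0; i < 1000; i++) {
                counter.incrementAndGet();
            }
        });
        writer.start();
        writer.join();

        // get() performs a volatile read; every increment made by the
        // joined writer thread is visible here.
        System.out.println("count = " + counter.get()); // prints count = 1000
    }
}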

Usage

From source file:com.nextdoor.bender.handler.BaseHandler.java
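
In this Lambda handler, AtomicLong counters (eventCount, oldestArrivalTime, oldestOccurrenceTime) are updated concurrently as events flow through the stream pipeline; get() reads their final values in the finally block when the stats are written.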

/**
 * Method called by Handler implementations to process records.
 *
 * @param context Lambda invocation context.
 * @throws HandlerException
 */
private void processInternal(Context context) throws HandlerException {
    Stat runtime = new Stat("runtime.ns");
    runtime.start();

    Source source = this.getSource();
    DeserializerProcessor deser = source.getDeserProcessor();
    List<OperationProcessor> operations = source.getOperationProcessors();
    List<String> containsStrings = source.getContainsStrings();
    List<Pattern> regexPatterns = source.getRegexPatterns();

    this.getIpcService().setContext(context);

    Iterator<InternalEvent> events = this.getInternalEventIterator();

    /*
     * For logging purposes log when the function started running
     */
    this.monitor.invokeTimeNow();

    AtomicLong eventCount = new AtomicLong(0);
    AtomicLong oldestArrivalTime = new AtomicLong(System.currentTimeMillis());
    AtomicLong oldestOccurrenceTime = new AtomicLong(System.currentTimeMillis());

    /*
     * eventQueue allows for InternalEvents to be pulled from the Iterator and published to a
     * stream. A Thread is created that loops through events in the iterator and offers them to the
     * queue. Note that offering will be blocked if the queue is full (back pressure being applied).
     * When the iterator reaches the end (hasNext = false) the queue is closed.
     */
    this.eventQueue = new Queue<InternalEvent>(new LinkedBlockingQueue<InternalEvent>(this.queueSize));

    /*
     * Thread will live for duration of invocation and supply Stream with events.
     */
    new Thread(new Runnable() {
        @Override
        public void run() {
            while (events.hasNext()) {
                try {
                    eventQueue.offer(events.next());
                } catch (Queue.ClosedQueueException e) {
                    break;
                }
            }
            try {
                eventQueue.close();
            } catch (Queue.ClosedQueueException e) {
            }
        }
    }).start();

    Stream<InternalEvent> input = this.eventQueue.jdkStream();

    /*
     * Filter out raw events
     */
    Stream<InternalEvent> filtered = input.filter(
            /*
             * Perform regex filter
             */
            ievent -> {
                eventCount.incrementAndGet();
                String eventStr = ievent.getEventString();

                /*
                 * Apply String contains filters before deserialization
                 */
                for (String containsString : containsStrings) {
                    if (eventStr.contains(containsString)) {
                        return false;
                    }
                }

                /*
                 * Apply regex patterns before deserialization
                 */
                for (Pattern regexPattern : regexPatterns) {
                    Matcher m = regexPattern.matcher(eventStr);

                    if (m.find()) {
                        return false;
                    }
                }

                return true;
            });

    /*
     * Deserialize
     */
    Stream<InternalEvent> deserialized = filtered.map(ievent -> {
        DeserializedEvent data = deser.deserialize(ievent.getEventString());

        if (data == null || data.getPayload() == null) {
            logger.warn("Failed to deserialize: " + ievent.getEventString());
            return null;
        }

        ievent.setEventObj(data);
        return ievent;
    }).filter(Objects::nonNull);

    /*
     * Perform Operations
     */
    Stream<InternalEvent> operated = deserialized;
    for (OperationProcessor operation : operations) {
        operated = operation.perform(operated);
    }

    /*
     * Serialize
     */
    Stream<InternalEvent> serialized = operated.map(ievent -> {
        try {
            String raw = null;
            raw = this.ser.serialize(this.wrapper.getWrapped(ievent));
            ievent.setSerialized(raw);
            return ievent;
        } catch (SerializationException e) {
            return null;
        }
    }).filter(Objects::nonNull);

    /*
     * Transport
     */
    serialized.forEach(ievent -> {
        /*
         * Update times
         */
        updateOldest(oldestArrivalTime, ievent.getArrivalTime());
        updateOldest(oldestOccurrenceTime, ievent.getEventTime());

        try {
            this.getIpcService().add(ievent);
        } catch (TransportException e) {
            logger.warn("error adding event", e);
        }
    });

    /*
     * Wait for transporters to finish
     */
    try {
        this.getIpcService().flush();
    } catch (TransportException e) {
        throw new HandlerException("encountered TransportException while shutting down ipcService", e);
    } catch (InterruptedException e) {
        throw new HandlerException("thread was interrupted while shutting down ipcService", e);
    } finally {
        String evtSource = this.getSourceName();

        runtime.stop();

        if (!this.skipWriteStats) {
            writeStats(eventCount.get(), oldestArrivalTime.get(), oldestOccurrenceTime.get(), evtSource,
                    runtime);
        }

        if (logger.isTraceEnabled()) {
            getGCStats();
        }
    }
}

From source file:org.apache.hadoop.raid.RaidShell.java
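
Here worker threads accumulate the estimated saving of each file into the shared totalSavingSize counter via addAndGet(); get() reads the running total for debug output and returns the final sum once all threads report completion.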

private long estimateSaving(final Codec codec, final List<Path> files, final int targetReplication,
        final int numThreads, final boolean isDebug) throws IOException {
    final AtomicLong totalSavingSize = new AtomicLong(0);
    ExecutorService executor = Executors.newFixedThreadPool(numThreads);
    LOG.info("Processing " + files.size() + " files/dirs for " + codec.id + " in " + numThreads + " threads");
    if (isDebug) {
        System.out.println("oldDiskSize | oldParitySize | newDiskSize | newParitySize"
                + "| savingSize | totalSavingSize | path ");
    }
    final AtomicInteger finishNum = new AtomicInteger(0);
    for (int i = 0; i < numThreads; i++) {
        final int startIdx = i;
        Runnable work = new Runnable() {
            public void run() {
                try {
                    for (int idx = startIdx; idx < files.size(); idx += numThreads) {
                        try {
                            Path p = files.get(idx);
                            FileSystem fs = FileSystem.get(conf);
                            p = fs.makeQualified(p);
                            FileStatus stat = null;
                            try {
                                stat = fs.getFileStatus(p);
                            } catch (FileNotFoundException e) {
                                LOG.warn("Path " + p + " does not exist", e);
                            }
                            if (stat == null) {
                                continue;
                            }
                            short repl = 0;
                            List<FileStatus> lfs = null;
                            if (codec.isDirRaid) {
                                if (!stat.isDir()) {
                                    continue;
                                }
                                lfs = RaidNode.listDirectoryRaidFileStatus(conf, fs, p);
                                if (lfs == null) {
                                    continue;
                                }
                                repl = DirectoryStripeReader.getReplication(lfs);
                            } else {
                                repl = stat.getReplication();
                            }

                            // if should not raid, will not put the file into the write list.
                            if (!RaidNode.shouldRaid(conf, fs, stat, codec, lfs)) {
                                LOG.info("Should not raid file: " + p);
                                continue;
                            }
                            // check the replication.
                            boolean add = false;
                            if (repl > targetReplication) {
                                add = true;
                            } else if (repl == targetReplication
                                    && !ParityFilePair.parityExists(stat, codec, conf)) {
                                add = true;
                            }
                            if (add) {
                                long oldDiskSize = 0L;
                                long newDiskSize = 0L;
                                long numBlocks = 0L;
                                long parityBlockSize = 0L;
                                if (codec.isDirRaid) {
                                    for (FileStatus fsStat : lfs) {
                                        oldDiskSize += fsStat.getLen() * (fsStat.getReplication());
                                        newDiskSize += fsStat.getLen() * targetReplication;
                                    }
                                    numBlocks = DirectoryStripeReader.getBlockNum(lfs);
                                    parityBlockSize = DirectoryStripeReader.getParityBlockSize(conf, lfs);
                                } else {
                                    oldDiskSize = stat.getLen() * stat.getReplication();
                                    newDiskSize = stat.getLen() * targetReplication;
                                    numBlocks = RaidNode.getNumBlocks(stat);
                                    parityBlockSize = stat.getBlockSize();
                                }

                                long numStripes = RaidNode.numStripes(numBlocks, codec.stripeLength);
                                long newParitySize = numStripes * codec.parityLength * parityBlockSize
                                        * targetReplication;
                                long oldParitySize = 0L;
                                for (Codec other : Codec.getCodecs()) {
                                    if (other.priority < codec.priority) {
                                        Path parityPath = new Path(other.parityDirectory,
                                                RaidNode.makeRelative(stat.getPath()));
                                        long logicalSize = 0;
                                        try {
                                            logicalSize = fs.getContentSummary(parityPath).getSpaceConsumed();
                                        } catch (IOException ioe) {
                                            // doesn't exist
                                            continue;
                                        }
                                        oldParitySize += logicalSize;
                                    }
                                }
                                long savingSize = oldDiskSize + oldParitySize - newDiskSize - newParitySize;
                                totalSavingSize.addAndGet(savingSize);
                                if (isDebug) {
                                    System.out.println(oldDiskSize + " " + oldParitySize + " " + newDiskSize
                                            + " " + newParitySize + " " + savingSize + " "
                                            + totalSavingSize.get() + " " + stat.getPath());
                                }
                            }
                        } catch (IOException ioe) {
                            LOG.warn("Get IOException", ioe);
                        }
                    }
                } finally {
                    finishNum.incrementAndGet();
                }
            }
        };
        if (executor != null) {
            executor.execute(work);
        }
    }
    if (executor != null) {
        try {
            while (finishNum.get() < numThreads) {
                try {
                    Thread.sleep(2000);
                } catch (InterruptedException ie) {
                    LOG.warn("EstimateSaving get exception ", ie);
                    throw new IOException(ie);
                }
            }
        } finally {
            executor.shutdown(); // Waits for submitted tasks to finish.
        }
    }
    return totalSavingSize.get();
}

From source file:com.milaboratory.core.alignment.KAlignerTest.java
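
This concurrency test sums alignment time across 20 threads in the time counter with addAndGet(); time.get() reads the accumulated nanoseconds when the per-parameter results are printed.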

@Test
public void testRandomCorrectnessConcurrent() throws Exception {
    KAlignerParameters p = gParams.clone().setMapperKValue(6).setAlignmentStopPenalty(Integer.MIN_VALUE)
            .setMapperAbsoluteMinScore(2.1f).setMapperMinSeedsDistance(4);
    p.setScoring(new LinearGapAlignmentScoring(NucleotideSequence.ALPHABET,
            ScoringUtils.getSymmetricMatrix(4, -4, 4), -5)).setMaxAdjacentIndels(2);

    KAlignerParameters[] params = new KAlignerParameters[] { p.clone(), p.clone().setFloatingLeftBound(true),
            p.clone().setFloatingRightBound(true),
            p.clone().setFloatingLeftBound(true).setFloatingRightBound(true) };

    RandomDataGenerator rdi = new RandomDataGenerator(new Well19937c(127368647891L));
    final int baseSize = its(400, 2000);
    final int total = its(3000, 30000);
    final int threadCount = 20;
    int i, id;

    final NucleotideMutationModel mutationModel = MutationModels.getEmpiricalNucleotideMutationModel()
            .multiplyProbabilities(2.0);
    mutationModel.reseed(12343L);

    for (final KAlignerParameters parameters : params) {
        final KAligner aligner = new KAligner(parameters);

        final AtomicInteger correct = new AtomicInteger(0), incorrect = new AtomicInteger(0),
                miss = new AtomicInteger(0), scoreError = new AtomicInteger(0), random = new AtomicInteger(0);

        final List<NucleotideSequence> ncs = new ArrayList<>(baseSize);
        for (i = 0; i < baseSize; ++i) {
            NucleotideSequence reference = randomSequence(NucleotideSequence.ALPHABET, rdi, 100, 300);
            ncs.add(reference);
            aligner.addReference(reference);
        }

        final AtomicInteger counter = new AtomicInteger(total);

        Thread[] threads = new Thread[threadCount];

        final AtomicLong time = new AtomicLong(0L);

        final AtomicLong seedCounter = new AtomicLong(1273L);
        for (i = 0; i < threadCount; ++i) {
            threads[i] = new Thread() {
                @Override
                public void run() {
                    long timestamp;
                    //Different seed for different thread.
                    RandomDataGenerator rdi = new RandomDataGenerator(
                            new Well19937c(seedCounter.addAndGet(117L)));
                    while (counter.decrementAndGet() >= 0) {
                        int id = rdi.nextInt(0, baseSize - 1);
                        NucleotideSequence ref = ncs.get(id);
                        int trimRight, trimLeft;
                        boolean addLeft, addRight;

                        if (parameters.isFloatingLeftBound()) {
                            trimLeft = rdi.nextInt(0, ref.size() / 3);
                            addLeft = true;
                        } else {
                            if (rdi.nextInt(0, 1) == 0) {
                                trimLeft = 0;
                                addLeft = true;
                            } else {
                                trimLeft = rdi.nextInt(0, ref.size() / 3);
                                addLeft = false;
                            }
                        }

                        if (parameters.isFloatingRightBound()) {
                            trimRight = rdi.nextInt(0, ref.size() / 3);
                            addRight = true;
                        } else {
                            if (rdi.nextInt(0, 1) == 0) {
                                trimRight = 0;
                                addRight = true;
                            } else {
                                trimRight = rdi.nextInt(0, ref.size() / 3);
                                addRight = false;
                            }
                        }

                        NucleotideSequence subSeq = ref.getRange(trimLeft, ref.size() - trimRight);
                        NucleotideSequence left = addLeft
                                ? randomSequence(NucleotideSequence.ALPHABET, rdi, 10, 30)
                                : EMPTY;
                        NucleotideSequence right = addRight
                                ? randomSequence(NucleotideSequence.ALPHABET, rdi, 10, 30)
                                : EMPTY;

                        int[] subSeqMutations;
                        Mutations<NucleotideSequence> mmutations;
                        synchronized (mutationModel) {
                            mmutations = generateMutations(subSeq, mutationModel);
                            subSeqMutations = mmutations.getAllMutations();
                        }
                        float actionScore = AlignmentUtils.calculateScore(parameters.getScoring(),
                                subSeq.size(), mmutations);

                        int indels = 0;
                        for (int mut : subSeqMutations)
                            if (isDeletion(mut) || isInsertion(mut))
                                ++indels;

                        NucleotideSequence target = left.concatenate(mutate(subSeq, subSeqMutations))
                                .concatenate(right);

                        timestamp = System.nanoTime();
                        KAlignmentResult result = aligner.align(target);
                        time.addAndGet(System.nanoTime() - timestamp);

                        boolean found = false;
                        for (KAlignmentHit hit : result.hits) {
                            if (hit.getId() == id) {
                                //System.out.println(hit.getAlignmentScore());
                                found = true;
                                if (!parameters.isFloatingLeftBound())
                                    Assert.assertTrue(hit.getAlignment().getSequence1Range().getFrom() == 0
                                            || hit.getAlignment().getSequence2Range().getFrom() == 0);
                                if (!parameters.isFloatingRightBound())
                                    Assert.assertTrue(hit.getAlignment().getSequence1Range().getTo() == ref
                                            .size()
                                            || hit.getAlignment().getSequence2Range().getTo() == target.size());
                                if (hit.getAlignment().getScore() < actionScore
                                        && indels <= parameters.getMaxAdjacentIndels()) {
                                    scoreError.incrementAndGet();
                                    //System.out.println(target);
                                    //System.out.println(left);
                                    //printAlignment(subSeq, subSeqMutations);
                                    //System.out.println(right);
                                    //printHitAlignment(hit);
                                    ////printAlignment(ncs.get(hit.getId()).getRange(hit.getAlignment().getSequence1Range()),
                                    ////        hit.getAlignment().getMutations());
                                    //found = true;
                                }
                            } else {
                                //printHitAlignment(hit);
                                //System.out.println(hit.getAlignmentScore());
                                incorrect.incrementAndGet();
                            }
                        }

                        if (found)
                            correct.incrementAndGet();
                        else {
                            if (indels <= parameters.getMaxAdjacentIndels()) {
                                miss.incrementAndGet();
                                //System.out.println(target);
                                //System.out.println(left);
                                //printAlignment(subSeq, subSeqMutations);
                                //System.out.println(right);
                            }
                        }

                        NucleotideSequence randomSequence = randomSequence(NucleotideSequence.ALPHABET, rdi,
                                target.size() - 1, target.size());
                        for (KAlignmentHit hit : aligner.align(randomSequence).hits) {
                            hit.calculateAlignmnet();
                            if (hit.getAlignment().getScore() >= 110.0)
                                random.incrementAndGet();
                        }
                    }
                }
            };
        }

        for (i = 0; i < threadCount; ++i)
            threads[i].start();

        for (i = 0; i < threadCount; ++i)
            threads[i].join();

        System.out.println("C=" + correct.get() + ";I=" + incorrect.get() + ";M=" + miss.get() + ";ScE="
                + scoreError.get() + ";R=" + (1.0 * random.get() / baseSize / total) + " AlignmentTime = "
                + time(time.get() / total));
        Assert.assertEquals(1.0, 1.0 * correct.get() / total, 0.01);
        Assert.assertEquals(0.0, 1.0 * incorrect.get() / total, 0.001);
        Assert.assertEquals(0.0, 1.0 * miss.get() / total, 0.001);
        Assert.assertEquals(0.0, 1.0 * scoreError.get() / total, 0.001);
        Assert.assertEquals(0.0, 1.0 * random.get() / total / baseSize, 5E-6);
    }
}

From source file:io.warp10.script.functions.FETCH.java
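
The fetched and gtscount counters are shared stack attributes tracking the datapoints and Geo Time Series retrieved during the invocation; fetched.get() reports the current datapoint count in the error message when the fetch limit is exceeded.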

@Override
public Object apply(WarpScriptStack stack) throws WarpScriptException {
    //
    // Extract parameters from the stack
    //

    Object top = stack.peek();

    //
    // Handle the new (as of 20150805) parameter passing mechanism as a map
    //

    Map<String, Object> params = null;

    if (top instanceof Map) {
        stack.pop();
        params = paramsFromMap(stack, (Map<String, Object>) top);
    }

    if (top instanceof List) {
        if (5 != ((List) top).size()) {
            stack.drop();
            throw new WarpScriptException(getName() + " expects 5 parameters.");
        }

        //
        // Explode list and remove its size
        //

        listTo.apply(stack);
        stack.drop();
    }

    if (null == params) {

        params = new HashMap<String, Object>();

        //
        // Extract time span
        //

        Object oStop = stack.pop();
        Object oStart = stack.pop();

        long endts;
        long timespan;

        if (oStart instanceof String && oStop instanceof String) {
            long start = fmt.parseDateTime((String) oStart).getMillis() * Constants.TIME_UNITS_PER_MS;
            long stop = fmt.parseDateTime((String) oStop).getMillis() * Constants.TIME_UNITS_PER_MS;

            if (start < stop) {
                endts = stop;
                timespan = stop - start;
            } else {
                endts = start;
                timespan = start - stop;
            }
        } else if (oStart instanceof Long && oStop instanceof Long) {
            endts = (long) oStart;
            timespan = (long) oStop;
        } else {
            throw new WarpScriptException("Invalid timespan specification.");
        }

        params.put(PARAM_END, endts);

        if (timespan < 0) {
            params.put(PARAM_COUNT, -timespan);
        } else {
            params.put(PARAM_TIMESPAN, timespan);
        }

        //
        // Extract labels selector
        //

        Object oLabelsSelector = stack.pop();

        if (!(oLabelsSelector instanceof Map)) {
            throw new WarpScriptException("Label selectors must be a map.");
        }

        Map<String, String> labelSelectors = (Map<String, String>) oLabelsSelector;

        params.put(PARAM_LABELS, labelSelectors);

        //
        // Extract class selector
        //

        Object oClassSelector = stack.pop();

        if (!(oClassSelector instanceof String)) {
            throw new WarpScriptException("Class selector must be a string.");
        }

        String classSelector = (String) oClassSelector;

        params.put(PARAM_CLASS, classSelector);

        //
        // Extract token
        //

        Object oToken = stack.pop();

        if (!(oToken instanceof String)) {
            throw new WarpScriptException("Token must be a string.");
        }

        String token = (String) oToken;

        params.put(PARAM_TOKEN, token);
    }

    StoreClient gtsStore = stack.getStoreClient();

    DirectoryClient directoryClient = stack.getDirectoryClient();

    GeoTimeSerie base = null;
    GeoTimeSerie[] bases = null;
    String typelabel = (String) params.get(PARAM_TYPEATTR);

    if (null != typelabel) {
        bases = new GeoTimeSerie[4];
    }

    ReadToken rtoken = Tokens.extractReadToken(params.get(PARAM_TOKEN).toString());

    List<String> clsSels = new ArrayList<String>();
    List<Map<String, String>> lblsSels = new ArrayList<Map<String, String>>();

    if (params.containsKey(PARAM_SELECTOR_PAIRS)) {
        for (Pair<Object, Object> pair : (List<Pair<Object, Object>>) params.get(PARAM_SELECTOR_PAIRS)) {
            clsSels.add(pair.getLeft().toString());
            Map<String, String> labelSelectors = (Map<String, String>) pair.getRight();
            labelSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken));
            lblsSels.add((Map<String, String>) labelSelectors);
        }
    } else {
        Map<String, String> labelSelectors = (Map<String, String>) params.get(PARAM_LABELS);
        labelSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken));
        clsSels.add(params.get(PARAM_CLASS).toString());
        lblsSels.add(labelSelectors);
    }

    List<Metadata> metadatas = null;

    Iterator<Metadata> iter = null;

    try {
        metadatas = directoryClient.find(clsSels, lblsSels);
        iter = metadatas.iterator();
    } catch (IOException ioe) {
        try {
            iter = directoryClient.iterator(clsSels, lblsSels);
        } catch (Exception e) {
            throw new WarpScriptException(e);
        }
    }

    metadatas = new ArrayList<Metadata>();

    List<GeoTimeSerie> series = new ArrayList<GeoTimeSerie>();
    AtomicLong fetched = (AtomicLong) stack.getAttribute(WarpScriptStack.ATTRIBUTE_FETCH_COUNT);
    long fetchLimit = (long) stack.getAttribute(WarpScriptStack.ATTRIBUTE_FETCH_LIMIT);
    long gtsLimit = (long) stack.getAttribute(WarpScriptStack.ATTRIBUTE_GTS_LIMIT);

    AtomicLong gtscount = (AtomicLong) stack.getAttribute(WarpScriptStack.ATTRIBUTE_GTS_COUNT);

    try {
        while (iter.hasNext()) {

            metadatas.add(iter.next());

            if (gtscount.incrementAndGet() > gtsLimit) {
                throw new WarpScriptException(getName() + " exceeded limit of " + gtsLimit
                        + " Geo Time Series, current count is " + gtscount);
            }

            if (metadatas.size() < EgressFetchHandler.FETCH_BATCHSIZE && iter.hasNext()) {
                continue;
            }

            //
            // Filter the retrieved Metadata according to geo
            //

            if (params.containsKey(PARAM_GEO)) {
                GeoDirectoryClient geoclient = stack.getGeoDirectoryClient();
                long end = (long) params.get(PARAM_END);
                long start = Long.MIN_VALUE;
                if (params.containsKey(PARAM_TIMESPAN)) {
                    start = end - (long) params.get(PARAM_TIMESPAN);
                }

                boolean inside = false;

                if (PARAM_GEOOP_IN.equals(params.get(PARAM_GEOOP))) {
                    inside = true;
                }

                try {
                    metadatas = geoclient.filter((String) params.get(PARAM_GEODIR), metadatas,
                            (GeoXPShape) params.get(PARAM_GEO), inside, start, end);
                } catch (IOException ioe) {
                    throw new WarpScriptException(ioe);
                }
            }

            //
            // Generate extra Metadata if PARAM_EXTRA is set
            //

            if (params.containsKey(PARAM_EXTRA)) {

                Set<Metadata> withextra = new HashSet<Metadata>();

                withextra.addAll(metadatas);

                for (Metadata meta : metadatas) {
                    for (String cls : (Set<String>) params.get(PARAM_EXTRA)) {
                        // The following is safe, the constructor allocates new maps
                        Metadata metadata = new Metadata(meta);
                        metadata.setName(cls);
                        metadata.setClassId(GTSHelper.classId(this.SIPHASH_CLASS, cls));
                        metadata.setLabelsId(GTSHelper.labelsId(this.SIPHASH_LABELS, metadata.getLabels()));
                        withextra.add(metadata);
                    }
                }

                metadatas.clear();
                metadatas.addAll(withextra);
            }

            //
     * We assume that GTS will be fetched in a continuous way, i.e. without having a GTSDecoder from one
     * GTS, then one from another GTS, then one from the first GTS again.
            //

            long timespan = params.containsKey(PARAM_TIMESPAN) ? (long) params.get(PARAM_TIMESPAN)
                    : -((long) params.get(PARAM_COUNT));

            TYPE type = (TYPE) params.get(PARAM_TYPE);

            if (null != this.forcedType) {
                if (null != type) {
                    throw new WarpScriptException(getName() + " type of fetched GTS cannot be changed.");
                }
                type = this.forcedType;
            }

            boolean writeTimestamp = Boolean.TRUE.equals(params.get(PARAM_WRITE_TIMESTAMP));

            boolean showUUID = Boolean.TRUE.equals(params.get(PARAM_SHOWUUID));

            try (GTSDecoderIterator gtsiter = gtsStore.fetch(rtoken, metadatas, (long) params.get(PARAM_END),
                    timespan, fromArchive, writeTimestamp)) {
                while (gtsiter.hasNext()) {
                    GTSDecoder decoder = gtsiter.next();

                    GeoTimeSerie gts;

                    //
                    // If we should ventilate per type, do so now
                    //

                    if (null != typelabel) {

                        Map<String, String> labels = new HashMap<String, String>(
                                decoder.getMetadata().getLabels());
                        labels.remove(Constants.PRODUCER_LABEL);
                        labels.remove(Constants.OWNER_LABEL);

                        java.util.UUID uuid = null;

                        if (showUUID) {
                            uuid = new java.util.UUID(decoder.getClassId(), decoder.getLabelsId());
                        }

                        long count = 0;

                        Metadata decoderMeta = decoder.getMetadata();

                        while (decoder.next()) {
                            count++;
                            long ts = decoder.getTimestamp();
                            long location = decoder.getLocation();
                            long elevation = decoder.getElevation();
                            Object value = decoder.getValue();

                            int gtsidx = 0;
                            String typename = "DOUBLE";

                            if (value instanceof Long) {
                                gtsidx = 1;
                                typename = "LONG";
                            } else if (value instanceof Boolean) {
                                gtsidx = 2;
                                typename = "BOOLEAN";
                            } else if (value instanceof String) {
                                gtsidx = 3;
                                typename = "STRING";
                            }

                            base = bases[gtsidx];

                            if (null == base || !base.getMetadata().getName().equals(decoderMeta.getName())
                                    || !base.getMetadata().getLabels().equals(decoderMeta.getLabels())) {
                                bases[gtsidx] = new GeoTimeSerie();
                                base = bases[gtsidx];
                                series.add(base);
                                base.setLabels(decoder.getLabels());
                                base.getMetadata().putToAttributes(typelabel, typename);
                                base.setName(decoder.getName());
                                if (null != uuid) {
                                    base.getMetadata().putToAttributes(Constants.UUID_ATTRIBUTE,
                                            uuid.toString());
                                }
                            }

                            GTSHelper.setValue(base, ts, location, elevation, value, false);
                        }

                        if (fetched.addAndGet(count) > fetchLimit) {
                            Map<String, String> sensisionLabels = new HashMap<String, String>();
                            sensisionLabels.put(SensisionConstants.SENSISION_LABEL_CONSUMERID,
                                    Tokens.getUUID(rtoken.getBilledId()));
                            Sensision.update(SensisionConstants.SENSISION_CLASS_EINSTEIN_FETCHCOUNT_EXCEEDED,
                                    sensisionLabels, 1);
                            throw new WarpScriptException(getName() + " exceeded limit of " + fetchLimit
                                    + " datapoints, current count is " + fetched.get());
                        }

                        continue;
                    }

                    if (null != type) {
                        gts = decoder.decode(type);
                    } else {
                        gts = decoder.decode();
                    }

                    //
                    // Remove producer/owner labels
                    //

                    //
                    // Add a .uuid attribute if instructed to do so
                    //

                    if (showUUID) {
                        java.util.UUID uuid = new java.util.UUID(gts.getClassId(), gts.getLabelsId());
                        gts.getMetadata().putToAttributes(Constants.UUID_ATTRIBUTE, uuid.toString());
                    }

                    Map<String, String> labels = new HashMap<String, String>();
                    labels.putAll(gts.getMetadata().getLabels());
                    labels.remove(Constants.PRODUCER_LABEL);
                    labels.remove(Constants.OWNER_LABEL);
                    gts.setLabels(labels);

                    //
                    // If it's the first GTS, take it as is.
                    //

                    if (null == base) {
                        base = gts;
                    } else {
                        //
                        // If name and labels are identical to the previous GTS, merge them
                        // Otherwise add 'base' to the stack and set it to 'gts'.
                        //
                        if (!base.getMetadata().getName().equals(gts.getMetadata().getName())
                                || !base.getMetadata().getLabels().equals(gts.getMetadata().getLabels())) {
                            series.add(base);
                            base = gts;
                        } else {
                            base = GTSHelper.merge(base, gts);
                        }
                    }

                    if (fetched.addAndGet(gts.size()) > fetchLimit) {
                        Map<String, String> sensisionLabels = new HashMap<String, String>();
                        sensisionLabels.put(SensisionConstants.SENSISION_LABEL_CONSUMERID,
                                Tokens.getUUID(rtoken.getBilledId()));
                        Sensision.update(SensisionConstants.SENSISION_CLASS_EINSTEIN_FETCHCOUNT_EXCEEDED,
                                sensisionLabels, 1);
                        throw new WarpScriptException(getName() + " exceeded limit of " + fetchLimit
                                + " datapoints, current count is " + fetched.get());
                        //break;
                    }
                }
            } catch (WarpScriptException ee) {
                throw ee;
            } catch (Exception e) {
                e.printStackTrace();
            }

            //
            // If there is one current GTS, push it onto the stack (only if not ventilating per type)
            //

            if (null != base && null == typelabel) {
                series.add(base);
            }

            //
            // Reset state
            //

            base = null;
            metadatas.clear();
        }
    } catch (Throwable t) {
        throw t;
    } finally {
        if (iter instanceof MetadataIterator) {
            try {
                ((MetadataIterator) iter).close();
            } catch (Exception e) {
            }
        }
    }

    stack.push(series);

    //
    // Apply a possible postfetch hook
    //

    if (rtoken.getHooksSize() > 0 && rtoken.getHooks().containsKey(POSTFETCH_HOOK)) {
        stack.execMulti(rtoken.getHooks().get(POSTFETCH_HOOK));
    }

    return stack;
}

From source file:com.unboundid.scim.tools.SCIMQueryRate.java
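
Query, resource, error, and duration counters are shared with the query threads; during warm-up intervals they are drained with getAndSet(0L), while afterwards get() snapshots the cumulative totals for each statistics interval.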

/**
 * Performs the actual processing for this tool.  In this case, it gets a
 * connection to the directory server and uses it to perform the requested
 * searches.
 *
 * @return  The result code for the processing that was performed.
 */
@Override()
public ResultCode doToolProcessing() {
    //Initialize the Debugger
    Debug.setEnabled(true);
    Debug.getLogger().addHandler(new ConsoleHandler());
    Debug.getLogger().setUseParentHandlers(false);

    // Determine the random seed to use.
    final Long seed;
    if (randomSeed.isPresent()) {
        seed = Long.valueOf(randomSeed.getValue());
    } else {
        seed = null;
    }

    // Create a value pattern for the filter.
    final ValuePattern filterPattern;
    boolean isQuery = true;
    if (filter.isPresent()) {
        try {
            filterPattern = new ValuePattern(filter.getValue(), seed);
        } catch (ParseException pe) {
            Debug.debugException(pe);
            err(ERR_QUERY_TOOL_BAD_FILTER_PATTERN.get(pe.getMessage()));
            return ResultCode.PARAM_ERROR;
        }
    } else if (resourceId.isPresent()) {
        isQuery = false;
        try {
            filterPattern = new ValuePattern(resourceId.getValue());
        } catch (ParseException pe) {
            Debug.debugException(pe);
            err(ERR_QUERY_TOOL_BAD_RESOURCE_ID_PATTERN.get(pe.getMessage()));
            return ResultCode.PARAM_ERROR;
        }
    } else {
        filterPattern = null;
    }

    // Get the attributes to return.
    final String[] attrs;
    if (attributes.isPresent()) {
        final List<String> attrList = attributes.getValues();
        attrs = new String[attrList.size()];
        attrList.toArray(attrs);
    } else {
        attrs = NO_STRINGS;
    }

    // If the --ratePerSecond option was specified, then limit the rate
    // accordingly.
    FixedRateBarrier fixedRateBarrier = null;
    if (ratePerSecond.isPresent()) {
        final int intervalSeconds = collectionInterval.getValue();
        final int ratePerInterval = ratePerSecond.getValue() * intervalSeconds;

        fixedRateBarrier = new FixedRateBarrier(1000L * intervalSeconds, ratePerInterval);
    }

    // Determine whether to include timestamps in the output and if so what
    // format should be used for them.
    final boolean includeTimestamp;
    final String timeFormat;
    if (timestampFormat.getValue().equalsIgnoreCase("with-date")) {
        includeTimestamp = true;
        timeFormat = "dd/MM/yyyy HH:mm:ss";
    } else if (timestampFormat.getValue().equalsIgnoreCase("without-date")) {
        includeTimestamp = true;
        timeFormat = "HH:mm:ss";
    } else {
        includeTimestamp = false;
        timeFormat = null;
    }

    // Determine whether any warm-up intervals should be run.
    final long totalIntervals;
    final boolean warmUp;
    int remainingWarmUpIntervals = warmUpIntervals.getValue();
    if (remainingWarmUpIntervals > 0) {
        warmUp = true;
        totalIntervals = 0L + numIntervals.getValue() + remainingWarmUpIntervals;
    } else {
        warmUp = false;
        totalIntervals = 0L + numIntervals.getValue();
    }

    // Create the table that will be used to format the output.
    final OutputFormat outputFormat;
    if (csvFormat.isPresent()) {
        outputFormat = OutputFormat.CSV;
    } else {
        outputFormat = OutputFormat.COLUMNS;
    }

    final ColumnFormatter formatter = new ColumnFormatter(includeTimestamp, timeFormat, outputFormat, " ",
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Queries/Sec"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Avg Dur ms"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Resources/Query"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Errors/Sec"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Overall", "Queries/Sec"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Overall", "Avg Dur ms"));

    // Create values to use for statistics collection.
    final AtomicLong queryCounter = new AtomicLong(0L);
    final AtomicLong resourceCounter = new AtomicLong(0L);
    final AtomicLong errorCounter = new AtomicLong(0L);
    final AtomicLong queryDurations = new AtomicLong(0L);

    // Determine the length of each interval in milliseconds.
    final long intervalMillis = 1000L * collectionInterval.getValue();

    // We will use Apache's HttpClient library for this tool.
    SSLUtil sslUtil;
    try {
        sslUtil = createSSLUtil();
    } catch (LDAPException e) {
        debugException(e);
        err(e.getMessage());
        return e.getResultCode();
    }

    RegistryBuilder<ConnectionSocketFactory> registryBuilder = RegistryBuilder.create();
    final String schemeName;
    if (sslUtil != null) {
        try {
            SSLConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
                    sslUtil.createSSLContext("TLS"), new NoopHostnameVerifier());
            schemeName = "https";
            registryBuilder.register(schemeName, sslConnectionSocketFactory);
        } catch (GeneralSecurityException e) {
            debugException(e);
            err(ERR_SCIM_TOOL_CANNOT_CREATE_SSL_CONTEXT.get(getExceptionMessage(e)));
            return ResultCode.LOCAL_ERROR;
        }
    } else {
        schemeName = "http";
        registryBuilder.register(schemeName, new PlainConnectionSocketFactory());
    }
    final Registry<ConnectionSocketFactory> socketFactoryRegistry = registryBuilder.build();

    RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(30000)
            .setExpectContinueEnabled(true).build();

    SocketConfig socketConfig = SocketConfig.custom().setSoTimeout(30000).setSoReuseAddress(true).build();

    final PoolingHttpClientConnectionManager mgr = new PoolingHttpClientConnectionManager(
            socketFactoryRegistry);
    mgr.setMaxTotal(numThreads.getValue());
    mgr.setDefaultMaxPerRoute(numThreads.getValue());
    mgr.setDefaultSocketConfig(socketConfig);
    mgr.setValidateAfterInactivity(-1);

    ClientConfig jerseyConfig = new ClientConfig();

    jerseyConfig.property(ApacheClientProperties.CONNECTION_MANAGER, mgr);
    jerseyConfig.property(ApacheClientProperties.REQUEST_CONFIG, requestConfig);
    ApacheConnectorProvider connectorProvider = new ApacheConnectorProvider();
    jerseyConfig.connectorProvider(connectorProvider);

    if (authID.isPresent()) {
        try {
            final String password;
            if (authPassword.isPresent()) {
                password = authPassword.getValue();
            } else if (authPasswordFile.isPresent()) {
                password = authPasswordFile.getNonBlankFileLines().get(0);
            } else {
                password = null;
            }

            BasicCredentialsProvider provider = new BasicCredentialsProvider();
            provider.setCredentials(new AuthScope(host.getValue(), port.getValue()),
                    new UsernamePasswordCredentials(authID.getValue(), password));

            jerseyConfig.property(ApacheClientProperties.CREDENTIALS_PROVIDER, provider);
            jerseyConfig.property(ApacheClientProperties.PREEMPTIVE_BASIC_AUTHENTICATION, true);
        } catch (IOException e) {
            Debug.debugException(e);
            err(ERR_QUERY_TOOL_SET_BASIC_AUTH.get(e.getMessage()));
            return ResultCode.LOCAL_ERROR;
        }
    } else if (bearerToken.isPresent()) {
        jerseyConfig.register(new ClientRequestFilter() {
            public void filter(final ClientRequestContext clientRequestContext) throws IOException {
                try {
                    clientRequestContext.getHeaders().add("Authorization", "Bearer " + bearerToken.getValue());
                } catch (Exception ex) {
                    throw new RuntimeException("Unable to add authorization handler", ex);
                }
            }
        });
    }

    // Create the SCIM client to use for the queries.
    final URI uri;
    try {
        final String path;
        if (contextPath.getValue().startsWith("/")) {
            path = contextPath.getValue();
        } else {
            path = "/" + contextPath.getValue();
        }
        uri = new URI(schemeName, null, host.getValue(), port.getValue(), path, null, null);
    } catch (URISyntaxException e) {
        Debug.debugException(e);
        err(ERR_QUERY_TOOL_CANNOT_CREATE_URL.get(e.getMessage()));
        return ResultCode.OTHER;
    }
    final SCIMService service = new SCIMService(uri, jerseyConfig);

    if (xmlFormat.isPresent()) {
        service.setContentType(MediaType.APPLICATION_XML_TYPE);
        service.setAcceptType(MediaType.APPLICATION_XML_TYPE);
    }

    // Retrieve the resource schema.
    final ResourceDescriptor resourceDescriptor;
    try {
        resourceDescriptor = service.getResourceDescriptor(resourceName.getValue(), null);
        if (resourceDescriptor == null) {
            throw new ResourceNotFoundException(
                    "Resource " + resourceName.getValue() + " is not defined by the service provider");
        }
    } catch (SCIMException e) {
        Debug.debugException(e);
        err(ERR_QUERY_TOOL_RETRIEVE_RESOURCE_SCHEMA.get(e.getMessage()));
        return ResultCode.OTHER;
    }

    final SCIMEndpoint<? extends BaseResource> endpoint = service.getEndpoint(resourceDescriptor,
            BaseResource.BASE_RESOURCE_FACTORY);

    // Create the threads to use for the searches.
    final CyclicBarrier barrier = new CyclicBarrier(numThreads.getValue() + 1);
    final QueryRateThread[] threads = new QueryRateThread[numThreads.getValue()];
    for (int i = 0; i < threads.length; i++) {
        threads[i] = new QueryRateThread(i, isQuery, endpoint, filterPattern, attrs, barrier, queryCounter,
                resourceCounter, queryDurations, errorCounter, fixedRateBarrier);
        threads[i].start();
    }

    // Display the table header.
    for (final String headerLine : formatter.getHeaderLines(true)) {
        out(headerLine);
    }

    // Indicate that the threads can start running.
    try {
        barrier.await();
    } catch (Exception e) {
        Debug.debugException(e);
    }
    long overallStartTime = System.nanoTime();
    long nextIntervalStartTime = System.currentTimeMillis() + intervalMillis;

    boolean setOverallStartTime = false;
    long lastDuration = 0L;
    long lastNumEntries = 0L;
    long lastNumErrors = 0L;
    long lastNumSearches = 0L;
    long lastEndTime = System.nanoTime();
    for (long i = 0; i < totalIntervals; i++) {
        final long startTimeMillis = System.currentTimeMillis();
        final long sleepTimeMillis = nextIntervalStartTime - startTimeMillis;
        nextIntervalStartTime += intervalMillis;
        try {
            if (sleepTimeMillis > 0) {
                Thread.sleep(sleepTimeMillis);
            }
        } catch (Exception e) {
            Debug.debugException(e);
        }

        final long endTime = System.nanoTime();
        final long intervalDuration = endTime - lastEndTime;

        final long numSearches;
        final long numEntries;
        final long numErrors;
        final long totalDuration;
        if (warmUp && (remainingWarmUpIntervals > 0)) {
            numSearches = queryCounter.getAndSet(0L);
            numEntries = resourceCounter.getAndSet(0L);
            numErrors = errorCounter.getAndSet(0L);
            totalDuration = queryDurations.getAndSet(0L);
        } else {
            numSearches = queryCounter.get();
            numEntries = resourceCounter.get();
            numErrors = errorCounter.get();
            totalDuration = queryDurations.get();
        }

        final long recentNumSearches = numSearches - lastNumSearches;
        final long recentNumEntries = numEntries - lastNumEntries;
        final long recentNumErrors = numErrors - lastNumErrors;
        final long recentDuration = totalDuration - lastDuration;

        final double numSeconds = intervalDuration / 1000000000.0d;
        final double recentSearchRate = recentNumSearches / numSeconds;
        final double recentErrorRate = recentNumErrors / numSeconds;

        final double recentAvgDuration;
        final double recentEntriesPerSearch;
        if (recentNumSearches > 0L) {
            recentEntriesPerSearch = 1.0d * recentNumEntries / recentNumSearches;
            recentAvgDuration = 1.0d * recentDuration / recentNumSearches / 1000000;
        } else {
            recentEntriesPerSearch = 0.0d;
            recentAvgDuration = 0.0d;
        }

        if (warmUp && (remainingWarmUpIntervals > 0)) {
            out(formatter.formatRow(recentSearchRate, recentAvgDuration, recentEntriesPerSearch,
                    recentErrorRate, "warming up", "warming up"));

            remainingWarmUpIntervals--;
            if (remainingWarmUpIntervals == 0) {
                out(INFO_QUERY_TOOL_WARM_UP_COMPLETED.get());
                setOverallStartTime = true;
            }
        } else {
            if (setOverallStartTime) {
                overallStartTime = lastEndTime;
                setOverallStartTime = false;
            }

            final double numOverallSeconds = (endTime - overallStartTime) / 1000000000.0d;
            final double overallSearchRate = numSearches / numOverallSeconds;

            final double overallAvgDuration;
            if (numSearches > 0L) {
                overallAvgDuration = 1.0d * totalDuration / numSearches / 1000000;
            } else {
                overallAvgDuration = 0.0d;
            }

            out(formatter.formatRow(recentSearchRate, recentAvgDuration, recentEntriesPerSearch,
                    recentErrorRate, overallSearchRate, overallAvgDuration));

            lastNumSearches = numSearches;
            lastNumEntries = numEntries;
            lastNumErrors = numErrors;
            lastDuration = totalDuration;
        }

        lastEndTime = endTime;
    }

    // Stop all of the threads.
    ResultCode resultCode = ResultCode.SUCCESS;
    for (final QueryRateThread t : threads) {
        t.signalShutdown();
    }

    // Interrupt any blocked threads after a grace period.
    final WakeableSleeper sleeper = new WakeableSleeper();
    sleeper.sleep(1000);
    mgr.shutdown();

    for (final QueryRateThread t : threads) {
        final ResultCode r = t.waitForShutdown();
        if (resultCode == ResultCode.SUCCESS) {
            resultCode = r;
        }
    }

    return resultCode;
}