Example usage for java.lang Long MIN_VALUE


Introduction

This page collects usage examples for java.lang.Long.MIN_VALUE.

Prototype

public static final long MIN_VALUE


Document

A constant holding the minimum value a long can have, -2^63.
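
For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) showing the constant's value, the overflow asymmetry around it, and the sentinel pattern most of the usage examples rely on:

public class LongMinValueDemo {
    public static void main(String[] args) {
        // Long.MIN_VALUE is -2^63
        System.out.println(Long.MIN_VALUE); // -9223372036854775808

        // The range is asymmetric (Long.MAX_VALUE is 2^63 - 1), so negating
        // MIN_VALUE overflows back to MIN_VALUE itself.
        System.out.println(-Long.MIN_VALUE);          // -9223372036854775808
        System.out.println(Math.abs(Long.MIN_VALUE)); // still -9223372036854775808

        // Common sentinel pattern (see the examples below): start a running
        // maximum at MIN_VALUE so that any real value replaces it.
        long maxSeen = Long.MIN_VALUE;
        for (long v : new long[] { 3L, -7L, 42L }) {
            maxSeen = Math.max(maxSeen, v);
        }
        System.out.println(maxSeen); // 42
    }
}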

Usage

From source file:com.facebook.tsdb.tsdash.server.model.Metric.java

public void alignAllTimeSeries() {
    long cycle = guessTimeCycle();
    // wrap the time stamps to a multiple of cycle
    for (ArrayList<DataPoint> points : timeSeries.values()) {
        for (DataPoint p : points) {
            p.ts -= p.ts % cycle;
        }
    }
    // do the actual aligning
    TreeMap<TagsArray, ArrayList<DataPoint>> aligned = new TreeMap<TagsArray, ArrayList<DataPoint>>(
            Tag.arrayComparator());
    for (TagsArray header : timeSeries.keySet()) {
        aligned.put(header, TimeSeries.align(timeSeries.get(header), cycle));
    }
    // align the time series between each other
    long maxmin = Long.MIN_VALUE;
    long minmax = Long.MAX_VALUE;
    long maxmax = Long.MIN_VALUE;
    for (ArrayList<DataPoint> points : aligned.values()) {
        if (points.size() == 0) {
            logger.error("We have found an empty timeseries");
            continue;
        }
        DataPoint first = points.get(0);
        if (first.ts > maxmin) {
            maxmin = first.ts;
        }
        DataPoint last = points.get(points.size() - 1);
        if (last.ts < minmax) {
            minmax = last.ts;
        }
        if (last.ts > maxmax) {
            maxmax = last.ts;
        }
    }
    if (maxmax - minmax > DATA_MISSING_THOLD) {
        // we've just detected missing data from this set of time series
        logger.error("Missing data detected");

        // add padding to maxmax
        for (ArrayList<DataPoint> points : aligned.values()) {
            if (points.size() == 0) {
                continue;
            }
            long max = points.get(points.size() - 1).ts;
            for (long ts = max + cycle; ts <= maxmax; ts += cycle) {
                points.add(new DataPoint(ts, 0.0));
            }
        }
    } else {
        // cut off the tail
        for (ArrayList<DataPoint> points : aligned.values()) {
            while (points.size() > 0 && points.get(points.size() - 1).ts > minmax) {
                points.remove(points.size() - 1);
            }
        }
    }
    // cut off the head
    for (ArrayList<DataPoint> points : aligned.values()) {
        while (points.size() > 0 && points.get(0).ts < maxmin) {
            points.remove(0);
        }
    }
    this.timeSeries = aligned;
}

From source file:com.tilab.fiware.metaware.service.DatasetServiceTest.java

/**
 * Test of deleteDataset method, of class DatasetService.
 *
 * @throws com.fasterxml.jackson.core.JsonProcessingException
 */
@Test
public void testDeleteDataset() throws JsonProcessingException {
    System.out.println("deleteDataset");
    DatasetService instance = INSTANCE.getDatasetService();
    Dataset dataset = new Dataset("dataset test name", "dataset test description", "test", Long.MIN_VALUE,
            Long.MIN_VALUE, null, null, "private", true, new DatasetStructure());
    dataset.setPermissions(Arrays.asList(perm1));
    dataset.setOwner(userId2);
    String id = instance.createDataset(dataset);
    instance.deleteDataset(id);
}

From source file:jetbrains.exodus.env.EnvironmentImpl.java

@Override
public void executeTransactionSafeTask(@NotNull final Runnable task) {
    final long newestTxnRoot = getNewestTxnRootAddress();
    if (newestTxnRoot == Long.MIN_VALUE) {
        task.run();
    } else {
        synchronized (txnSafeTasks) {
            txnSafeTasks.addLast(new RunnableWithTxnRoot(task, newestTxnRoot));
        }
    }
}

From source file:net.ymate.platform.configuration.provider.impl.JConfigProvider.java

public long getLong(String key) {
    return getLong(key, Long.MIN_VALUE);
}
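
In this provider, Long.MIN_VALUE serves as the default returned when no value is found for the key. A hypothetical caller sketch (the key name and fallback value are assumptions, not part of the project) would treat that sentinel as "not configured":

// Hypothetical caller sketch: key name and fallback are illustrative only.
long readTimeoutOrDefault(JConfigProvider provider) {
    long timeout = provider.getLong("request.timeout");
    // Long.MIN_VALUE is the default above, i.e. no value was found for the key.
    return timeout == Long.MIN_VALUE ? 30_000L : timeout;
}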

From source file:io.warp10.continuum.egress.EgressFetchHandler.java

@Override
public void handle(String target, Request baseRequest, HttpServletRequest req, HttpServletResponse resp)
        throws IOException, ServletException {
    boolean fromArchive = false;
    boolean splitFetch = false;
    boolean writeTimestamp = false;

    if (Constants.API_ENDPOINT_FETCH.equals(target)) {
        baseRequest.setHandled(true);
        fromArchive = false;
    } else if (Constants.API_ENDPOINT_AFETCH.equals(target)) {
        baseRequest.setHandled(true);
        fromArchive = true;
    } else if (Constants.API_ENDPOINT_SFETCH.equals(target)) {
        baseRequest.setHandled(true);
        splitFetch = true;
    } else if (Constants.API_ENDPOINT_CHECK.equals(target)) {
        baseRequest.setHandled(true);
        resp.setStatus(HttpServletResponse.SC_OK);
        return;
    } else {
        return;
    }

    try {
        // Labels for Sensision
        Map<String, String> labels = new HashMap<String, String>();

        labels.put(SensisionConstants.SENSISION_LABEL_TYPE, target);

        //
        // Add CORS header
        //

        resp.setHeader("Access-Control-Allow-Origin", "*");

        String start = null;
        String stop = null;

        long now = Long.MIN_VALUE;
        long timespan = 0L;

        String nowParam = null;
        String timespanParam = null;
        String dedupParam = null;
        String showErrorsParam = null;

        if (splitFetch) {
            nowParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_NOW_HEADERX));
            timespanParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_TIMESPAN_HEADERX));
            showErrorsParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_SHOW_ERRORS_HEADERX));
        } else {
            start = req.getParameter(Constants.HTTP_PARAM_START);
            stop = req.getParameter(Constants.HTTP_PARAM_STOP);

            nowParam = req.getParameter(Constants.HTTP_PARAM_NOW);
            timespanParam = req.getParameter(Constants.HTTP_PARAM_TIMESPAN);
            dedupParam = req.getParameter(Constants.HTTP_PARAM_DEDUP);
            showErrorsParam = req.getParameter(Constants.HTTP_PARAM_SHOW_ERRORS);
        }

        String maxDecoderLenParam = req.getParameter(Constants.HTTP_PARAM_MAXSIZE);
        int maxDecoderLen = null != maxDecoderLenParam ? Integer.parseInt(maxDecoderLenParam)
                : Constants.DEFAULT_PACKED_MAXSIZE;

        String suffix = req.getParameter(Constants.HTTP_PARAM_SUFFIX);
        if (null == suffix) {
            suffix = Constants.DEFAULT_PACKED_CLASS_SUFFIX;
        }

        boolean unpack = null != req.getParameter(Constants.HTTP_PARAM_UNPACK);

        long chunksize = Long.MAX_VALUE;

        if (null != req.getParameter(Constants.HTTP_PARAM_CHUNKSIZE)) {
            chunksize = Long.parseLong(req.getParameter(Constants.HTTP_PARAM_CHUNKSIZE));
        }

        if (chunksize <= 0) {
            throw new IOException("Invalid chunksize.");
        }

        boolean showErrors = null != showErrorsParam;
        boolean dedup = null != dedupParam && "true".equals(dedupParam);

        if (null != start && null != stop) {
            long tsstart = fmt.parseDateTime(start).getMillis() * Constants.TIME_UNITS_PER_MS;
            long tsstop = fmt.parseDateTime(stop).getMillis() * Constants.TIME_UNITS_PER_MS;

            if (tsstart < tsstop) {
                now = tsstop;
                timespan = tsstop - tsstart;
            } else {
                now = tsstart;
                timespan = tsstart - tsstop;
            }
        } else if (null != nowParam && null != timespanParam) {
            if ("now".equals(nowParam)) {
                now = TimeSource.getTime();
            } else {
                try {
                    now = Long.parseLong(nowParam);
                } catch (Exception e) {
                    now = fmt.parseDateTime(nowParam).getMillis() * Constants.TIME_UNITS_PER_MS;
                }
            }

            timespan = Long.parseLong(timespanParam);
        }

        if (Long.MIN_VALUE == now) {
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST,
                    "Missing now/timespan or start/stop parameters.");
            return;
        }

        String selector = splitFetch ? null : req.getParameter(Constants.HTTP_PARAM_SELECTOR);

        //
        // Extract token from header
        //

        String token = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_TOKENX));

        // If token was not found in header, extract it from the 'token' parameter
        if (null == token && !splitFetch) {
            token = req.getParameter(Constants.HTTP_PARAM_TOKEN);
        }

        String fetchSig = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_FETCH_SIGNATURE));

        //
        // Check token signature if it was provided
        //

        boolean signed = false;

        if (splitFetch) {
            // Force showErrors
            showErrors = true;
            signed = true;
        }

        if (null != fetchSig) {
            if (null != fetchPSK) {
                String[] subelts = fetchSig.split(":");
                if (2 != subelts.length) {
                    throw new IOException("Invalid fetch signature.");
                }
                long nowts = System.currentTimeMillis();
                long sigts = new BigInteger(subelts[0], 16).longValue();
                long sighash = new BigInteger(subelts[1], 16).longValue();

                if (nowts - sigts > 10000L) {
                    throw new IOException("Fetch signature has expired.");
                }

                // Recompute hash of ts:token

                String tstoken = Long.toString(sigts) + ":" + token;

                long checkedhash = SipHashInline.hash24(fetchPSK, tstoken.getBytes(Charsets.ISO_8859_1));

                if (checkedhash != sighash) {
                    throw new IOException("Corrupted fetch signature");
                }

                signed = true;
            } else {
                throw new IOException("Fetch PreSharedKey is not set.");
            }
        }

        ReadToken rtoken = null;

        String format = splitFetch ? "wrapper" : req.getParameter(Constants.HTTP_PARAM_FORMAT);

        if (!splitFetch) {
            try {
                rtoken = Tokens.extractReadToken(token);

                if (rtoken.getHooksSize() > 0) {
                    throw new IOException("Tokens with hooks cannot be used for fetching data.");
                }
            } catch (WarpScriptException ee) {
                throw new IOException(ee);
            }

            if (null == rtoken) {
                resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Missing token.");
                return;
            }
        }

        boolean showAttr = "true".equals(req.getParameter(Constants.HTTP_PARAM_SHOWATTR));

        boolean sortMeta = "true".equals(req.getParameter(Constants.HTTP_PARAM_SORTMETA));

        //
        // Extract the class and labels selectors
        // The class selector and label selectors are supposed to have
        // values which use percent encoding, i.e. explicit percent encoding which
        // might have been re-encoded using percent encoding when passed as parameter
        //
        //

        Set<Metadata> metadatas = new HashSet<Metadata>();
        List<Iterator<Metadata>> iterators = new ArrayList<Iterator<Metadata>>();

        if (!splitFetch) {

            if (null == selector) {
                throw new IOException("Missing '" + Constants.HTTP_PARAM_SELECTOR + "' parameter.");
            }

            String[] selectors = selector.split("\\s+");

            for (String sel : selectors) {
                Matcher m = SELECTOR_RE.matcher(sel);

                if (!m.matches()) {
                    resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
                    return;
                }

                String classSelector = URLDecoder.decode(m.group(1), "UTF-8");
                String labelsSelection = m.group(2);

                Map<String, String> labelsSelectors;

                try {
                    labelsSelectors = GTSHelper.parseLabelsSelectors(labelsSelection);
                } catch (ParseException pe) {
                    throw new IOException(pe);
                }

                //
                // Force 'producer'/'owner'/'app' from token
                //

                labelsSelectors.remove(Constants.PRODUCER_LABEL);
                labelsSelectors.remove(Constants.OWNER_LABEL);
                labelsSelectors.remove(Constants.APPLICATION_LABEL);

                labelsSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken));

                List<Metadata> metas = null;

                List<String> clsSels = new ArrayList<String>();
                List<Map<String, String>> lblsSels = new ArrayList<Map<String, String>>();

                clsSels.add(classSelector);
                lblsSels.add(labelsSelectors);

                try {
                    metas = directoryClient.find(clsSels, lblsSels);
                    metadatas.addAll(metas);
                } catch (Exception e) {
                    //
                    // If metadatas is not empty, create an iterator for it, then clear it
                    //
                    if (!metadatas.isEmpty()) {
                        iterators.add(metadatas.iterator());
                        metadatas.clear();
                    }
                    iterators.add(directoryClient.iterator(clsSels, lblsSels));
                }
            }
        } else {
            //
            // Add an iterator which reads splits from the request body
            //

            boolean gzipped = false;

            if (null != req.getHeader("Content-Type")
                    && "application/gzip".equals(req.getHeader("Content-Type"))) {
                gzipped = true;
            }

            BufferedReader br = null;

            if (gzipped) {
                GZIPInputStream is = new GZIPInputStream(req.getInputStream());
                br = new BufferedReader(new InputStreamReader(is));
            } else {
                br = req.getReader();
            }

            final BufferedReader fbr = br;

            MetadataIterator iterator = new MetadataIterator() {

                private List<Metadata> metadatas = new ArrayList<Metadata>();

                private boolean done = false;

                private String lasttoken = "";

                @Override
                public void close() throws Exception {
                    fbr.close();
                }

                @Override
                public Metadata next() {
                    if (!metadatas.isEmpty()) {
                        Metadata meta = metadatas.get(metadatas.size() - 1);
                        metadatas.remove(metadatas.size() - 1);
                        return meta;
                    } else {
                        if (hasNext()) {
                            return next();
                        } else {
                            throw new NoSuchElementException();
                        }
                    }
                }

                @Override
                public boolean hasNext() {
                    if (!metadatas.isEmpty()) {
                        return true;
                    }

                    if (done) {
                        return false;
                    }

                    String line = null;

                    try {
                        line = fbr.readLine();
                    } catch (IOException ioe) {
                        throw new RuntimeException(ioe);
                    }

                    if (null == line) {
                        done = true;
                        return false;
                    }

                    //
                    // Decode/Unwrap/Deserialize the split
                    //

                    byte[] data = OrderPreservingBase64.decode(line.getBytes(Charsets.US_ASCII));
                    if (null != fetchAES) {
                        data = CryptoUtils.unwrap(fetchAES, data);
                    }

                    if (null == data) {
                        throw new RuntimeException("Invalid wrapped content.");
                    }

                    TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());

                    GTSSplit split = new GTSSplit();

                    try {
                        deserializer.deserialize(split, data);
                    } catch (TException te) {
                        throw new RuntimeException(te);
                    }

                    //
                    // Check the expiry
                    //

                    long instant = System.currentTimeMillis();

                    if (instant - split.getTimestamp() > maxSplitAge || instant > split.getExpiry()) {
                        throw new RuntimeException("Split has expired.");
                    }

                    this.metadatas.addAll(split.getMetadatas());

                    // We assume there was at least one metadata instance in the split!!!
                    return true;
                }
            };

            iterators.add(iterator);
        }

        List<Metadata> metas = new ArrayList<Metadata>();
        metas.addAll(metadatas);

        if (!metas.isEmpty()) {
            iterators.add(metas.iterator());
        }

        //
        // Loop over the iterators, storing the read metadata to a temporary file encrypted on disk
        // Data is encrypted using a onetime pad
        //

        final byte[] onetimepad = new byte[(int) Math.min(65537, System.currentTimeMillis() % 100000)];
        new Random().nextBytes(onetimepad);

        final File cache = File.createTempFile(
                Long.toHexString(System.currentTimeMillis()) + "-" + Long.toHexString(System.nanoTime()),
                ".dircache");
        cache.deleteOnExit();

        FileWriter writer = new FileWriter(cache);

        TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());

        int padidx = 0;

        for (Iterator<Metadata> itermeta : iterators) {
            try {
                while (itermeta.hasNext()) {
                    Metadata metadata = itermeta.next();

                    try {
                        byte[] bytes = serializer.serialize(metadata);
                        // Apply onetimepad
                        for (int i = 0; i < bytes.length; i++) {
                            bytes[i] = (byte) (bytes[i] ^ onetimepad[padidx++]);
                            if (padidx >= onetimepad.length) {
                                padidx = 0;
                            }
                        }
                        OrderPreservingBase64.encodeToWriter(bytes, writer);
                        writer.write('\n');
                    } catch (TException te) {
                    }
                }

                if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                    try {
                        ((MetadataIterator) itermeta).close();
                    } catch (Exception e) {
                    }
                }
            } catch (Throwable t) {
                throw t;
            } finally {
                if (itermeta instanceof MetadataIterator) {
                    try {
                        ((MetadataIterator) itermeta).close();
                    } catch (Exception e) {
                    }
                }
            }
        }

        writer.close();

        //
        // Create an iterator based on the cache
        //

        MetadataIterator cacheiterator = new MetadataIterator() {

            BufferedReader reader = new BufferedReader(new FileReader(cache));

            private Metadata current = null;
            private boolean done = false;

            private TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());

            int padidx = 0;

            @Override
            public boolean hasNext() {
                if (done) {
                    return false;
                }

                if (null != current) {
                    return true;
                }

                try {
                    String line = reader.readLine();
                    if (null == line) {
                        done = true;
                        return false;
                    }
                    byte[] raw = OrderPreservingBase64.decode(line.getBytes(Charsets.US_ASCII));
                    // Apply one time pad
                    for (int i = 0; i < raw.length; i++) {
                        raw[i] = (byte) (raw[i] ^ onetimepad[padidx++]);
                        if (padidx >= onetimepad.length) {
                            padidx = 0;
                        }
                    }
                    Metadata metadata = new Metadata();
                    try {
                        deserializer.deserialize(metadata, raw);
                        this.current = metadata;
                        return true;
                    } catch (TException te) {
                        LOG.error("", te);
                    }
                } catch (IOException ioe) {
                    LOG.error("", ioe);
                }

                return false;
            }

            @Override
            public Metadata next() {
                if (null != this.current) {
                    Metadata metadata = this.current;
                    this.current = null;
                    return metadata;
                } else {
                    throw new NoSuchElementException();
                }
            }

            @Override
            public void close() throws Exception {
                this.reader.close();
                cache.delete();
            }
        };

        iterators.clear();
        iterators.add(cacheiterator);

        metas = new ArrayList<Metadata>();

        PrintWriter pw = resp.getWriter();

        AtomicReference<Metadata> lastMeta = new AtomicReference<Metadata>(null);
        AtomicLong lastCount = new AtomicLong(0L);

        long fetchtimespan = timespan;

        for (Iterator<Metadata> itermeta : iterators) {
            while (itermeta.hasNext()) {
                metas.add(itermeta.next());

                //
                // Access the data store every 'FETCH_BATCHSIZE' GTS or at the end of each iterator
                //

                if (metas.size() > FETCH_BATCHSIZE || !itermeta.hasNext()) {
                    try (GTSDecoderIterator iterrsc = storeClient.fetch(rtoken, metas, now, fetchtimespan,
                            fromArchive, writeTimestamp)) {
                        GTSDecoderIterator iter = iterrsc;

                        if (unpack) {
                            iter = new UnpackingGTSDecoderIterator(iter, suffix);
                            timespan = Long.MIN_VALUE + 1;
                        }

                        if ("text".equals(format)) {
                            textDump(pw, iter, now, timespan, false, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        } else if ("fulltext".equals(format)) {
                            textDump(pw, iter, now, timespan, true, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        } else if ("raw".equals(format)) {
                            rawDump(pw, iter, dedup, signed, timespan, lastMeta, lastCount, sortMeta);
                        } else if ("wrapper".equals(format)) {
                            wrapperDump(pw, iter, dedup, signed, fetchPSK, timespan, lastMeta, lastCount);
                        } else if ("json".equals(format)) {
                            jsonDump(pw, iter, now, timespan, dedup, signed, lastMeta, lastCount);
                        } else if ("tsv".equals(format)) {
                            tsvDump(pw, iter, now, timespan, false, dedup, signed, lastMeta, lastCount,
                                    sortMeta);
                        } else if ("fulltsv".equals(format)) {
                            tsvDump(pw, iter, now, timespan, true, dedup, signed, lastMeta, lastCount,
                                    sortMeta);
                        } else if ("pack".equals(format)) {
                            packedDump(pw, iter, now, timespan, dedup, signed, lastMeta, lastCount,
                                    maxDecoderLen, suffix, chunksize, sortMeta);
                        } else if ("null".equals(format)) {
                            nullDump(iter);
                        } else {
                            textDump(pw, iter, now, timespan, false, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        }
                    } catch (Throwable t) {
                        LOG.error("", t);
                        Sensision.update(SensisionConstants.CLASS_WARP_FETCH_ERRORS, Sensision.EMPTY_LABELS, 1);
                        if (showErrors) {
                            pw.println();
                            StringWriter sw = new StringWriter();
                            PrintWriter pw2 = new PrintWriter(sw);
                            t.printStackTrace(pw2);
                            pw2.close();
                            sw.flush();
                            String error = URLEncoder.encode(sw.toString(), "UTF-8");
                            pw.println(Constants.EGRESS_FETCH_ERROR_PREFIX + error);
                        }
                        throw new IOException(t);
                    } finally {
                        if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                            try {
                                ((MetadataIterator) itermeta).close();
                            } catch (Exception e) {
                            }
                        }
                    }

                    //
                    // Reset 'metas'
                    //

                    metas.clear();
                }
            }

            if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                try {
                    ((MetadataIterator) itermeta).close();
                } catch (Exception e) {
                }
            }
        }

        Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_FETCH_REQUESTS, labels, 1);
    } catch (Exception e) {
        if (!resp.isCommitted()) {
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
            return;
        }
    }
}

From source file:org.apache.hadoop.hbase.mapreduce.WALInputFormat.java

/**
 * implementation shared with deprecated HLogInputFormat
 */
List<InputSplit> getSplits(final JobContext context, final String startKey, final String endKey)
        throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    Path inputDir = new Path(conf.get("mapreduce.input.fileinputformat.inputdir"));

    long startTime = conf.getLong(startKey, Long.MIN_VALUE);
    long endTime = conf.getLong(endKey, Long.MAX_VALUE);

    FileSystem fs = inputDir.getFileSystem(conf);
    List<FileStatus> files = getFiles(fs, inputDir, startTime, endTime);

    List<InputSplit> splits = new ArrayList<InputSplit>(files.size());
    for (FileStatus file : files) {
        splits.add(new WALSplit(file.getPath().toString(), file.getLen(), startTime, endTime));
    }
    return splits;
}

From source file:com.netbase.insightapi.clientlib.InsightAPIQuery.java

/**
 * Sets the query to be restricted to a specific published date range. The
 * operation removes any existing published date range. If d1 is
 * Long.MIN_VALUE, nothing else is done. If d2 is Long.MAX_VALUE, the
 * endpoint of the range is not set.
 * 
 * @param d1
 * @param d2
 */
public void setPublishedDateRange(long d1, long d2) {
    setPublishedDateRange(d1 == Long.MIN_VALUE ? null : new java.util.Date(d1),
            d2 == Long.MAX_VALUE ? null : new java.util.Date(d2));
}
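
A short, hypothetical usage sketch of the convention described in the Javadoc (the helper names and 'startMillis' are assumptions for illustration): passing Long.MIN_VALUE as d1 removes the existing range and does nothing else, while Long.MAX_VALUE as d2 leaves the endpoint of the range unset.

// Illustrative only; these helpers are not part of the original example.
void clearPublishedDateRange(InsightAPIQuery query) {
    // Per the Javadoc: any existing range is removed and nothing else is done.
    query.setPublishedDateRange(Long.MIN_VALUE, Long.MAX_VALUE);
}

void restrictToPublishedSince(InsightAPIQuery query, long startMillis) {
    // Range starts at startMillis; Long.MAX_VALUE leaves the endpoint open.
    query.setPublishedDateRange(startMillis, Long.MAX_VALUE);
}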

From source file:eu.qualimaster.monitoring.profiling.predictors.Kalman.java

/**
 * This method predicts the value of a time line one or multiple time step(s) ahead of the
 * last (via update) given value.
 * @param steps Number of times steps to predict.
 * 
 * @return Predictions for the last time step ahead as {@link Double} or Double.MIN_VALUE if the prediction failed.
 */
public double predict(int steps) {
    double prediction = Double.MIN_VALUE;
    if (lastUpdated != Long.MIN_VALUE) {
        try {
            if (steps > 0) {
                // Gap-Handling
                /* 
                 * As long as the time stamp of the last update and the time step to predict 
                 * are more than an allowed gap apart from each other ...
                 */
                long oldLastUpdated = lastUpdated;
                double oldLastUpdate = lastUpdate;
                boolean gap = false;
                while (((System.currentTimeMillis() + (steps - 1) * 1000)
                        - (lastUpdated * 1000)) > allowedGap) {
                    /* 
                     * ... simulate updates using the last prediction.
                     * If an update must be simulated and there is no predicted value 
                     * to use instead of the measurement, the 'defaultMeasurement' value is used for the update.
                     */
                    update(lastUpdated + 1, prediction == Double.MIN_VALUE ? lastUpdate : defaultMeasurement);
                    prediction = predict(0);
                    gap = true;
                }
                // Reset values overwritten by gap handling to make predict-updates non-persistent.
                if (gap) {
                    lastUpdated = oldLastUpdated;
                    lastUpdate = oldLastUpdate;
                }
            }
            filter.predict(controlVector);
            prediction = filter.getStateEstimation()[2];
            predictedSinceUpdate = true;
        } catch (DimensionMismatchException e) {
            LogManager.getLogger(Kalman.class).error(e.getMessage(), e);
            prediction = Double.MIN_VALUE;
        }
    } else {
        System.err.println("Warning: Prediction should only be called after at least one update-call!");
    }
    return prediction;
}

From source file:com.basho.riak.presto.CoverageRecordCursor.java

private OtpErlangTuple buildQuery() //List<RiakColumnHandle> columnHandles,
//TupleDomain tupleDom)
{

    // case where a='b'
    Map<ConnectorColumnHandle, Comparable<?>> fixedValues = tupleDomain.extractFixedValues();
    for (Map.Entry<ConnectorColumnHandle, Comparable<?>> fixedValue : fixedValues.entrySet()) {
        log.debug("> %s (%s)", fixedValue, fixedValue.getClass());
        log.debug(">> %s", fixedValue.getKey());

        checkNotNull(fixedValue.getKey());
        checkArgument(fixedValue.getKey() instanceof ConnectorColumnHandle);
        checkArgument(fixedValue.getKey() instanceof RiakColumnHandle);

        RiakColumnHandle c = (RiakColumnHandle) fixedValue.getKey();

        for (RiakColumnHandle columnHandle : columnHandles) {
            if (c.getColumn().getName().equals(columnHandle.getColumn().getName())
                    && c.getColumn().getType().equals(columnHandle.getColumn().getType())
                    && columnHandle.getColumn().getIndex()) {
                String field = null;
                OtpErlangObject value;
                if (columnHandle.getColumn().getType() == BigintType.BIGINT) {
                    field = columnHandle.getColumn().getName() + "_int";
                    Long l = (Long) fixedValue.getValue();
                    value = new OtpErlangLong(l);
                } else if (columnHandle.getColumn().getType() == VarcharType.VARCHAR) {
                    field = columnHandle.getColumn().getName() + "_bin";
                    Slice s = (Slice) fixedValue.getValue();
                    value = new OtpErlangBinary(s.getBytes());
                } else {
                    continue;
                }
                OtpErlangObject[] t = { new OtpErlangAtom("eq"), new OtpErlangBinary(field.getBytes()), value };

                return new OtpErlangTuple(t);
            }
        }
    }

    //case where a < b and ... blah
    Map<RiakColumnHandle, Domain> map = tupleDomain.getDomains();
    for (Map.Entry<RiakColumnHandle, Domain> entry : map.entrySet()) {
        RiakColumnHandle c = entry.getKey();
        for (RiakColumnHandle columnHandle : columnHandles) {
            if (c.getColumn().getName().equals(columnHandle.getColumn().getName())
                    && c.getColumn().getType().equals(columnHandle.getColumn().getType())
                    && columnHandle.getColumn().getIndex()) {
                String field = null;
                OtpErlangObject lhs, rhs;
                Range span = entry.getValue().getRanges().getSpan();
                //log.debug("value:%s, range:%s, span:%s",
                //        entry.getValue(), entry.getValue().getRanges(),span);
                //log.debug("min: %s max:%s", span.getLow(), span.getHigh());
                if (columnHandle.getColumn().getType() == BigintType.BIGINT) {
                    field = columnHandle.getColumn().getName() + "_int";
                    // NOTE: Both Erlang and JSON can express integers smaller than Long.MIN_VALUE
                    Long l = Long.MIN_VALUE;
                    if (!span.getLow().isLowerUnbounded()) {
                        l = (Long) span.getLow().getValue();
                    }
                    // NOTE: Both Erlang and JSON can express integers greater than Long.MAX_VALUE
                    Long r = Long.MAX_VALUE;
                    if (!span.getHigh().isUpperUnbounded()) {
                        r = (Long) span.getHigh().getValue();
                    }

                    lhs = new OtpErlangLong(l);
                    rhs = new OtpErlangLong(r);
                } else if (columnHandle.getColumn().getType() == VarcharType.VARCHAR) {
                    field = columnHandle.getColumn().getName() + "_bin";
                    //Byte m = Byte.MIN_VALUE;
                    byte[] l = { 0 };
                    if (!span.getLow().isLowerUnbounded()) {
                        l = ((String) span.getLow().getValue()).getBytes();
                    }
                    Byte m2 = Byte.MAX_VALUE;
                    byte[] r = { m2 };
                    if (!span.getHigh().isUpperUnbounded()) {
                        r = ((String) span.getHigh().getValue()).getBytes();
                    }
                    lhs = new OtpErlangBinary(l);
                    rhs = new OtpErlangBinary(r);

                } else {
                    continue;
                }
                OtpErlangObject[] t = { new OtpErlangAtom("range"), new OtpErlangBinary(field.getBytes()), lhs,
                        rhs };
                return new OtpErlangTuple(t);
            }
        }
    }
    return null;
}

From source file:com.tilab.fiware.metaware.service.DataSourceServiceTest.java

/**
 * Test of deleteDataSource method, of class DataSourceService.
 */
@Test
public void testDeleteDataSource() {
    System.out.println("deleteDataSource");
    DataSource datasource = new DataSource("datasource test", "this is just a test", "test type",
            Long.MIN_VALUE, Long.MIN_VALUE, null, null, "test status", "test subtype",
            "jdbc:mysql://localhost/test", "testUsername", "superSecret", "query", "SELECT * FROM TEST");
    DataSourceService instance = new DataSourceService();
    datasource.setPermissions(Arrays.asList(perm1));
    datasource.setOwner(userId2);
    DataSource expResult = datasource;
    String datasId = instance.createDataSource(datasource);
    instance.deleteDataSource(datasId);
}