Example usage for java.lang.Long.MAX_VALUE

List of usage examples for java.lang.Long.MAX_VALUE

Introduction

On this page you can find example usages of java.lang.Long.MAX_VALUE.

Prototype

public static final long MAX_VALUE

Document

A constant holding the maximum value a long can have, 2^63 - 1 (9223372036854775807).
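
Before the project examples, here is a minimal self-contained sketch (not taken from any of the sources below) showing the value itself, the silent wrap-around on overflow, and the "no upper bound" sentinel idiom that most of the usage examples rely on:

public class LongMaxValueDemo {

    public static void main(String[] args) {
        // The largest value a 64-bit signed long can hold: 2^63 - 1
        System.out.println(Long.MAX_VALUE); // 9223372036854775807

        // Arithmetic past the limit wraps silently to Long.MIN_VALUE (two's complement)
        System.out.println(Long.MAX_VALUE + 1); // -9223372036854775808

        // Math.addExact throws instead of wrapping, which is safer near the limit
        try {
            Math.addExact(Long.MAX_VALUE, 1L);
        } catch (ArithmeticException e) {
            System.out.println("overflow detected");
        }

        // Common idiom: Long.MAX_VALUE as an "unbounded" sentinel, e.g. seeding a running minimum
        long min = Long.MAX_VALUE;
        for (long v : new long[] { 42L, 7L, 99L }) {
            if (v < min) {
                min = v;
            }
        }
        System.out.println("min = " + min); // 7
    }
}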

Usage

From source file:cz.jirutka.spring.http.client.cache.DefaultCachingPolicy.java

/**
 * Creates a new instance of {@code DefaultCachingPolicy} with defined
 * size limit of responses that should be stored in the cache.
 *
 * <p>A private cache will not, for example, cache responses to requests
 * with Authorization headers or responses marked with <tt>Cache-Control:
 * private</tt>. If, however, the cache is only going to be used by one
 * logical "user" (behaving similarly to a browser cache), then you will
 * want to turn off the shared cache setting.</p>
 *
 * @param maxBodySizeBytes The maximum content length.
 * @param sharedCache Whether to behave as a shared cache (true) or a
 *                    non-shared/private cache (false).
 */
public DefaultCachingPolicy(boolean sharedCache, long maxBodySizeBytes) {
    this.sharedCache = sharedCache;
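    // A non-positive limit means "no limit": Long.MAX_VALUE effectively disables the size check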
    this.maxBodySizeBytes = maxBodySizeBytes > 0 ? maxBodySizeBytes : Long.MAX_VALUE;
}

From source file:com.streamsets.pipeline.stage.destination.hdfs.writer.RecordWriter.java

private RecordWriter(Path path, long timeToLiveMillis, DataGeneratorFactory generatorFactory) {
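    // Long.MAX_VALUE as the TTL is a "never expires" sentinel; adding it to currentTimeMillis() would overflow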
    this.expires = (timeToLiveMillis == Long.MAX_VALUE) ? timeToLiveMillis
            : System.currentTimeMillis() + timeToLiveMillis;
    this.path = path;
    this.generatorFactory = generatorFactory;
    LOG.debug("Path[{}] - Creating", path);
    this.idleTimeout = -1L;
}

From source file:dbseer.comp.live.LiveTransactionProcessor.java

@Override
public void run() {
    try {
        this.transactionCountWriter = new PrintWriter(new FileWriter(this.transactionCountFile, true));
        this.avgLatencyWriter = new PrintWriter(new FileWriter(this.avgLatencyFile, true));
    } catch (IOException e) {
        e.printStackTrace();
    }

    long time;
    // wait for transactions to come in
    while (true) {
        time = map.getMinEndTime();
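        // getMinEndTime() returns Long.MAX_VALUE while no transactions have arrived yet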
        if (time != Long.MAX_VALUE) {
            break;
        } else {
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                if (!terminate) {
                    e.printStackTrace();
                } else {
                    return;
                }
            }
        }
        if (terminate) {
            break;
        }
    }

    String gap = "   ";
    double totalCount = 0;
    double currentCount = 0;
    double[] count = new double[DBSeerConstants.MAX_NUM_TABLE];
    double[] latencySum = new double[DBSeerConstants.MAX_NUM_TABLE];
    int maxClusterId = 0;
    long transCount = 0;

    // start processing transactions
    while (true) {
        long maxTime, maxClusterEndTime;
        maxTime = map.getMaxEndTime();
        if (!StreamClustering.getDBSCAN().isInitialized() && transCount < DBSeerConstants.DBSCAN_INIT_PTS) {
            transCount = map.getCount();
            monitor.setGlobalTransactionCount(transCount);
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        //         synchronized (StreamClustering.LOCK)
        try {
            StreamClustering.LOCK.lockInterruptibly();
            {
                maxClusterEndTime = StreamClustering.getDBSCAN().getMaxEndTime();
            }
            StreamClustering.LOCK.unlock();
            while (time < maxTime && time < maxClusterEndTime) {
                currentCount = 0;
                Set<Transaction> transactions = map.pollTransactions(time);

                // if no transactions for the time, skip to the next timestamp.
                if (transactions.isEmpty()) {
                    ++time;
                    continue;
                }

                // if the sys log is not yet available for this time, also skip to the next timestamp
                if (map.getMinSysLogTime() != Long.MAX_VALUE && map.getMinSysLogTime() > time) {
                    ++time;
                    continue;
                }

                boolean monitorLogFound = true;
                String monitorLog;
                while ((monitorLog = map.getSysLog(time)) == null) {
                    if (time < map.getLastSysLogTime()) {
                        monitorLogFound = false;
                        break;
                    }
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {
                        if (!terminate) {
                            e.printStackTrace();
                        } else {
                            return;
                        }
                    }
                }

                if (!monitorLogFound) {
                    ++time;
                    continue;
                }

                monitorWriter.println(monitorLog);
                monitorWriter.flush();

                for (Transaction t : transactions) {
                    Cluster c = t.getCluster();
                    // if cluster is null, skip
                    if (c == null) {
                        continue;
                    }

                    int cId = c.getId();
                    long latency = t.getLatency();

                    // ignore outliers
                    if (cId >= 0) {
                        latencySum[cId] += latency;
                        ++count[cId];
                        ++totalCount;
                        ++currentCount;

                        ArrayList<Double> latencyList = latencyMap.get(cId);
                        if (latencyList == null) {
                            latencyList = new ArrayList<Double>();
                            latencyMap.put(cId, latencyList);
                        }
                        latencyList.add((double) latency / 1000.0);
                    }
                    if (cId > maxClusterId) {
                        maxClusterId = cId;
                    }
                }

                // update live monitor
                //               int numTrans = maxClusterId + 1;
                int numTrans = StreamClustering.getDBSCAN().getAllClusters().size();
                synchronized (LiveMonitorInfo.LOCK) {
                    monitor.setCurrentTimestamp(time);
                    monitor.setNumTransactionTypes(numTrans);
                    monitor.setGlobalTransactionCount(totalCount);
                    for (int i = 0; i < numTrans; ++i) {
                        monitor.setCurrentTPS(i, count[i]);
                        if (count[i] == 0) {
                            monitor.setCurrentAverageLatency(i, 0.0);
                        } else {
                            monitor.setCurrentAverageLatency(i, latencySum[i] / count[i]);
                        }
                    }
                }

                transactionCountWriter.print(gap);
                avgLatencyWriter.print(gap);

                transactionCountWriter.printf("%.16e", (double) time);
                avgLatencyWriter.printf("%.16e", (double) time);

                for (int i = 0; i < numTrans; ++i) {
                    transactionCountWriter.print(gap);
                    transactionCountWriter.printf("%.16e", count[i]);
                    avgLatencyWriter.print(gap);
                    if (count[i] == 0.0) {
                        avgLatencyWriter.printf("%.16e", 0.0);
                    } else {
                        avgLatencyWriter.printf("%.16e", (latencySum[i] / (double) count[i] / 1000.0));
                    }
                    count[i] = 0;
                    latencySum[i] = 0;

                    // write percentile
                    PrintWriter writer = percentileLatencyWriter.get(i);
                    ArrayList<Double> latencyList = latencyMap.get(i);
                    if (latencyList == null) {
                        latencyList = new ArrayList<Double>();
                        latencyMap.put(i, latencyList);
                    }
                    if (writer == null) {
                        try {
                            writer = new PrintWriter(new FileOutputStream(String.format("%s%03d",
                                    DBSeerGUI.userSettings.getDBSeerRootPath() + File.separator
                                            + DBSeerConstants.LIVE_DATASET_PATH + File.separator
                                            + "prctile_latency_",
                                    i), true));
                        } catch (FileNotFoundException e) {
                            e.printStackTrace();
                        }
                        percentileLatencyWriter.put(i, writer);
                    }

                    double[] latencies = Doubles.toArray(latencyList);
                    writer.printf("%d,", time);
                    for (double p : percentiles) {
                        Percentile percentile = new Percentile(p);
                        percentile.setData(latencies);
                        double val = percentile.evaluate();
                        if (Double.isNaN(val))
                            val = 0.0;
                        writer.printf("%f,", val);
                    }
                    writer.println();
                    writer.flush();
                }

                transactionCountWriter.println();
                avgLatencyWriter.println();
                transactionCountWriter.flush();
                avgLatencyWriter.flush();

                // is it correct to set it here?
                DBSeerGUI.isLiveDataReady = true;

                ++time;
            }

            if (terminate) {
                break;
            }

            Thread.sleep(100);
        } catch (InterruptedException e) {
            if (!terminate) {
                e.printStackTrace();
            } else {
                return;
            }
        }
    }
}

From source file:io.hops.erasure_coding.TestBlockReconstructor.java

@Test(timeout = 30000)
public void testSourceBlockRepair() throws IOException, InterruptedException {
    DistributedFileSystem dfs = (DistributedFileSystem) getFileSystem();
    TestDfsClient testDfsClient = new TestDfsClient(getConfig());
    testDfsClient.injectIntoDfs(dfs);
    FileStatus testFileStatus = dfs.getFileStatus(testFile);

    String path = testFileStatus.getPath().toUri().getPath();
    int blockToLoose = new Random(seed)
            .nextInt((int) (testFileStatus.getLen() / testFileStatus.getBlockSize()));
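    // A length of Long.MAX_VALUE asks for the located blocks of the entire file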
    LocatedBlock lb = dfs.getClient().getLocatedBlocks(path, 0, Long.MAX_VALUE).get(blockToLoose);
    DataNodeUtil.loseBlock(getCluster(), lb);
    List<LocatedBlock> lostBlocks = new ArrayList<LocatedBlock>();
    lostBlocks.add(lb);
    LocatedBlocks locatedBlocks = new LocatedBlocks(0, false, lostBlocks, null, true);
    testDfsClient.setMissingLocatedBlocks(locatedBlocks);

    LocatedBlocks missingBlocks = new LocatedBlocks(testFileStatus.getLen(), false,
            new ArrayList<LocatedBlock>(), null, true);
    missingBlocks.getLocatedBlocks().add(lb);
    BlockReconstructor blockReconstructor = new BlockReconstructor(conf);
    Decoder decoder = new Decoder(conf, Util.getCodec(Util.Codecs.SRC));
    blockReconstructor.processFile(testFile, testParityFile, missingBlocks, decoder, null);

    // Block is recovered to the same data node so no need to wait for the block report
    try {
        FSDataInputStream in = dfs.open(testFile);
        byte[] buff = new byte[TEST_BLOCK_COUNT * DFS_TEST_BLOCK_SIZE];
        in.readFully(0, buff);
    } catch (BlockMissingException e) {
        LOG.error("Reading failed", e);
        Assert.fail("Repair failed. Missing a block.");
    }
}

From source file:com.gtwm.pb.model.manageData.WordCloud.java

/**
 * @param textLowerCase
 *            Input text, must be lower case
 * @param minWeight
 *            Minimum tag weight, e.g. a font size
 * @param maxWeight
 *            Max. tag weight
 * @param maxTags
 *            Maximum number of tags to return, -1 for all tags
 * @param additionalStopWords
 *            Set of words to specifically exclude, in addition to the
 *            standard set [and, not, after, yes, no, ...]
 */
public WordCloud(String textLowerCase, int minWeight, int maxWeight, int maxTags,
        Set<String> additionalStopWords) {
    String[] wordArray = textLowerCase.split("\\W");
    Set<String> stopWords = new HashSet<String>(Arrays.asList(stopWordsArray));
    for (String additionalStopWord : additionalStopWords) {
        stopWords.add(additionalStopWord.toLowerCase().trim());
    }
    LancasterStemmer stemmer = new LancasterStemmer();
    String wordStem;
    Frequency frequencies = new Frequency();
    for (String wordString : wordArray) {
        if ((!stopWords.contains(wordString)) && (wordString.length() >= minWordLength)) {
            wordStem = stemmer.stripSuffixes(wordString);
            // Record the mapping of the stem to its origin so the most
            // common origin can be re-introduced when the cloud is
            // generated
            this.recordStemOrigin(wordString, wordStem);
            frequencies.addValue(wordStem);
        }
    }
    // Compute std. dev of frequencies so we can remove outliers
    DescriptiveStatistics stats = new DescriptiveStatistics();
    Iterator freqIt = frequencies.valuesIterator();
    long stemFreq;
    while (freqIt.hasNext()) {
        stemFreq = frequencies.getCount(freqIt.next());
        stats.addValue(stemFreq);
    }
    double mean = stats.getMean();
    double stdDev = stats.getStandardDeviation();
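    // Seed the running minimum with Long.MAX_VALUE so the first real frequency replaces it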
    long minFreq = Long.MAX_VALUE;
    long maxFreq = 0;
    // Remove outliers
    freqIt = frequencies.valuesIterator();
    int upperLimit = (int) (mean + (stdDev * 10));
    int lowerLimit = (int) (mean - stdDev);
    if (lowerLimit < 2) {
        lowerLimit = 2;
    }
    int numWords = 0;
    int numRawWords = wordArray.length;
    boolean removeLowOutliers = (numRawWords > (maxTags * 10));
    while (freqIt.hasNext()) {
        wordStem = (String) freqIt.next();
        stemFreq = frequencies.getCount(wordStem);
        // For a large input set, remove high and low outliers.
        // For a smaller set, just high freq. outliers
        if ((stemFreq > upperLimit) || ((stemFreq < lowerLimit) && removeLowOutliers)) {
            freqIt.remove();
        } else {
            numWords++;
            if (stemFreq > maxFreq) {
                maxFreq = stemFreq;
            }
            // independent check (not else-if): the first value can be both the new max and the new min
            if (stemFreq < minFreq) {
                minFreq = stemFreq;
            }
        }
    }
    // Cut down to exact required number of tags by removing smallest
    if (lowerLimit < minFreq) {
        lowerLimit = (int) minFreq;
    }
    if (numWords > maxTags) {
        while (numWords > maxTags) {
            freqIt = frequencies.valuesIterator();
            SMALLREMOVAL: while (freqIt.hasNext()) {
                stemFreq = frequencies.getCount(freqIt.next());
                if (stemFreq < lowerLimit) {
                    freqIt.remove();
                    numWords--;
                    if (numWords == maxTags) {
                        break SMALLREMOVAL;
                    }
                }
            }
            int step = (int) ((mean - lowerLimit) / 3);
            if (step < 1) {
                step = 1;
            }
            lowerLimit += step;
        }
        // The new min. freq. may have changed
        minFreq = Long.MAX_VALUE;
        freqIt = frequencies.valuesIterator();
        while (freqIt.hasNext()) {
            stemFreq = frequencies.getCount(freqIt.next());
            if (stemFreq < minFreq) {
                minFreq = stemFreq;
            }
        }
    }
    // Scale and create tag objects
    double scaleFactor;
    if (maxFreq == minFreq) {
        scaleFactor = (double) (maxWeight - minWeight) / 4; // TODO: pick a realistic scale factor for this case
    } else {
        scaleFactor = (double) (maxWeight - minWeight) / (maxFreq - minFreq);
    }
    freqIt = frequencies.valuesIterator();
    int weight;
    while (freqIt.hasNext()) {
        wordStem = (String) freqIt.next();
        stemFreq = frequencies.getCount(wordStem);
        // Might still be some left less than the min. threshold
        if (stemFreq <= minFreq) {
            weight = minWeight;
        } else {
            weight = (int) (Math.ceil((double) (stemFreq - minFreq) * scaleFactor) + minWeight);
        }
        SortedSet<WordInfo> origins = this.stemOriginMap.get(wordStem);
        String mostCommonOrigin = origins.last().getName();
        Set<String> synonyms = new TreeSet<String>();
        for (WordInfo origin : origins) {
            synonyms.add(origin.getName());
        }
        WordInfo word = new Word(mostCommonOrigin, weight, synonyms);
        this.words.add(word);
    }
}

From source file:jfabrix101.lib.helper.NetworkHelper.java

/**
 * Download content from a URL in binary format.
 *
 * @param url - URL from which to download
 * @return - The content as an array of bytes
 * @throws Exception - Exception if something went wrong.
 */
public static byte[] getBinaryData(String url) throws Exception {
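    // Passing Long.MAX_VALUE as the size limit effectively means "no limit" on the download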
    return getBinaryData(url, Long.MAX_VALUE);
}

From source file:info.archinnov.achilles.it.bugs.TestEntityWithCaseSensitivePKIT.java

@Test
public void should_insert_and_delete() throws Exception {
    //Given
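    // nextLong(startInclusive, endExclusive): Long.MAX_VALUE as the bound draws from the full non-negative range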
    Long id = RandomUtils.nextLong(0, Long.MAX_VALUE);
    Long clust = RandomUtils.nextLong(0, Long.MAX_VALUE);

    //When
    manager.crud().insert(new EntityWithCaseSensitivePK(id, clust)).execute();
    manager.crud().deleteById(id, clust).execute();

    //Then
    final EntityWithCaseSensitivePK found = manager.crud().findById(id, clust).get();
    assertThat(found).isNull();
}

From source file:de.brands4friends.daleq.integration.tests.FieldTypeTest.java

@Test
public void insert_maxLong_into_AllTypes() {
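    // Boundary-value check: the largest representable long should survive insertion into every field type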
    assertInsertValueInAllFields(Long.MAX_VALUE);
}

From source file:com.healthcit.cacure.dao.FormDao.java

public boolean areAllModuleFormsApproved(Long moduleId) {
    long approvedCount = 0;
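    // Long.MAX_VALUE as a defensive initial value; the total-count query below overwrites it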
    long totalCount = Long.MAX_VALUE;

    // Get the all forms count
    String totalJpql = "select count(distinct frm) from QuestionnaireForm frm "
            + "where frm.module.id = :moduleId";
    Query totalQuery = em.createQuery(totalJpql);
    totalQuery.setParameter("moduleId", moduleId);
    totalCount = (Long) totalQuery.getSingleResult();

    // Get the returned forms count
    String approvedJpql = "select count(distinct frm) from QuestionnaireForm frm "
            + "where frm.module.id = :moduleId and frm.status = :status";
    Query approvedQuery = em.createQuery(approvedJpql);
    approvedQuery.setParameter("moduleId", moduleId);
    approvedQuery.setParameter("status", FormStatus.APPROVED);
    approvedCount = (Long) approvedQuery.getSingleResult();

    // If there are no forms at all, they cannot all be approved
    if (totalCount == 0) {
        return false;
    }

    boolean allFormsApproved = totalCount == approvedCount;
    return allFormsApproved;
}

From source file:info.archinnov.achilles.it.TestEntityWithStaticCounterColumn.java

@Test
public void should_dsl_update_static() throws Exception {
    //Given
    final long id = RandomUtils.nextLong(0L, Long.MAX_VALUE);
    final long staticCount = RandomUtils.nextLong(0L, Long.MAX_VALUE);

    //When
    manager.dsl().updateStatic().fromBaseTable().staticCount_Incr(staticCount).where().id_Eq(id).execute();

    //Then
    final Row actual = session.execute("SELECT static_count FROM entity_static_counter WHERE id = " + id).one();

    assertThat(actual).isNotNull();
    assertThat(actual.getLong("static_count")).isEqualTo(staticCount);
}