Example usage for java.lang Long MAX_VALUE

List of usage examples for java.lang Long MAX_VALUE

Introduction

On this page you can find example usage for java.lang Long MAX_VALUE.

Prototype

long MAX_VALUE

To view the source code for java.lang Long MAX_VALUE.

Click Source Link

Document

A constant holding the maximum value a long can have, 2^63 - 1.

Usage

From source file:com.antsdb.saltedfish.sql.FishMetaUtilMain.java

/** Prints one line per row of the SYSTABLE system table, or an error if it is missing. */
private void listTables() {
    GTableReadOnly gtable = humpback.getTable(-TableId.SYSTABLE.ordinal());
    if (gtable == null) {
        println("error: SYSTABLE is not found");
        return;
    }
    // Full-range scan: [0, Long.MAX_VALUE) covers every row version.
    RowIterator iter = gtable.scan(0, Long.MAX_VALUE);
    while (iter.next()) {
        println(new SysTableRow(iter.getRow()).toString());
    }
}

From source file:info.archinnov.achilles.it.TestDSLEntityWithClusterings.java

@Test
public void should_dsl_select_one() throws Exception {
    // Given: a single row inserted with a random partition key and time-based clustering keys
    final long id = RandomUtils.nextLong(0L, Long.MAX_VALUE);
    final UUID uuid = UUIDs.timeBased();
    final Date date = buildDateKey();

    scriptExecutor.executeScriptTemplate("EntityWithClusteringColumns/insert_single_row.cql",
            ImmutableMap.of("id", id, "uuid", uuid));

    // When: selecting that row back by its full primary key
    final EntityWithClusteringColumns actual = manager.dsl()
            .select()
            .value()
            .fromBaseTable()
            .where()
            .id_Eq(id)
            .uuid_Eq(uuid)
            .date_Eq(date)
            .getOne();

    // Then
    assertThat(actual).isNotNull();
    assertThat(actual.getValue()).isEqualTo("val");
}

From source file:com.github.bluetiger9.nosql.benchmarking.benchmarks.GenericPerformanceBenchmark.java

/**
 * Writes aggregate latency statistics (ops, avg, min, max — latencies are in
 * nanoseconds, printed in milliseconds) for {@code operation} to {@code writer},
 * then exports the raw time series of every task to a file in {@code outputFolder}.
 * Produces no output when the operation has no recorded measurements.
 *
 * @param writer       destination for the human-readable statistics
 * @param operation    operation name used as key into each task's latency map
 * @param outputFolder directory receiving the "<operation>.txt" time-series file
 */
private void exportOperationResults(PrintWriter writer, String operation, File outputFolder) {
    long sum = 0;
    long min = Long.MAX_VALUE;
    // Fix: was initialized to -1, which mirrors min incorrectly — a data set
    // containing only negative latencies would have reported max = -1.
    long max = Long.MIN_VALUE;
    long count = 0;
    for (BenchmarkTask task : benchmarkTasks) {
        for (TimeMeasurment tm : task.getLatencies().get(operation)) {
            long latency = tm.getLatency();
            count++;
            sum += latency;
            if (latency < min)
                min = latency;
            if (latency > max)
                max = latency;
        }
    }

    if (count != 0) {
        writer.println("# " + operation + " latencies");
        writer.println(String.format("%s.ops=%d", operation, count));
        // 1e6 converts nanoseconds to milliseconds.
        writer.println(String.format("%s.avg=%.4f ms", operation, sum / 1000000.0 / count));
        writer.println(String.format("%s.min=%.4f ms", operation, min / 1000000.0));
        writer.println(String.format("%s.max=%.4f ms", operation, max / 1000000.0));
        writer.println();

        try {
            // NOTE(review): every task exports to the same tsFile — verify
            // exportTimeSeries appends rather than overwrites.
            final File tsFile = new File(outputFolder, operation + ".txt");
            for (BenchmarkTask task : benchmarkTasks) {
                ExportTools.exportTimeSeries(task.getLatencies().get(operation), tsFile);
            }
        } catch (IOException e) {
            logger.error(e);
        }
    }
}

From source file:lennard.PiRecordReader.java

/**
 * Opens the split's file and positions the reader at the first whole line.
 *
 * For an uncompressed split that does not start at byte 0, the reader backs up
 * one byte and discards the first (partial) line, re-establishing {@code start}
 * at the beginning of the next full line. For a compressed file the whole file
 * is one split, so {@code end} is pushed to {@link Long#MAX_VALUE}.
 *
 * @param job   Hadoop job configuration (supplies the max line length)
 * @param split the file split this reader will consume
 * @throws IOException if the file cannot be opened or seeked
 */
public PiRecordReader(Configuration job, FileSplit split) throws IOException {
    this.maxLineLength = job.getInt("mapred.linerecordreader.maxlength", Integer.MAX_VALUE);
    start = split.getStart();
    end = start + split.getLength();
    final Path file = split.getPath();
    compressionCodecs = new CompressionCodecFactory(job);
    final CompressionCodec codec = compressionCodecs.getCodec(file);

    // open the file and seek to the start of the split
    FileSystem fs = file.getFileSystem(job);
    FSDataInputStream fileIn = fs.open(split.getPath());
    boolean skipFirstLine = false;

    if (codec != null) {
        // Compressed input cannot be split: read from the codec stream to EOF.
        in = new LineReader(codec.createInputStream(fileIn), job);
        end = Long.MAX_VALUE;
    } else {
        if (start != 0) {
            // Mid-file split: back up one byte so the partial first line
            // (owned by the previous split) can be detected and skipped.
            skipFirstLine = true;
            --start;
            fileIn.seek(start);
        }
        in = new LineReader(fileIn, job);
    }
    if (skipFirstLine) { // skip first line and re-establish "start".
        start += in.readLine(new Text(), 0, (int) Math.min((long) Integer.MAX_VALUE, end - start));
    }
    this.pos = start;
}

From source file:com.google.cloud.bigtable.mapreduce.Export.java

/**
 * Builds the Scan for the export job from optional positional CLI arguments
 * (args[2]=max versions, args[3]=start time, args[4]=end time) and from the
 * job configuration (row range, column family, raw mode, batching, filter).
 *
 * @param conf job configuration
 * @param args command-line arguments
 * @return the fully configured Scan
 * @throws IOException if the export filter cannot be built
 */
private static Scan getConfiguredScanForJob(Configuration conf, String[] args) throws IOException {
    Scan scan = new Scan();

    // Versions to keep per cell (defaults to 1).
    int versions = args.length > 2 ? Integer.parseInt(args[2]) : 1;
    scan.setMaxVersions(versions);

    // Time range: defaults to [0, Long.MAX_VALUE) — i.e. everything.
    long startTime = args.length > 3 ? Long.parseLong(args[3]) : 0L;
    long endTime = args.length > 4 ? Long.parseLong(args[4]) : Long.MAX_VALUE;
    scan.setTimeRange(startTime, endTime);

    // Full-table export: block caching would only pollute the cache.
    scan.setCacheBlocks(false);

    String startRow = conf.get(TableInputFormat.SCAN_ROW_START);
    if (startRow != null) {
        scan.setStartRow(Bytes.toBytes(startRow));
    }
    String stopRow = conf.get(TableInputFormat.SCAN_ROW_STOP);
    if (stopRow != null) {
        scan.setStopRow(Bytes.toBytes(stopRow));
    }

    // Raw mode also returns delete markers.
    boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
    if (raw) {
        scan.setRaw(raw);
    }

    String family = conf.get(TableInputFormat.SCAN_COLUMN_FAMILY);
    if (family != null) {
        scan.addFamily(Bytes.toBytes(family));
    }

    Filter exportFilter = getExportFilter(args);
    if (exportFilter != null) {
        LOG.info("Setting Scan Filter for Export.");
        scan.setFilter(exportFilter);
    }

    int batching = conf.getInt(EXPORT_BATCHING, -1);
    if (batching != -1) {
        try {
            scan.setBatch(batching);
        } catch (IncompatibleFilterException e) {
            LOG.error("Batching could not be set", e);
        }
    }

    LOG.info("versions=" + versions + ", starttime=" + startTime + ", endtime=" + endTime
            + ", keepDeletedCells=" + raw);
    return scan;
}

From source file:com.redhat.lightblue.metadata.types.DoubleTypeTest.java

@Test
public void testCastLong() {
    // Casting the largest possible long must still yield a Double instance.
    final Long maxLong = Long.MAX_VALUE;
    assertTrue(doubleType.cast(maxLong) instanceof Double);
}

From source file:hoot.services.models.osm.Changeset.java

/**
 * Creates a new changeset/*  w  w w.ja va 2 s .co m*/
 *
 * @param changesetDoc changeset create XML
 * @param mapId ID of the map owning the changeset
 * @param userId ID of the user creating the changeset
 * @param conn JDBC Connection
 * @return ID of the created changeset
 * @throws Exception
 */
public static long createChangeset(final Document changesetDoc, final long mapId, final long userId,
        Connection dbConn) throws Exception {
    final long changesetId = Changeset.insertNew(mapId, userId, dbConn);
    if (changesetId == Long.MAX_VALUE || changesetId < 1) {
        throw new Exception("Invalid changeset ID: " + changesetId);
    }

    (new Changeset(mapId, changesetId, dbConn)).insertTags(mapId,
            XPathAPI.selectNodeList(changesetDoc, "//changeset/tag"), dbConn);

    return changesetId;
}

From source file:uk.codingbadgers.bootstrap.download.Sha1Download.java

/**
 * Downloads the remote artifact, verifying it against the SHA-1 hash published
 * next to it ({@code remote + ".sha1"}). The download is skipped when the local
 * copy already matches the published hash.
 *
 * Fixes over the previous version: the HTTP client, output stream, and byte
 * channel are now closed (try-with-resources); the transfer loops until EOF
 * instead of assuming a single {@code transferFrom} call reads everything; the
 * hash-mismatch message no longer swaps the expected and actual values.
 *
 * @throws BootstrapException on I/O failure, a non-200 response, or hash mismatch
 */
@Override
public void download() {
    String hash = null;

    try (CloseableHttpClient client = HttpClients.createDefault()) {
        // Get sha1 hash from repo
        try {
            HttpGet request = new HttpGet(remote + ".sha1");
            HttpResponse response = client.execute(request);

            StatusLine status = response.getStatusLine();
            if (status.getStatusCode() == HttpStatus.SC_OK) {
                HttpEntity entity = response.getEntity();
                hash = EntityUtils.toString(entity);
                EntityUtils.consume(entity);
            }
        } catch (IOException ex) {
            throw new BootstrapException(ex);
        }

        if (local.exists()) {
            String localHash = ChecksumGenerator.createSha1(local);

            if (hash != null && hash.equalsIgnoreCase(localHash)) {
                System.out.println("File " + local.getName() + " is up to date with remote, no need to download");
                return;
            }
        }

        if (!local.getParentFile().exists()) {
            local.getParentFile().mkdirs();
        }

        // Download library from remote
        try {
            HttpGet request = new HttpGet(remote);
            HttpResponse response = client.execute(request);

            StatusLine status = response.getStatusLine();
            if (status.getStatusCode() == HttpStatus.SC_OK) {
                System.err.println("Downloading " + local.getName());

                HttpEntity entity = response.getEntity();

                // Close both the file and the source channel, and loop:
                // transferFrom may transfer fewer bytes than requested when the
                // source is a network-backed stream.
                try (FileOutputStream fos = new FileOutputStream(local);
                        ReadableByteChannel rbc = Channels.newChannel(entity.getContent())) {
                    long position = 0;
                    long transferred;
                    while ((transferred = fos.getChannel().transferFrom(rbc, position, 1 << 16)) > 0) {
                        position += transferred;
                    }
                }

                EntityUtils.consume(entity);

                String localHash = ChecksumGenerator.createSha1(local);
                if (hash != null && !localHash.equalsIgnoreCase(hash)) {
                    // "expected" is the hash published by the repo; "got" is the
                    // hash of what we actually downloaded (was reversed before).
                    throw new BootstrapException("Error downloading file (" + local.getName()
                            + ")\n[expected hash: " + hash + " but got " + localHash + "]");
                }

                System.out.println("Downloaded " + local.getName());

            } else {
                throw new BootstrapException("Error download update for " + local.getName() + ", Error: "
                        + status.getStatusCode() + status.getReasonPhrase());
            }
        } catch (IOException ex) {
            throw new BootstrapException(ex);
        }
    } catch (IOException ex) {
        // Failure closing the HTTP client.
        throw new BootstrapException(ex);
    }
}

From source file:com.zjy.mongo.input.MongoInputSplit.java

@Override
public long getLength() {
    // NOTE(review): presumably the split size is unknown up front for a Mongo
    // query, so the maximum is reported — confirm against the InputSplit contract.
    return Long.MAX_VALUE;
}

From source file:com.haulmont.chile.core.datatypes.impl.LongDatatype.java

/**
 * Checks whether {@code result} fits into the {@code long} value range.
 * Non-Double numbers are accepted as-is; a Double is rejected when it lies
 * outside [Long.MIN_VALUE, Long.MAX_VALUE] or is NaN.
 *
 * @param result parsed number to validate
 * @return true if the value can be represented as a long
 * @throws ParseException declared for subclasses/callers; not thrown here
 */
protected boolean hasValidLongRange(Number result) throws ParseException {
    if (result instanceof Double) {
        Double doubleResult = (Double) result;
        // Fix: NaN compares false against both bounds, so the original check
        // reported NaN as a valid long value. Reject it explicitly.
        if (doubleResult.isNaN()
                || doubleResult > Long.MAX_VALUE || doubleResult < Long.MIN_VALUE) {
            return false;
        }
    }
    return true;
}