Example usage for java.lang Long MIN_VALUE

List of usage examples for java.lang Long MIN_VALUE

Introduction

On this page you can find usage examples for java.lang.Long.MIN_VALUE.

Prototype

public static final long MIN_VALUE

Document

A constant holding the minimum value a long can have, -2^63.
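
Because long uses two's-complement representation, this minimum has no positive counterpart: Long.MAX_VALUE is 2^63 - 1, one smaller in magnitude. A minimal sketch of the values involved and the classic negation pitfall:

public class LongMinValueDemo {
    public static void main(String[] args) {
        System.out.println(Long.MIN_VALUE);            // -9223372036854775808
        System.out.println(Long.MAX_VALUE);            //  9223372036854775807
        // Negating MIN_VALUE overflows and wraps back to MIN_VALUE itself,
        // so even Math.abs can return a negative number here.
        System.out.println(-Long.MIN_VALUE);           // -9223372036854775808
        System.out.println(Math.abs(Long.MIN_VALUE));  // -9223372036854775808
    }
}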

Usage

From source file: com.antsdb.saltedfish.nosql.MemTableReadOnly.java

/**
 * @return Long.MIN_VALUE when there is pending data
 */
public long getStartTrxId() {
    long startTrxId = Long.MIN_VALUE;
    for (MemTabletReadOnly tablet : this.tablets) {
        long tabletStartTrxId = tablet.getStartTrxId();
        if (tabletStartTrxId == 0) {
            continue;
        }
        startTrxId = Math.max(startTrxId, tabletStartTrxId);
    }
    return startTrxId;
}
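
This example seeds a running maximum with Long.MIN_VALUE so that the first real value always replaces the seed. A minimal sketch of the same pattern in isolation (the input array is just a placeholder):

long max = Long.MIN_VALUE;
for (long value : new long[] { 3L, 42L, 7L }) {
    max = Math.max(max, value);
}
// max == 42; for an empty input, max stays Long.MIN_VALUE,
// which callers can treat as "no value seen".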

From source file: com.palantir.atlasdb.keyvalue.impl.InMemoryKeyValueService.java

private <T> ClosableIterator<RowResult<T>> getRangeInternal(String tableName, final RangeRequest range,
        final ResultProducer<T> resultProducer) {
    ConcurrentNavigableMap<Key, byte[]> tableMap = getTableMap(tableName).entries;
    if (range.isReverse()) {
        tableMap = tableMap.descendingMap();
    }
    if (range.getStartInclusive().length != 0) {
        if (range.isReverse()) {
            Cell startCell = Cells.createLargestCellForRow(range.getStartInclusive());
            tableMap = tableMap.tailMap(new Key(startCell, Long.MIN_VALUE));
        } else {
            Cell startCell = Cells.createSmallestCellForRow(range.getStartInclusive());
            tableMap = tableMap.tailMap(new Key(startCell, Long.MIN_VALUE));
        }
    }
    if (range.getEndExclusive().length != 0) {
        if (range.isReverse()) {
            Cell endCell = Cells.createLargestCellForRow(range.getEndExclusive());
            tableMap = tableMap.headMap(new Key(endCell, Long.MAX_VALUE));
        } else {
            Cell endCell = Cells.createSmallestCellForRow(range.getEndExclusive());
            tableMap = tableMap.headMap(new Key(endCell, Long.MAX_VALUE));
        }
    }
    final PeekingIterator<Entry<Key, byte[]>> it = Iterators.peekingIterator(tableMap.entrySet().iterator());
    return ClosableIterators.wrap(new AbstractIterator<RowResult<T>>() {
        @Override
        protected RowResult<T> computeNext() {
            while (true) {
                if (!it.hasNext()) {
                    return endOfData();
                }
                ImmutableSortedMap.Builder<byte[], T> result = ImmutableSortedMap
                        .orderedBy(UnsignedBytes.lexicographicalComparator());
                Key key = it.peek().getKey();
                byte[] row = key.row;
                Iterator<Entry<Key, byte[]>> cellIter = takeCell(it, key);
                collectValueForTimestamp(key.col, cellIter, result, range, resultProducer);

                while (it.hasNext()) {
                    if (!it.peek().getKey().matchesRow(row)) {
                        break;
                    }
                    key = it.peek().getKey();
                    cellIter = takeCell(it, key);
                    collectValueForTimestamp(key.col, cellIter, result, range, resultProducer);
                }
                SortedMap<byte[], T> columns = result.build();
                if (!columns.isEmpty()) {
                    return RowResult.create(row, columns);
                }
            }
        }

    });
}
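
Here Long.MIN_VALUE and Long.MAX_VALUE act as the extreme timestamp components of composite keys, so the tailMap and headMap bounds take in every version of the boundary cells. A simplified, self-contained sketch of the same bounding trick, assuming an illustrative (row, timestamp) key; the Key class and sample data below are hypothetical, not AtlasDB's:

import java.util.NavigableMap;
import java.util.TreeMap;

public class RangeBoundsSketch {
    // Illustrative composite key: ordered by row, then by timestamp.
    static final class Key implements Comparable<Key> {
        final String row;
        final long ts;
        Key(String row, long ts) { this.row = row; this.ts = ts; }
        @Override
        public int compareTo(Key o) {
            int c = row.compareTo(o.row);
            return c != 0 ? c : Long.compare(ts, o.ts);
        }
        @Override
        public String toString() { return row + "@" + ts; }
    }

    public static void main(String[] args) {
        NavigableMap<Key, String> map = new TreeMap<>();
        map.put(new Key("a", 5), "v1");
        map.put(new Key("b", 1), "v2");
        map.put(new Key("b", 9), "v3");
        map.put(new Key("c", 2), "v4");
        // (row, Long.MIN_VALUE) sorts before every real key of that row and
        // (row, Long.MAX_VALUE) after it, so the submap spans all versions of row "b".
        System.out.println(map.subMap(
                new Key("b", Long.MIN_VALUE), true,
                new Key("b", Long.MAX_VALUE), true));  // {b@1=v2, b@9=v3}
    }
}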

From source file: org.apache.flink.hdfstests.ContinuousFileProcessingMigrationTest.java

/**
 * Manually run this to write binary snapshot data. Remove @Ignore to run.
 */
@Ignore
@Test
public void writeMonitoringSourceSnapshot() throws Exception {

    File testFolder = tempFolder.newFolder();

    long fileModTime = Long.MIN_VALUE;
    for (int i = 0; i < 1; i++) {
        Tuple2<File, String> file = createFileAndFillWithData(testFolder, "file", i, "This is test line.");
        fileModTime = file.f0.lastModified();
    }

    TextInputFormat format = new TextInputFormat(new Path(testFolder.getAbsolutePath()));

    final ContinuousFileMonitoringFunction<String> monitoringFunction = new ContinuousFileMonitoringFunction<>(
            format, FileProcessingMode.PROCESS_CONTINUOUSLY, 1, INTERVAL);

    StreamSource<TimestampedFileInputSplit, ContinuousFileMonitoringFunction<String>> src = new StreamSource<>(
            monitoringFunction);

    final AbstractStreamOperatorTestHarness<TimestampedFileInputSplit> testHarness = new AbstractStreamOperatorTestHarness<>(
            src, 1, 1, 0);

    testHarness.open();

    final Throwable[] error = new Throwable[1];

    final OneShotLatch latch = new OneShotLatch();

    // run the source asynchronously
    Thread runner = new Thread() {
        @Override
        public void run() {
            try {
                monitoringFunction.run(new DummySourceContext() {
                    @Override
                    public void collect(TimestampedFileInputSplit element) {
                        latch.trigger();
                    }

                    @Override
                    public void markAsTemporarilyIdle() {

                    }
                });
            } catch (Throwable t) {
                t.printStackTrace();
                error[0] = t;
            }
        }
    };
    runner.start();

    if (!latch.isTriggered()) {
        latch.await();
    }

    final OperatorSubtaskState snapshot;
    synchronized (testHarness.getCheckpointLock()) {
        snapshot = testHarness.snapshot(0L, 0L);
    }

    OperatorSnapshotUtil.writeStateHandle(snapshot, "src/test/resources/monitoring-function-migration-test-"
            + fileModTime + "-flink" + flinkGenerateSavepointVersion + "-snapshot");

    monitoringFunction.cancel();
    runner.join();

    testHarness.close();
}

From source file: hoot.services.controllers.osm.MapResource.java

private static Document generateExtentOSM(String maxlon, String maxlat, String minlon, String minlat) {
    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
    Date now = new Date();
    String strDate = sdfDate.format(now);

    try {
        DocumentBuilderFactory dbf = XmlDocumentBuilder.getSecureDocBuilderFactory();
        dbf.setValidating(false);
        DocumentBuilder db = dbf.newDocumentBuilder();
        Document doc = db.newDocument();

        Element osmElem = doc.createElement("osm");
        osmElem.setAttribute("version", "0.6");
        osmElem.setAttribute("generator", "hootenanny");
        doc.appendChild(osmElem);

        Element boundsElem = doc.createElement("bounds");
        boundsElem.setAttribute("minlat", minlat);
        boundsElem.setAttribute("minlon", minlon);
        boundsElem.setAttribute("maxlat", maxlat);
        boundsElem.setAttribute("maxlon", maxlon);
        osmElem.appendChild(boundsElem);

        // The IDs of these fabricated nodes were colliding with the IDs of actual nodes, so they are
        // made large and negative to ensure they cannot collide with anything.

        long node1Id = Long.MIN_VALUE + 3;
        long node2Id = Long.MIN_VALUE + 2;
        long node3Id = Long.MIN_VALUE + 1;
        long node4Id = Long.MIN_VALUE;

        Element nodeElem = doc.createElement("node");
        nodeElem.setAttribute("id", String.valueOf(node1Id));
        nodeElem.setAttribute("timestamp", strDate);
        nodeElem.setAttribute("user", "hootenannyuser");
        nodeElem.setAttribute("visible", "true");
        nodeElem.setAttribute("version", "1");
        nodeElem.setAttribute("lat", maxlat);
        nodeElem.setAttribute("lon", minlon);
        osmElem.appendChild(nodeElem);

        nodeElem = doc.createElement("node");
        nodeElem.setAttribute("id", String.valueOf(node2Id));
        nodeElem.setAttribute("timestamp", strDate);
        nodeElem.setAttribute("user", "hootenannyuser");
        nodeElem.setAttribute("visible", "true");
        nodeElem.setAttribute("version", "1");
        nodeElem.setAttribute("lat", maxlat);
        nodeElem.setAttribute("lon", maxlon);
        osmElem.appendChild(nodeElem);

        nodeElem = doc.createElement("node");
        nodeElem.setAttribute("id", String.valueOf(node3Id));
        nodeElem.setAttribute("timestamp", strDate);
        nodeElem.setAttribute("user", "hootenannyuser");
        nodeElem.setAttribute("visible", "true");
        nodeElem.setAttribute("version", "1");
        nodeElem.setAttribute("lat", minlat);
        nodeElem.setAttribute("lon", maxlon);
        osmElem.appendChild(nodeElem);

        nodeElem = doc.createElement("node");
        nodeElem.setAttribute("id", String.valueOf(node4Id));
        nodeElem.setAttribute("timestamp", strDate);
        nodeElem.setAttribute("user", "hootenannyuser");
        nodeElem.setAttribute("visible", "true");
        nodeElem.setAttribute("version", "1");
        nodeElem.setAttribute("lat", minlat);
        nodeElem.setAttribute("lon", minlon);
        osmElem.appendChild(nodeElem);

        Element wayElem = doc.createElement("way");
        wayElem.setAttribute("id", String.valueOf(Long.MIN_VALUE));
        wayElem.setAttribute("timestamp", strDate);
        wayElem.setAttribute("user", "hootenannyuser");
        wayElem.setAttribute("visible", "true");
        wayElem.setAttribute("version", "1");

        Element ndElem = doc.createElement("nd");
        ndElem.setAttribute("ref", String.valueOf(node1Id));
        wayElem.appendChild(ndElem);

        ndElem = doc.createElement("nd");
        ndElem.setAttribute("ref", String.valueOf(node2Id));
        wayElem.appendChild(ndElem);

        ndElem = doc.createElement("nd");
        ndElem.setAttribute("ref", String.valueOf(node3Id));
        wayElem.appendChild(ndElem);

        ndElem = doc.createElement("nd");
        ndElem.setAttribute("ref", String.valueOf(node4Id));
        wayElem.appendChild(ndElem);

        ndElem = doc.createElement("nd");
        ndElem.setAttribute("ref", String.valueOf(node1Id));
        wayElem.appendChild(ndElem);

        /*
         * ndElem = doc.createElement("tag"); ndElem.setAttribute("k", "area");
         * ndElem.setAttribute("v", "yes"); wayElem.appendChild(ndElem);
         */

        osmElem.appendChild(wayElem);

        Transformer tf = TransformerFactory.newInstance().newTransformer();

        // Fortify may require this, but it doesn't work.
        // TransformerFactory transformerFactory =
        // XmlDocumentBuilder.getSecureTransformerFactory();
        tf.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
        tf.setOutputProperty(OutputKeys.INDENT, "yes");

        try (Writer out = new StringWriter()) {
            tf.transform(new DOMSource(doc), new StreamResult(out));
            logger.debug("Layer Extent OSM: {}", out);
        }

        return doc;
    } catch (Exception e) {
        throw new RuntimeException("Error generating OSM extent", e);
    }
}
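
The fabricated node and way IDs are carved out of the extreme negative end of the long range, counting up from Long.MIN_VALUE, so they can never collide with real, positive OSM IDs. A minimal sketch of that allocation scheme:

// Synthetic IDs occupy the extreme negative end of the long range.
long nextSyntheticId = Long.MIN_VALUE;
long[] nodeIds = new long[4];
for (int i = 0; i < nodeIds.length; i++) {
    nodeIds[i] = nextSyntheticId++;  // MIN_VALUE, MIN_VALUE + 1, MIN_VALUE + 2, ...
}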

From source file: edu.usc.goffish.gopher.sample.stats.N_Hop_Stats.java

private void init() throws IOException {
    //get current iteration
    currentIteration = getIteration();

    /**
     * Init the property filters
     */
    List<Property> properties = new ArrayList<>(1);
    properties.add(subgraph.getEdgeProperties().getProperty(LATENCY_PROP));
    properties.add(subgraph.getEdgeProperties().getProperty(IS_EXIST_PROP));
    properties.add(subgraph.getEdgeProperties().getProperty(HOP_PROP));
    //properties.add(subgraph.getEdgeProperties().getProperty(VANTAGE_IP_PROP));

    /**
     * Load the instance iterator over the full time range, Long.MIN_VALUE to Long.MAX_VALUE.
     * Note that the instances will not all be loaded into memory.
     */
    instanceIterator = subgraph.getInstances(Long.MIN_VALUE, Long.MAX_VALUE, PropertySet.EmptyPropertySet,
            new PropertySet(properties), false).iterator();
    currentInstance = instanceIterator.hasNext() ? instanceIterator.next() : null;

}

From source file: ldbc.snb.datagen.generator.LDBCDatagen.java

public int runGenerateJob(Configuration conf) throws Exception {

    String hadoopPrefix = conf.get("ldbc.snb.datagen.serializer.hadoopDir");
    FileSystem fs = FileSystem.get(conf);
    ArrayList<Float> percentages = new ArrayList<Float>();
    percentages.add(0.45f);
    percentages.add(0.45f);
    percentages.add(0.1f);

    long start = System.currentTimeMillis();
    printProgress("Starting: Person generation");
    long startPerson = System.currentTimeMillis();
    HadoopPersonGenerator personGenerator = new HadoopPersonGenerator(conf);
    personGenerator.run(hadoopPrefix + "/persons", "ldbc.snb.datagen.hadoop.UniversityKeySetter");
    long endPerson = System.currentTimeMillis();

    printProgress("Creating university location correlated edges");
    long startUniversity = System.currentTimeMillis();
    HadoopKnowsGenerator knowsGenerator = new HadoopKnowsGenerator(conf,
            "ldbc.snb.datagen.hadoop.UniversityKeySetter", "ldbc.snb.datagen.hadoop.RandomKeySetter",
            percentages, 0, conf.get("ldbc.snb.datagen.generator.knowsGenerator"));

    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/universityEdges");
    long endUniversity = System.currentTimeMillis();

    printProgress("Creating main interest correlated edges");
    long startInterest = System.currentTimeMillis();

    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.InterestKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 1,
            conf.get("ldbc.snb.datagen.generator.knowsGenerator"));

    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/interestEdges");
    long endInterest = System.currentTimeMillis();

    printProgress("Creating random correlated edges");
    long startRandom = System.currentTimeMillis();

    knowsGenerator = new HadoopKnowsGenerator(conf, "ldbc.snb.datagen.hadoop.RandomKeySetter",
            "ldbc.snb.datagen.hadoop.RandomKeySetter", percentages, 2,
            "ldbc.snb.datagen.generator.RandomKnowsGenerator");

    knowsGenerator.run(hadoopPrefix + "/persons", hadoopPrefix + "/randomEdges");
    long endRandom = System.currentTimeMillis();

    fs.delete(new Path(DatagenParams.hadoopDir + "/persons"), true);
    printProgress("Merging the different edge files");
    ArrayList<String> edgeFileNames = new ArrayList<String>();
    edgeFileNames.add(hadoopPrefix + "/universityEdges");
    edgeFileNames.add(hadoopPrefix + "/interestEdges");
    edgeFileNames.add(hadoopPrefix + "/randomEdges");
    long startMerge = System.currentTimeMillis();
    HadoopMergeFriendshipFiles merger = new HadoopMergeFriendshipFiles(conf,
            "ldbc.snb.datagen.hadoop.RandomKeySetter");
    merger.run(hadoopPrefix + "/mergedPersons", edgeFileNames);
    long endMerge = System.currentTimeMillis();

    printProgress("Serializing persons");
    long startPersonSerializing = System.currentTimeMillis();
    if (!conf.getBoolean("ldbc.snb.datagen.serializer.persons.sort", false)) {
        HadoopPersonSerializer serializer = new HadoopPersonSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    } else {
        HadoopPersonSortAndSerializer serializer = new HadoopPersonSortAndSerializer(conf);
        serializer.run(hadoopPrefix + "/mergedPersons");
    }
    long endPersonSerializing = System.currentTimeMillis();

    long startPersonActivity = System.currentTimeMillis();
    if (conf.getBoolean("ldbc.snb.datagen.generator.activity", true)) {
        printProgress("Generating and serializing person activity");
        HadoopPersonActivityGenerator activityGenerator = new HadoopPersonActivityGenerator(conf);
        activityGenerator.run(hadoopPrefix + "/mergedPersons");

        int numThreads = DatagenParams.numThreads;
        int blockSize = DatagenParams.blockSize;
        int numBlocks = (int) Math.ceil(DatagenParams.numPersons / (double) blockSize);

        for (int i = 0; i < numThreads; ++i) {
            if (i < numBlocks) {
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m" + i + "personFactors.txt"),
                        new Path("./"));
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m" + i + "activityFactors.txt"),
                        new Path("./"));
                fs.copyToLocalFile(false, new Path(DatagenParams.hadoopDir + "/m0friendList" + i + ".csv"),
                        new Path("./"));
            }
        }
    }
    long endPersonActivity = System.currentTimeMillis();

    long startSortingUpdateStreams = System.currentTimeMillis();

    if (conf.getBoolean("ldbc.snb.datagen.serializer.updateStreams", false)) {

        printProgress("Sorting update streams ");

        List<String> personStreamsFileNames = new ArrayList<String>();
        List<String> forumStreamsFileNames = new ArrayList<String>();
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            int numPartitions = conf.getInt("ldbc.snb.datagen.serializer.numUpdatePartitions", 1);
            for (int j = 0; j < numPartitions; ++j) {
                personStreamsFileNames
                        .add(DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + "_" + j);
                if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                    forumStreamsFileNames
                            .add(DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + "_" + j);
                }
            }
        }
        HadoopUpdateStreamSorterAndSerializer updateSorterAndSerializer = new HadoopUpdateStreamSorterAndSerializer(
                conf);
        updateSorterAndSerializer.run(personStreamsFileNames, "person");
        updateSorterAndSerializer.run(forumStreamsFileNames, "forum");
        for (String file : personStreamsFileNames) {
            fs.delete(new Path(file), true);
        }

        for (String file : forumStreamsFileNames) {
            fs.delete(new Path(file), true);
        }

        long minDate = Long.MAX_VALUE;
        long maxDate = Long.MIN_VALUE;
        long count = 0;
        for (int i = 0; i < DatagenParams.numThreads; ++i) {
            Path propertiesFile = new Path(
                    DatagenParams.hadoopDir + "/temp_updateStream_person_" + i + ".properties");
            FSDataInputStream file = fs.open(propertiesFile);
            Properties properties = new Properties();
            properties.load(file);
            long aux;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time"));
            minDate = aux < minDate ? aux : minDate;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time"));
            maxDate = aux > maxDate ? aux : maxDate;
            aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events"));
            count += aux;
            file.close();
            fs.delete(propertiesFile, true);

            if (conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
                propertiesFile = new Path(
                        DatagenParams.hadoopDir + "/temp_updateStream_forum_" + i + ".properties");
                file = fs.open(propertiesFile);
                properties = new Properties();
                properties.load(file);
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.min_write_event_start_time"));
                minDate = aux < minDate ? aux : minDate;
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.max_write_event_start_time"));
                maxDate = aux > maxDate ? aux : maxDate;
                aux = Long.parseLong(properties.getProperty("ldbc.snb.interactive.num_events"));
                count += aux;
                file.close();
                fs.delete(propertiesFile, true);
            }
        }

        OutputStream output = fs
                .create(new Path(DatagenParams.socialNetworkDir + "/updateStream" + ".properties"), true);
        output.write(new String("ldbc.snb.interactive.gct_delta_duration:" + DatagenParams.deltaTime + "\n")
                .getBytes());
        output.write(
                new String("ldbc.snb.interactive.min_write_event_start_time:" + minDate + "\n").getBytes());
        output.write(
                new String("ldbc.snb.interactive.max_write_event_start_time:" + maxDate + "\n").getBytes());
        output.write(new String("ldbc.snb.interactive.update_interleave:" + (maxDate - minDate) / count + "\n")
                .getBytes());
        output.write(new String("ldbc.snb.interactive.num_events:" + count).getBytes());
        output.close();
    }

    long endSortingUpdateStreams = System.currentTimeMillis();

    printProgress("Serializing invariant schema ");
    long startInvariantSerializing = System.currentTimeMillis();
    HadoopInvariantSerializer invariantSerializer = new HadoopInvariantSerializer(conf);
    invariantSerializer.run();
    long endInvariantSerializing = System.currentTimeMillis();

    long end = System.currentTimeMillis();

    System.out.println(((end - start) / 1000) + " total seconds");
    System.out.println("Person generation time: " + ((endPerson - startPerson) / 1000));
    System.out.println(
            "University correlated edge generation time: " + ((endUniversity - startUniversity) / 1000));
    System.out.println("Interest correlated edge generation time: " + ((endInterest - startInterest) / 1000));
    System.out.println("Random correlated edge generation time: " + ((endRandom - startRandom) / 1000));
    System.out.println("Edges merge time: " + ((endMerge - startMerge) / 1000));
    System.out
            .println("Person serialization time: " + ((endPersonSerializing - startPersonSerializing) / 1000));
    System.out.println("Person activity generation and serialization time: "
            + ((endPersonActivity - startPersonActivity) / 1000));
    System.out.println(
            "Sorting update streams time: " + ((endSortingUpdateStreams - startSortingUpdateStreams) / 1000));
    System.out.println("Invariant schema serialization time: "
            + ((endInvariantSerializing - startInvariantSerializing) / 1000));
    System.out.println("Total Execution time: " + ((end - start) / 1000));

    if (conf.getBoolean("ldbc.snb.datagen.parametergenerator.parameters", false)
            && conf.getBoolean("ldbc.snb.datagen.generator.activity", false)) {
        System.out.println("Running Parameter Generation");
        System.out.println("Generating Interactive Parameters");
        ProcessBuilder pb = new ProcessBuilder("mkdir", "-p",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        Process p = pb.start();
        p.waitFor();

        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparams.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logInteractive = new File("parameters_interactive.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logInteractive));
        p = pb.start();
        p.waitFor();

        System.out.println("Generating BI Parameters");
        pb = new ProcessBuilder(conf.get("ldbc.snb.datagen.parametergenerator.python"),
                "paramgenerator/generateparamsbi.py", "./",
                conf.get("ldbc.snb.datagen.serializer.outputDir") + "/substitution_parameters");
        pb.directory(new File("./"));
        File logBi = new File("parameters_bi.log");
        pb.redirectErrorStream(true);
        pb.redirectOutput(ProcessBuilder.Redirect.appendTo(logBi));
        p = pb.start();
        p.waitFor();
        System.out.println("Finished Parameter Generation");
    }
    return 0;
}

From source file: org.waarp.openr66.protocol.http.rest.handler.HttpRestControlR66Handler.java

protected ArrayNode getDetailedAllow() {
    ArrayNode node = JsonHandler.createArrayNode();

    if (this.methods.contains(METHOD.GET)) {
        InformationJsonPacket node3 = new InformationJsonPacket(Long.MIN_VALUE, false, "remoteHost");
        node3.setComment("Information on Transfer request (GET)");
        ArrayNode node1 = JsonHandler.createArrayNode();
        ObjectNode node1b = JsonHandler.createObjectNode();
        node1b.put(DbTaskRunner.JSON_MODEL, DbTaskRunner.class.getSimpleName());
        DbValue[] values = DbTaskRunner.getAllType();
        for (DbValue dbValue : values) {
            node1b.put(dbValue.column, dbValue.getType());
        }
        node1.add(node1b);
        ObjectNode node2;
        try {
            node2 = RestArgument.fillDetailedAllow(METHOD.GET, this.path,
                    ACTIONS_TYPE.GetTransferInformation.name(), node3.createObjectNode(), node1);
            node.add(node2);
        } catch (OpenR66ProtocolPacketException e1) {
        }
    }
    if (this.methods.contains(METHOD.PUT)) {
        RestartTransferJsonPacket node4 = new RestartTransferJsonPacket();
        node4.setRequestUserPacket();
        node4.setComment("Restart Transfer request (PUT)");
        node4.setRequested("Requested host");
        node4.setRequester("Requester host");
        node4.setRestarttime(new Date());
        ArrayNode node1 = JsonHandler.createArrayNode();
        try {
            node1.add(node4.createObjectNode());
            ObjectNode node2 = RestArgument.fillDetailedAllow(METHOD.PUT, this.path,
                    ACTIONS_TYPE.RestartTransfer.name(), node4.createObjectNode(), node1);
            node.add(node2);
        } catch (OpenR66ProtocolPacketException e1) {
        }
        StopOrCancelJsonPacket node5 = new StopOrCancelJsonPacket();
        node5.setRequestUserPacket();
        node5.setComment("Stop Or Cancel request (PUT)");
        node5.setRequested("Requested host");
        node5.setRequester("Requester host");
        node1 = JsonHandler.createArrayNode();
        try {
            node1.add(node5.createObjectNode());
            ObjectNode node2 = RestArgument.fillDetailedAllow(METHOD.PUT, this.path,
                    ACTIONS_TYPE.StopOrCancelTransfer.name(), node5.createObjectNode(), node1);
            node.add(node2);
        } catch (OpenR66ProtocolPacketException e1) {
        }
    }
    if (this.methods.contains(METHOD.POST)) {
        TransferRequestJsonPacket node6 = new TransferRequestJsonPacket();
        node6.setRequestUserPacket();
        node6.setComment("Transfer Request (POST)");
        node6.setFilename("Filename");
        node6.setFileInformation("File information");
        node6.setRequested("Requested host");
        node6.setRulename("Rulename");
        node6.setStart(new Date());
        ArrayNode node1 = JsonHandler.createArrayNode();
        try {
            node1.add(node6.createObjectNode());
            ObjectNode node2 = RestArgument.fillDetailedAllow(METHOD.POST, this.path,
                    ACTIONS_TYPE.CreateTransfer.name(), node6.createObjectNode(), node1);
            node.add(node2);
        } catch (OpenR66ProtocolPacketException e1) {
        }
    }

    ObjectNode node2 = RestArgument.fillDetailedAllow(METHOD.OPTIONS, this.path, COMMAND_TYPE.OPTIONS.name(),
            null, null);
    node.add(node2);

    return node;
}

From source file: br.com.blackhubos.eventozero.updater.github.searcher.GitHubSearcher.java

@SuppressWarnings("unchecked")
private void processJsonObject(JSONObject jobject, MultiTypeFormatter formatter,
        Collection<Version> versionList) {
    /**
     * Fields collected for {@link Version}
     */
    String name = null;
    String version = null;
    Collection<Asset> downloadUrl = new ArrayList<>();
    String commitish = null;
    String changelog = null;
    Date creationDate = null;
    Date publishDate = null;
    long id = Long.MIN_VALUE;
    boolean criticalBug = false;
    boolean preRelease = false;
    List<String> supportedVersions = new ArrayList<>();
    /**
     * End of fields for {@link Version}
     */

    for (Map.Entry object : (Set<Map.Entry>) jobject.entrySet()) {

        Object key = object.getKey();
        Object value = object.getValue();
        String stringValue = String.valueOf(value);
        switch (GitHubAPIInput.parseObject(key)) {
        // The tag is usually the version
        case TAG_NAME: {
            version = stringValue;
            break;
        }

        // Creation date
        case CREATED_AT: {
            creationDate = formatter.format(stringValue, Date.class).get();
            break;
        }

        // Publication date
        case PUBLISHED_AT: {
            publishDate = formatter.format(stringValue, Date.class).get();
            break;
        }

        // Assets/artifacts or files (processed externally)
        case ASSETS: {
            // Array holding multiple assets
            JSONArray jsonArray = (JSONArray) value;

            for (Object assetsJsonObject : jsonArray) {
                // Get the object from the assets array
                JSONObject jsonAsset = (JSONObject) assetsJsonObject;
                // Get the asset from the object
                Optional<Asset> assetOptional = Asset.parseJsonObject(jsonAsset, formatter);
                // Good to avoid a null, right? :P
                if (assetOptional.isPresent()) {
                    // Add the asset if it was found
                    downloadUrl.add(assetOptional.get());
                }
            }
            break;
        }

        // Get the name (title) of the version
        case NAME: {
            name = stringValue;
            break;
        }

        // GitHub identification number (not sure we will even use it)
        case ID: {
            id = Long.parseLong(stringValue);
            break;
        }

        // Get the message, usually our changelog, and determine whether this is a critical-bug release
        case BODY: {
            changelog = stringValue;
            // Flag critical-bug releases
            criticalBug = changelog.endsWith("!!!CRITICAL BUG FOUND!!!")
                    || changelog.endsWith("CRITICAL BUG FOUND") || changelog.endsWith("CRITICAL BUG");

            // Regex to find the line listing the supported versions
            Pattern supportedPattern = Pattern.compile("^(Versões|Supported)", Pattern.CASE_INSENSITIVE);

            // Loop over the lines
            for (String line : changelog.split("\n")) {
                // Look for the regex in the line
                if (supportedPattern.matcher(line).find()) {
                    // Strip the letters
                    line = line.replaceAll("[^\\d. ]+", "").trim();
                    // Add to the list
                    supportedVersions.addAll(Arrays.asList(line.split(" ")));
                }
            }

            break;
        }

        // Parse the boolean and check whether this is a pre-release (alpha, beta, etc.)
        case PRERELEASE: {
            Optional<Boolean> booleanOptional = formatter.format(value, Boolean.class);

            // Avoid a null here :D
            if (!booleanOptional.isPresent()) {
                preRelease = false;
                break;
            }

            preRelease = booleanOptional.get();
            break;
        }

        // The commitish is usually the branch or the commit associated with the version
        case TARGET_COMMITISH: {
            commitish = stringValue;
            break;
        }

        default: {
            break;
        }
        }
    }

    // Check that the ID differs from the sentinel minimum value; this tells us whether any version was found :D
    if (id != Long.MIN_VALUE) {
        // Create a new version and add it to the list
        Version versionInstance = new Version(name, version, supportedVersions, downloadUrl, commitish,
                changelog, creationDate, publishDate, id, criticalBug, preRelease);
        versionList.add(versionInstance);
    }
}
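
Initializing id to Long.MIN_VALUE lets the method check afterwards whether the JSON actually contained an ID field. A minimal sketch of the same unset-marker idiom, with a hypothetical optional input; the trick is only safe when the sentinel can never be a legitimate value:

long id = Long.MIN_VALUE;  // sentinel meaning "not set yet"
String raw = null;         // hypothetical optional input that may be absent
if (raw != null) {
    id = Long.parseLong(raw);
}
if (id != Long.MIN_VALUE) {
    System.out.println("Found id " + id);
} else {
    System.out.println("No id present");
}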

From source file: net.myrrix.online.ServerRecommender.java

@Override
public void ingest(Reader reader) throws TasteException {
    // See also InputFilesReader
    BufferedReader buffered = IOUtils.buffer(reader);
    try {

        int lines = 0;
        int badLines = 0;
        String line;
        while ((line = buffered.readLine()) != null) {

            if (badLines > 100) { // Crude check
                throw new IOException("Too many bad lines; aborting");
            }

            lines++;

            if (line.isEmpty() || line.charAt(0) == '#') {
                continue;
            }

            Iterator<String> it = DELIMITER.split(line).iterator();

            long userID = Long.MIN_VALUE;
            String itemTag = null;
            long itemID = Long.MIN_VALUE;
            String userTag = null;
            float value;
            try {

                String userIDString = it.next();
                if (userIDString.startsWith("\"")) {
                    itemTag = userIDString.substring(1, userIDString.length() - 1);
                } else {
                    userID = Long.parseLong(userIDString);
                }

                String itemIDString = it.next();
                if (itemIDString.startsWith("\"")) {
                    userTag = itemIDString.substring(1, itemIDString.length() - 1);
                } else {
                    itemID = Long.parseLong(itemIDString);
                }

                if (it.hasNext()) {
                    String valueToken = it.next();
                    value = valueToken.isEmpty() ? Float.NaN : LangUtils.parseFloat(valueToken);
                } else {
                    value = 1.0f;
                }

            } catch (NoSuchElementException ignored) {
                log.warn("Ignoring line with too few columns: '{}'", line);
                badLines++;
                continue;
            } catch (IllegalArgumentException iae) { // includes NumberFormatException
                if (lines == 1) {
                    log.info("Ignoring header line: '{}'", line);
                } else {
                    log.warn("Ignoring unparseable line: '{}'", line);
                    badLines++;
                }
                continue;
            }

            boolean remove = Float.isNaN(value);

            if (itemTag != null) {

                if (userTag != null) {
                    log.warn("Two tags not allowed: '{}'", line);
                    badLines++;
                    continue;
                }

                if (!remove) {
                    setItemTag(itemTag, itemID, value, true);
                }
                // else ignore? no support for remove tag yet

            } else if (userTag != null) {

                if (!remove) {
                    setUserTag(userID, userTag, value, true);
                }
                // else ignore? no support for remove tag yet

            } else {

                if (remove) {
                    removePreference(userID, itemID, true);
                } else {
                    setPreference(userID, itemID, value, true);
                }

            }

            if (lines % 1000000 == 0) {
                log.info("Finished {} lines", lines);
            }
        }
        generationManager.bulkDone();

    } catch (IOException ioe) {
        throw new TasteException(ioe);
    }
}

From source file: de.innovationgate.webgate.api.rss2.SimpleRSS.java

public Date getCreated() {
    ChannelIF channel = getChannel();
    if (channel != null) {
        return channel.getPubDate();
    } else {
        return new Date(Long.MIN_VALUE);
    }

}
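
The fallback new Date(Long.MIN_VALUE) is the earliest instant java.util.Date can represent: Long.MIN_VALUE milliseconds relative to the 1970 epoch, roughly 292 million years in the past. It therefore sorts before any real publication date. A quick sketch:

java.util.Date earliest = new java.util.Date(Long.MIN_VALUE);
System.out.println(earliest.before(new java.util.Date()));      // true
System.out.println(earliest.compareTo(new java.util.Date(0)));  // negative: before the epoch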