Example usage for java.lang.Long.MIN_VALUE

List of usage examples for java.lang.Long.MIN_VALUE

Introduction

This page lists usage examples for java.lang.Long.MIN_VALUE.

Prototype

public static final long MIN_VALUE

Document

A constant holding the minimum value a long can have, -2^63 (-9223372036854775808).

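The Javadoc wording hides a quirk worth knowing: because two's-complement has one more negative value than positive values, Long.MIN_VALUE has no positive counterpart, so negating it (or taking Math.abs of it) silently overflows back to itself. A minimal sketch of these edge cases:

public class LongMinValueDemo {
    public static void main(String[] args) {
        System.out.println(Long.MIN_VALUE);                        // -9223372036854775808
        System.out.println(-Long.MIN_VALUE);                       // same value: negation overflows
        System.out.println(Math.abs(Long.MIN_VALUE));              // still negative
        System.out.println(Long.MAX_VALUE + 1 == Long.MIN_VALUE);  // true: arithmetic wraps around
    }
}
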
Usage

From source file:com.tilab.fiware.metaware.service.AlgorithmServiceTest.java

/**
 * Test of deleteAlgorithm method, of class AlgorithmService.
 */
@Test
public void testDeleteAlgorithm() {
    System.out.println("deleteAlgorithm");
    AlgorithmService instance = new AlgorithmService();
    Algorithm algorithm = new Algorithm("algorithm test name", "algorithm test description", "test",
            Long.MIN_VALUE, Long.MIN_VALUE, null, null, "private", "model test", "sub-model test",
            "hive query test", Long.MIN_VALUE, 42, "algo.test.com"); // perm list and owner id are inserted after;
    algorithm.setPermissions(Arrays.asList(perm1));
    algorithm.setOwner(userId2);
    String id = instance.createAlgorithm(algorithm);
    instance.deleteAlgorithm(id);
}

From source file:it.doqui.index.ecmengine.business.personalization.multirepository.node.index.FullIndexRecoveryComponent.java

private void performFullRecovery() {
    logger.debug("[FullIndexRecoveryComponent::performFullRecovery] BEGIN");
    try {
        int txnCount = nodeDaoService.getTransactionCount();
        // starting
        String msgStart = I18NUtil.getMessage(MSG_RECOVERY_STARTING, txnCount);
        logger.info(msgStart);
        // count the transactions
        int processedCount = 0;
        long fromTimeInclusive = Long.MIN_VALUE;
        long toTimeExclusive = Long.MAX_VALUE;
        List<Long> lastTxnIds = Collections.<Long>emptyList();
        while (true) {
            List<Transaction> nextTxns = nodeDaoService.getTxnsByCommitTimeAscending(fromTimeInclusive,
                    toTimeExclusive, MAX_TRANSACTIONS_PER_ITERATION, lastTxnIds);

            lastTxnIds = new ArrayList<Long>(nextTxns.size());

            // reindex each transaction
            for (Transaction txn : nextTxns) {
                Long txnId = txn.getId();
                // Keep it to ensure we exclude it from the next iteration
                lastTxnIds.add(txnId);
                // check if we have to terminate
                if (isShuttingDown()) {
                    String msgTerminated = I18NUtil.getMessage(MSG_RECOVERY_TERMINATED);
                    logger.warn(msgTerminated);
                    return;
                }

                // Allow exception to bubble out or not
                if (stopOnError) {
                    reindexTransaction(txnId);
                } else {
                    try {
                        reindexTransaction(txnId);
                    } catch (Throwable e) {
                        String msgError = I18NUtil.getMessage(MSG_RECOVERY_ERROR, txnId, e.getMessage());
                        logger.info(msgError, e);
                    }
                }
                // Although we use the same time as this transaction for the next iteration, we also
                // make use of the exclusion list to ensure that it doesn't get pulled back again.
                fromTimeInclusive = txn.getCommitTimeMs();

                // dump a progress report every 10% of the way
                double before = (double) processedCount / (double) txnCount * 10.0; // 0 - 10 
                processedCount++;
                double after = (double) processedCount / (double) txnCount * 10.0; // 0 - 10
                if (Math.floor(before) < Math.floor(after)) // crossed a 0 - 10 integer boundary
                {
                    int complete = ((int) Math.floor(after)) * 10;
                    String msgProgress = I18NUtil.getMessage(MSG_RECOVERY_PROGRESS, complete);
                    logger.info(msgProgress);
                }
            }

            // have we finished?
            if (nextTxns.size() == 0) {
                // there are no more
                break;
            }

        }
        // done
        String msgDone = I18NUtil.getMessage(MSG_RECOVERY_COMPLETE);
        logger.info(msgDone);
    } finally {
        logger.debug("[FullIndexRecoveryComponent::performFullRecovery] END");
    }
}

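The recovery loop above seeds its scan window with Long.MIN_VALUE and Long.MAX_VALUE so that the very first query covers every possible commit time, then walks the lower bound forward. A minimal sketch of the same open-range idiom, using a hypothetical in-memory fetchPage in place of the DAO (the real component keeps the same lower bound plus an ID exclusion list because commit times can repeat):

import java.util.ArrayList;
import java.util.List;

public class OpenRangeScanDemo {
    // Hypothetical stand-in for a DAO query: returns at most 'max' commit times
    // in [fromInclusive, toExclusive), in ascending order.
    static List<Long> fetchPage(List<Long> all, long fromInclusive, long toExclusive, int max) {
        List<Long> page = new ArrayList<Long>();
        for (long t : all) {
            if (t >= fromInclusive && t < toExclusive) {
                page.add(t);
                if (page.size() == max) {
                    break;
                }
            }
        }
        return page;
    }

    public static void main(String[] args) {
        List<Long> commitTimes = List.of(10L, 20L, 30L, 40L, 50L);
        long from = Long.MIN_VALUE; // no lower bound yet: start before any possible commit time
        long to = Long.MAX_VALUE;   // effectively unbounded upper limit
        while (true) {
            List<Long> page = fetchPage(commitTimes, from, to, 2);
            if (page.isEmpty()) {
                break; // nothing left in the window
            }
            for (long t : page) {
                System.out.println("processing commit time " + t);
                from = t + 1; // simplified: advance past the last time we have seen
            }
        }
    }
}
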
From source file:com.linkedin.databus2.relay.GoldenGateEventProducer.java

/**
 *
 * @param pConfig The physical source config for which the event producer is configured.
 * @param schemaRegistryService Schema registry to fetch schemas
 * @param dbusEventBuffer An event buffer to which the producer can write/append events.
 * @param statsCollector Reporting stats
 * @param maxScnReaderWriters To read/write the maxScn from maxScn file
 * @throws DatabusException
 */
public GoldenGateEventProducer(PhysicalSourceStaticConfig pConfig, SchemaRegistryService schemaRegistryService,
        DbusEventBufferAppendable dbusEventBuffer, DbusEventsStatisticsCollector statsCollector,
        MaxSCNReaderWriter maxScnReaderWriters) throws DatabusException {
    super(dbusEventBuffer, maxScnReaderWriters, pConfig, null);
    _pConfig = pConfig;
    _schemaRegistryService = schemaRegistryService;
    _statsCollector = statsCollector;
    _currentState = State.INIT;
    _partitionFunctionHashMap = new HashMap<Integer, PartitionFunction>();
    _eventsLog = Logger.getLogger("com.linkedin.databus2.producers.db.events." + pConfig.getName());

    if (_pConfig != null) {
        long eventRatePerSec = pConfig.getEventRatePerSec();
        long maxThrottleDurationInSecs = pConfig.getMaxThrottleDurationInSecs();

        if ((eventRatePerSec > 0) && (maxThrottleDurationInSecs > 0)) {
            _rc = new RateControl(eventRatePerSec, maxThrottleDurationInSecs);
        } else {
            // Disable rate control
            _rc = new RateControl(Long.MIN_VALUE, Long.MIN_VALUE);
        }
    }

    final String MODULE = GoldenGateEventProducer.class.getName();
    _log = Logger.getLogger(MODULE + "." + getName());

    //Create a hashmap for logical source id ==> PartitionFunction, this will be used as the logical partition Id for the event creation
    // also create a list(map) of MonitoredSourceInfo objects to monitor GGEventProducer progress
    for (int i = 0; i < _pConfig.getSources().length; i++) {
        LogicalSourceStaticConfig logicalSourceStaticConfig = _pConfig.getSources()[i];
        GGMonitoredSourceInfo source = buildGGMonitoredSourceInfo(logicalSourceStaticConfig, _pConfig);
        _monitoredSources.put(source.getSourceId(), source);
    }

    // get one fake global source for total stats
    LogicalSourceStaticConfig logicalSourceStaticConfig = new LogicalSourceStaticConfig(GLOBAL_SOURCE_ID,
            _pConfig.getName(), "", "constant:1", (short) 0, false, null, null, null);
    GGMonitoredSourceInfo source = buildGGMonitoredSourceInfo(logicalSourceStaticConfig, _pConfig);
    _monitoredSources.put(source.getSourceId(), source);

    // create stats collector for parser
    _ggParserStats = new GGParserStatistics(_pConfig.getName());
    registerParserMbean(_ggParserStats);

}

From source file:org.apache.hadoop.hbase.coprocessor.TestBigDecimalColumnInterpreter.java

@Test(timeout = 300000)
public void testMaxWithInvalidRange() {
    AggregationClient aClient = new AggregationClient(conf);
    final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci = new BigDecimalColumnInterpreter();
    Scan scan = new Scan();
    scan.setStartRow(ROWS[4]);
    scan.setStopRow(ROWS[2]);
    scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
    BigDecimal max = new BigDecimal(Long.MIN_VALUE);
    try {
        max = aClient.max(TEST_TABLE, ci, scan);
    } catch (Throwable e) {
        max = BigDecimal.ZERO;
    }
    assertEquals(BigDecimal.ZERO, max);// control should go to the catch block
}

From source file:com.facebook.infrastructure.db.ColumnFamily.java

public boolean isMarkedForDelete() {
    return markedForDeleteAt > Long.MIN_VALUE;
}

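Here Long.MIN_VALUE serves as a "never" sentinel: no real deletion timestamp can be smaller, so a strict greater-than test is all it takes to detect a tombstone. A minimal sketch of the pattern (class and field names are illustrative, not taken from the ColumnFamily class above):

public class DeletionMarkerDemo {
    // Sentinel: Long.MIN_VALUE means "not marked for delete".
    private long markedForDeleteAt = Long.MIN_VALUE;

    void markForDelete(long timestampMillis) {
        markedForDeleteAt = timestampMillis;
    }

    boolean isMarkedForDelete() {
        // Any genuine timestamp is strictly greater than the sentinel.
        return markedForDeleteAt > Long.MIN_VALUE;
    }

    public static void main(String[] args) {
        DeletionMarkerDemo marker = new DeletionMarkerDemo();
        System.out.println(marker.isMarkedForDelete()); // false
        marker.markForDelete(System.currentTimeMillis());
        System.out.println(marker.isMarkedForDelete()); // true
    }
}
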
From source file:org.apache.batchee.cli.command.JobOperatorCommand.java

private ClassLoader createLoader(final ClassLoader parent) throws MalformedURLException {
    final Collection<URL> urls = new LinkedList<URL>();

    if (libs != null) {
        final File folder = new File(libs);
        if (folder.exists()) {
            addFolder(folder, urls);
        }
    }

    // we add libs/*.jar and libs/xxx/*.jar to be able to sort libs but only one level to keep it simple
    File resources = null;
    File exploded = null;
    if (archive != null) {
        final File bar = new File(archive);
        if (bar.exists()) {
            if (bar.isFile()) { // bar to unzip
                exploded = new File(work, bar.getName());
            } else if (bar.isDirectory()) { // already unpacked
                exploded = bar;
            } else {
                throw new IllegalArgumentException("unsupported archive type for: '" + archive + "'");
            }

            final File timestamp = new File(exploded, "timestamp.txt");

            long ts = Long.MIN_VALUE;
            if (exploded.exists()) {
                if (timestamp.exists()) {
                    try {
                        ts = Long.parseLong(FileUtils.readFileToString(timestamp).trim());
                    } catch (final IOException e) {
                        ts = Long.MIN_VALUE;
                    }
                }
            }

            if (ts == Long.MIN_VALUE || ts < bar.lastModified()) {
                explode(bar, exploded, timestamp, bar.lastModified());
            }

            if (archive.endsWith(".bar") || new File(exploded, "BATCH-INF").exists()) {
                // bar archives are split across 3 folders
                addFolder(new File(exploded, "BATCH-INF/classes"), urls);
                addFolderIfExist(new File(exploded, "BATCH-INF/lib"), urls);
                resources = new File(exploded, "BATCH-INF");
            } else if (archive.endsWith(".war") || new File(exploded, "WEB-INF").exists()) {
                addFolderIfExist(new File(exploded, "WEB-INF/classes"), urls);
                addLibs(new File(exploded, "WEB-INF/lib"), urls);
            } else {
                throw new IllegalArgumentException("unknown or unsupported archive type: " + archive);
            }
        } else {
            throw new IllegalArgumentException("'" + archive + "' doesn't exist");
        }
    }

    final ClassLoader sharedClassLoader = createSharedClassLoader(parent);
    if (libs == null && archive == null) {
        return sharedClassLoader;
    }

    final ChildFirstURLClassLoader classLoader = new ChildFirstURLClassLoader(
            urls.toArray(new URL[urls.size()]), sharedClassLoader);
    if (resources != null && resources.exists()) {
        classLoader.addResource(resources);
    }
    if (exploded != null) {
        classLoader.setApplicationFolder(exploded);
    }
    return classLoader;
}

From source file:com.gsma.mobileconnect.impl.RequestTokenTest.java

@Test
public void requestToken_withExpiredDiscoveryResponse_shouldThrowException()
        throws OIDCException, DiscoveryResponseExpiredException, IOException {
    // GIVEN
    IOIDC ioidc = Factory.getOIDC(null);
    CaptureRequestTokenResponse captureRequestTokenResponse = new CaptureRequestTokenResponse();
    DiscoveryResponse discoveryResponse = new DiscoveryResponse(true, new Date(Long.MIN_VALUE), 0, null,
            parseJson(OPERATOR_JSON_STRING));

    // THEN
    thrown.expect(DiscoveryResponseExpiredException.class);
    thrown.expectMessage(containsString("discoveryResult has expired"));

    // WHEN
    ioidc.requestToken(discoveryResponse, "", "", null, captureRequestTokenResponse);
}

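The test constructs its DiscoveryResponse with new Date(Long.MIN_VALUE), the earliest instant a java.util.Date can represent, so any expiry check against the current time fails immediately. A minimal sketch of that effect:

import java.util.Date;

public class EarliestDateDemo {
    public static void main(String[] args) {
        Date earliest = new Date(Long.MIN_VALUE);        // roughly 292 million years before the epoch
        System.out.println(earliest.before(new Date())); // true: always in the past, hence always expired
    }
}
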
From source file:com.google.uzaygezen.core.LongBitVector.java

@Override
public int compareTo(BitVector o) {
    checkSize(o);
    final int cmp;
    // optimisation
    if (o.size() <= 64) {
        // 0, positives, Long.MAX_VALUE, Long.MIN_VALUE, negatives, -1
        long x = data + Long.MIN_VALUE;
        long y = o.toExactLong() + Long.MIN_VALUE;
        cmp = Long.compare(x, y);
        assert Integer.signum(cmp) == Integer
                .signum(BitSetComparator.INSTANCE.compare(toBitSet(), o.toBitSet()));
    } else {
        cmp = BitSetComparator.INSTANCE.compare(toBitSet(), o.toBitSet());
    }
    return cmp;
}

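The comparison above uses a classic trick: adding Long.MIN_VALUE flips the sign bit, mapping unsigned order onto signed order (0 becomes the smallest value, and -1, i.e. 0xFFFFFFFFFFFFFFFF, the largest). Since Java 8 the same result is available directly from Long.compareUnsigned; a minimal sketch showing the two agree:

public class UnsignedCompareDemo {
    static int compareUnsignedViaOffset(long a, long b) {
        // Offsetting both operands by Long.MIN_VALUE turns an unsigned
        // comparison into an ordinary signed one.
        return Long.compare(a + Long.MIN_VALUE, b + Long.MIN_VALUE);
    }

    public static void main(String[] args) {
        long x = -1L;             // 0xFFFFFFFFFFFFFFFF, the largest unsigned 64-bit value
        long y = Long.MAX_VALUE;
        System.out.println(compareUnsignedViaOffset(x, y)); // positive: x is larger when treated as unsigned
        System.out.println(Long.compareUnsigned(x, y));     // positive as well
    }
}
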
From source file:org.apache.hadoop.hbase.coprocessor.TestBigDecimalColumnInterpreter.java

@Test(timeout = 300000)
public void testMaxWithInvalidRange2() throws Throwable {
    BigDecimal max = new BigDecimal(Long.MIN_VALUE);
    Scan scan = new Scan();
    scan.addColumn(TEST_FAMILY, TEST_QUALIFIER);
    scan.setStartRow(ROWS[4]);
    scan.setStopRow(ROWS[4]);
    try {
        AggregationClient aClient = new AggregationClient(conf);
        final ColumnInterpreter<BigDecimal, BigDecimal, EmptyMsg, BigDecimalMsg, BigDecimalMsg> ci = new BigDecimalColumnInterpreter();
        max = aClient.max(TEST_TABLE, ci, scan);
    } catch (Exception e) {
        max = BigDecimal.ZERO;
    }
    assertEquals(BigDecimal.ZERO, max);// control should go to the catch block
}

From source file:com.alibaba.otter.node.etl.extract.extractor.FileExtractor.java

private void doFileDetectCollector(Pipeline pipeline, List<FileData> fileDatas) {
    ExecutorTemplate executorTemplate = executorTemplateGetter.get();
    try {
        executorTemplate.start();
        // adjust the worker pool size to the pipeline's configured file-load pool size
        executorTemplate.adjustPoolSize(pipeline.getParameters().getFileLoadPoolSize());
        for (final FileData fileData : fileDatas) {
            // submit one metadata-detection task per file
            executorTemplate.submit(new Runnable() {

                public void run() {
                    boolean isAranda = StringUtils.isNotEmpty(fileData.getNameSpace());
                    int count = 0;
                    Throwable exception = null;
                    while (count++ < retry) {
                        try {
                            if (isAranda) {
                                // remote file
                                throw new RuntimeException(fileData + " is not support!");
                            } else {
                                // local file: read its metadata directly
                                File file = new File(fileData.getPath());
                                fileData.setLastModifiedTime(file.lastModified());
                                fileData.setSize(file.length());
                            }

                            return; // success, stop retrying
                        } catch (Exception e) {
                            fileData.setLastModifiedTime(Long.MIN_VALUE);
                            fileData.setSize(Long.MIN_VALUE);
                            exception = e;
                        }
                    }

                    if (count >= retry) {
                        logger.warn(String.format("FileDetectCollector is error! collect failed[%s]",
                                fileData.getNameSpace() + "/" + fileData.getPath()), exception);
                    }
                }
            });
        }

        long start = System.currentTimeMillis();
        logger.info("start pipelinep[{}] waitFor FileData Size : {} ", pipeline.getId(), fileDatas.size());
        // wait for all detection tasks to finish
        executorTemplate.waitForResult();
        logger.info("end pipelinep[{}] waitFor FileData cost : {} ms ", pipeline.getId(),
                (System.currentTimeMillis() - start));
    } finally {
        if (executorTemplate != null) {
            executorTemplateGetter.release(executorTemplate);
        }
    }
}
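
When every retry fails, the collector falls back to Long.MIN_VALUE for both the size and the last-modified time, a marker that no real file metadata can produce. A minimal sketch of the same convention (the helper name is hypothetical):

import java.io.File;

public class FileStatDemo {
    // Returns the file's last-modified time, or Long.MIN_VALUE when it cannot
    // be determined, so callers can tell "unknown" apart from a real timestamp.
    static long lastModifiedOrUnknown(String path) {
        try {
            File file = new File(path);
            return file.exists() ? file.lastModified() : Long.MIN_VALUE;
        } catch (SecurityException e) {
            return Long.MIN_VALUE;
        }
    }

    public static void main(String[] args) {
        long ts = lastModifiedOrUnknown("/no/such/file");
        if (ts == Long.MIN_VALUE) {
            System.out.println("metadata unknown");
        } else {
            System.out.println("last modified at " + ts);
        }
    }
}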