List of usage examples for java.lang.Long.MAX_VALUE
public static final long MAX_VALUE: a constant holding the maximum value a long can have, 2^63 - 1 (9223372036854775807).
From source file: info.archinnov.achilles.test.integration.tests.SliceQueryDeleteIT.java

@Test
public void should_delete_with_matching_clusterings() throws Exception {
    long partitionKey = RandomUtils.nextLong(0, Long.MAX_VALUE);
    insertClusteredValues(partitionKey, 1, "name1", 1);
    insertClusteredValues(partitionKey, 1, "name2", 3);
    insertClusteredValues(partitionKey, 1, "name3", 1);

    manager.sliceQuery(ClusteredEntity.class).forDelete().withPartitionComponents(partitionKey)
            .deleteMatching(1, "name2");

    List<ClusteredEntity> entities = manager.sliceQuery(ClusteredEntity.class).forSelect()
            .withPartitionComponents(partitionKey).get(100);

    assertThat(entities).hasSize(2);
    Collections.sort(entities, new ClusteredEntity.ClusteredEntityComparator());

    assertThat(entities.get(0).getId().getCount()).isEqualTo(1);
    assertThat(entities.get(0).getId().getName()).isEqualTo("name1");
    assertThat(entities.get(0).getValue()).isEqualTo("value11");
    assertThat(entities.get(1).getId().getCount()).isEqualTo(1);
    assertThat(entities.get(1).getId().getName()).isEqualTo("name3");
    assertThat(entities.get(1).getValue()).isEqualTo("value11");
}
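Here Long.MAX_VALUE is the exclusive upper bound for a random partition key, i.e. "any non-negative long". A minimal, self-contained sketch of the same idiom (assuming Commons Lang 3 on the classpath):

import org.apache.commons.lang3.RandomUtils;

public class RandomKeyDemo {
    public static void main(String[] args) {
        // Draw a uniformly random long in [0, Long.MAX_VALUE).
        long partitionKey = RandomUtils.nextLong(0, Long.MAX_VALUE);
        System.out.println("partitionKey = " + partitionKey);
    }
}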
From source file: com.navercorp.pinpoint.web.dao.hbase.HbaseAgentInfoDao.java

private Scan createScanForInitialAgentInfo(String agentId) {
    Scan scan = new Scan();
    byte[] agentIdBytes = Bytes.toBytes(agentId);
    byte[] reverseStartKey = RowKeyUtils.concatFixedByteAndLong(agentIdBytes,
            HbaseTableConstatns.AGENT_NAME_MAX_LEN, Long.MAX_VALUE);
    scan.setStartRow(reverseStartKey);
    scan.setReversed(true);
    scan.setMaxVersions(1);
    scan.setCaching(SCANNER_CACHING);
    return scan;
}
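Using Long.MAX_VALUE as the long component of a start key is a common HBase idiom for reverse-timestamp row keys: rows are stored under Long.MAX_VALUE - timestamp so newer rows sort first, and a key built with Long.MAX_VALUE itself sorts past every encoded timestamp, making it a safe boundary for a reversed scan over the agent's whole range. A minimal sketch of that encoding (a hypothetical helper, not Pinpoint's RowKeyUtils):

import java.nio.ByteBuffer;

public class ReverseTimestampKey {
    // Encode a timestamp so that lexicographic byte order equals newest-first
    // order: larger timestamps map to smaller encoded values.
    static byte[] encode(long timestampMillis) {
        return ByteBuffer.allocate(Long.BYTES)
                .putLong(Long.MAX_VALUE - timestampMillis)
                .array();
    }

    public static void main(String[] args) {
        byte[] key = encode(System.currentTimeMillis());
        System.out.println("encoded key: " + key.length + " bytes"); // 8
    }
}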
From source file: com.logsniffer.reader.support.BackwardReaderTest.java

@Test
public void testRevReadingAllFromTail()
        throws FormatException, UnsupportedEncodingException, IOException {
    final List<LogEntry> entries = revReader.readEntries(log, log,
            log.createRelative(null, Long.MAX_VALUE), -10);
    Assert.assertEquals(3, entries.size());
    Assert.assertEquals(logLines[0] + "\n" + logLines[1] + "\n" + logLines[2] + "\n" + logLines[3],
            entries.get(0).getRawContent());
    Assert.assertEquals(logLines[4], entries.get(1).getRawContent());
    Assert.assertEquals(logLines[5], entries.get(2).getRawContent());
}
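Passing Long.MAX_VALUE as the pointer offset says "start at the tail, wherever that is", leaving it to the reader to clamp the offset to the actual log size before reading backward. A minimal sketch of that clamping idea (hypothetical helper, not logsniffer's API):

public class TailClampDemo {
    // Clamp a requested byte offset to the real file length, so callers can
    // pass Long.MAX_VALUE to mean "the end of the log" without knowing its size.
    static long clampToTail(long requestedOffset, long fileLength) {
        return Math.min(Math.max(requestedOffset, 0L), fileLength);
    }

    public static void main(String[] args) {
        System.out.println(clampToTail(Long.MAX_VALUE, 4096)); // 4096, i.e. the tail
    }
}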
From source file: com.dsclab.loader.app.Loader.java

public static void load(Configs prop)
        throws SQLException, ClassNotFoundException, InterruptedException, ExecutionException {
    int readThread = prop.getReadThread();
    int writeThread = prop.getWriteThread();
    ExecutorService readExecutor = Executors.newFixedThreadPool(readThread);
    ExecutorService writeExecutor = Executors.newFixedThreadPool(writeThread);
    LOG.info("Start load: writeThread:" + writeThread + ", readThread:" + readThread);
    BlockingQueue<List<String>> contentQueue = new LinkedBlockingQueue<>();
    int tableCount = tableTask.size();
    int sum = 0;
    for (int i = 0; i < tableCount; i++) {
        sum = sum + tableTask.get(i).getTaskSqlList().size();
    }
    for (int i = 0; i < sum; i++) {
        readExecutor.submit(new ProducerThread(prop.getInputURL(), contentQueue));
        writeExecutor.submit(new ConsumerThread(prop.getOutputURL(), contentQueue));
    }
    readExecutor.shutdown();
    readExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    System.out.println("[CHIA7712] read threads end");
    writeExecutor.shutdown();
    writeExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    System.out.println("[CHIA7712] write threads end");
}
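ExecutorService.awaitTermination has no "wait forever" overload, so passing Long.MAX_VALUE days is the usual idiom for an effectively unbounded wait (the TimeUnit conversion saturates at Long.MAX_VALUE rather than overflowing). A minimal, self-contained sketch:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AwaitForeverDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        pool.submit(() -> System.out.println("work done"));
        pool.shutdown(); // stop accepting new tasks
        // Block until all submitted work finishes; Long.MAX_VALUE days is
        // effectively "forever".
        pool.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    }
}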
From source file: io.covert.binary.analysis.BinaryAnalysisMapper.java

protected void setup(Context context) throws java.io.IOException, InterruptedException {
    Configuration conf = context.getConfiguration();
    try {
        parser = (OutputParser<K, V>) Class.forName(conf.get("binary.analysis.output.parser")).newInstance();
    } catch (Exception e) {
        throw new IOException("Could not create parser", e);
    }
    fileExtention = conf.get("binary.analysis.file.extention", ".dat");
    timeoutMS = conf.getLong("binary.analysis.execution.timeoutMS", Long.MAX_VALUE);
    program = conf.get("binary.analysis.program");
    args = conf.get("binary.analysis.program.args")
            .split(conf.get("binary.analysis.program.args.delim", ","));
    String[] codes = conf.get("binary.analysis.program.exit.codes").split(",");
    exitCodes = new int[codes.length];
    for (int i = 0; i < codes.length; ++i) {
        exitCodes[i] = Integer.parseInt(codes[i]);
    }
    workingDir = new File(".").getAbsoluteFile();
    dataDir = new File(workingDir, "_data");
    dataDir.mkdir();
    logDirContents(workingDir);
    File programFile = new File(workingDir, program);
    if (programFile.exists()) {
        LOG.info("Program file exists in working directory, ensuring executable and readable");
        programFile.setExecutable(true);
        programFile.setReadable(true);
    }
}
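Defaulting the timeout to Long.MAX_VALUE milliseconds makes "not configured" mean "effectively never time out". A minimal sketch of the same pattern with a plain properties map (hypothetical key and helper, not the Hadoop Configuration API):

import java.util.Map;

public class TimeoutDefaultDemo {
    // Read a millisecond timeout; when the key is absent, fall back to
    // Long.MAX_VALUE, i.e. wait indefinitely.
    static long timeoutMillis(Map<String, String> props, String key) {
        String value = props.get(key);
        return value == null ? Long.MAX_VALUE : Long.parseLong(value);
    }

    public static void main(String[] args) {
        System.out.println(timeoutMillis(Map.of(), "exec.timeoutMS")); // 9223372036854775807
    }
}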
From source file: com.mycontacts.resource.UserAddressBookResource.java

@Override
public Long getMaxResourceSize() {
    return Long.MAX_VALUE;
}
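Returning Long.MAX_VALUE is a compact way for a size-limit hook to declare "no practical limit". A minimal sketch of a caller honoring such a limit (hypothetical interface, not the project's API):

public class SizeLimitDemo {
    interface SizedResource {
        long maxResourceSize(); // Long.MAX_VALUE means effectively unlimited
    }

    static boolean fits(SizedResource resource, long payloadBytes) {
        return payloadBytes <= resource.maxResourceSize();
    }

    public static void main(String[] args) {
        SizedResource unlimited = () -> Long.MAX_VALUE;
        System.out.println(fits(unlimited, 1L << 40)); // true: no long exceeds MAX_VALUE
    }
}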
From source file: org.surfnet.oaaas.model.TokenResponseCacheImpl.java

private void cleanUpCache() {
    Set<Map.Entry<String, CacheEntry>> entries = cache.entrySet();
    long ago = Long.MAX_VALUE;
    String oldestKey = null;
    for (Map.Entry<String, CacheEntry> entry : entries) {
        if (isExpired(entry.getValue())) {
            cache.remove(entry.getKey());
        } else if (entry.getValue().expireBy < ago) {
            oldestKey = entry.getKey();
            ago = entry.getValue().expireBy;
        }
    }
    if (oldestKey != null) {
        cache.remove(oldestKey);
    }
}
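Initializing the running minimum to Long.MAX_VALUE is the classic sentinel for a min-scan: every real value compares below it, so the first entry always wins the comparison. A minimal, self-contained sketch of the idiom:

public class MinScanDemo {
    public static void main(String[] args) {
        long[] expiryTimes = { 1700000300L, 1700000100L, 1700000200L };
        long earliest = Long.MAX_VALUE; // sentinel: any real expiry is smaller
        for (long t : expiryTimes) {
            if (t < earliest) {
                earliest = t;
            }
        }
        System.out.println("earliest expiry: " + earliest); // 1700000100
    }
}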
From source file: edu.cornell.med.icb.util.ProcessEstimator.java

/**
 * Call when a unit of work has been completed. This should be called a total
 * of totalUnits times. The result can be nicely formatted using
 * ICBStringUtils.millis2hms().
 * @return the estimated time remaining, or Long.MAX_VALUE the first time this
 *         is called, as no estimate can be made without at least two data points
 */
public long unitCompleted() {
    final int numUnits = unitsCompleted.incrementAndGet();
    final long currentTime = stopWatch.getTime();
    regressor.addDataPoint(stopWatch.getTime(), totalUnits.intValue() - numUnits);
    if (numUnits < 2) {
        return Long.MAX_VALUE;
    }
    regressor.regress();
    this.correlationCoefficient = regressor.getCorrelationCoefficient();
    final long completeAt = (long) regressor.getXIntercept();
    return completeAt - currentTime;
}
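Here Long.MAX_VALUE doubles as a "no estimate yet" marker, which callers can test for explicitly before formatting the value. A minimal sketch of a caller handling that sentinel:

public class EtaDemo {
    public static void main(String[] args) {
        long remainingMillis = Long.MAX_VALUE; // as returned before two data points exist
        if (remainingMillis == Long.MAX_VALUE) {
            System.out.println("ETA: estimating...");
        } else {
            System.out.println("ETA: " + (remainingMillis / 1000) + " s");
        }
    }
}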
From source file: me.doshou.admin.monitor.web.controller.EhcacheMonitorController.java

@RequestMapping("{cacheName}/{key}/details")
@ResponseBody
public Object keyDetail(@PathVariable("cacheName") String cacheName, @PathVariable("key") String key,
        Model model) {
    Element element = cacheManager.getCache(cacheName).get(key);
    String dataPattern = "yyyy-MM-dd hh:mm:ss";
    Map<String, Object> data = Maps.newHashMap();
    data.put("objectValue", element.getObjectValue().toString());
    data.put("size", PrettyMemoryUtils.prettyByteSize(element.getSerializedSize()));
    data.put("hitCount", element.getHitCount());
    Date latestOfCreationAndUpdateTime = new Date(element.getLatestOfCreationAndUpdateTime());
    data.put("latestOfCreationAndUpdateTime",
            DateFormatUtils.format(latestOfCreationAndUpdateTime, dataPattern));
    Date lastAccessTime = new Date(element.getLastAccessTime());
    data.put("lastAccessTime", DateFormatUtils.format(lastAccessTime, dataPattern));
    if (element.getExpirationTime() == Long.MAX_VALUE) {
        data.put("expirationTime", "never expires");
    } else {
        Date expirationTime = new Date(element.getExpirationTime());
        data.put("expirationTime", DateFormatUtils.format(expirationTime, dataPattern));
    }
    data.put("timeToIdle", element.getTimeToIdle());
    data.put("timeToLive", element.getTimeToLive());
    data.put("version", element.getVersion());
    return data;
}
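Ehcache 2.x reports Long.MAX_VALUE from Element.getExpirationTime() for eternal elements, so comparing against the constant is how "never expires" is detected. A minimal sketch of that check in isolation (assuming net.sf.ehcache.Element from Ehcache 2.x):

import java.util.Date;
import net.sf.ehcache.Element;

public class ExpirationDemo {
    // Map the Long.MAX_VALUE sentinel to a readable label; anything else is a
    // concrete expiration instant.
    static String describeExpiration(Element element) {
        long expiresAt = element.getExpirationTime();
        return expiresAt == Long.MAX_VALUE ? "never expires" : new Date(expiresAt).toString();
    }
}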
From source file: com.thinkbiganalytics.inputformat.hadoop.mapred.OmnitureDataFileRecordReader.java

public OmnitureDataFileRecordReader(Configuration job, FileSplit split) throws IOException {
    this.maxLineLength = job.getInt("mapred.escapedlinereader.maxlength", Integer.MAX_VALUE);
    this.start = split.getStart();
    this.end = start + split.getLength();
    final Path file = split.getPath();
    this.compressionCodecs = new CompressionCodecFactory(job);
    final CompressionCodec codec = compressionCodecs.getCodec(file);

    // Open the file and seek to the start of the split
    FileSystem fs = file.getFileSystem(job);
    FSDataInputStream fileIn = fs.open(split.getPath());
    boolean skipFirstLine = false;
    if (codec != null) {
        // Compressed input is not splittable: read the whole stream, so the
        // split "end" becomes unbounded.
        lineReader = new EscapedLineReader(codec.createInputStream(fileIn), job);
        end = Long.MAX_VALUE;
    } else {
        if (start != 0) {
            // Not at the file start: back up one byte and discard the partial
            // first line, which belongs to the previous split.
            skipFirstLine = true;
            --start;
            fileIn.seek(start);
        }
        lineReader = new EscapedLineReader(fileIn, job);
    }
    if (skipFirstLine) {
        start += lineReader.readLine(new Text(), 0, (int) Math.min((long) Integer.MAX_VALUE, end - start));
    }
    this.pos = start;
}
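Setting end to Long.MAX_VALUE tells the record-reading loop "there is no split boundary, keep going until the stream ends", the standard treatment for non-splittable compressed input. A minimal sketch of a read loop bounded that way (hypothetical reader, not the Hadoop classes above):

import java.io.BufferedReader;
import java.io.IOException;

public class UnboundedSplitDemo {
    // pos advances as lines are consumed; with end == Long.MAX_VALUE the
    // boundary check never trips, so only end-of-stream stops the loop.
    static void readAll(BufferedReader in, long end) throws IOException {
        long pos = 0;
        String line;
        while (pos < end && (line = in.readLine()) != null) {
            pos += line.length() + 1; // +1 for the consumed newline
            System.out.println(line);
        }
    }
}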