List of usage examples for java.lang.Long.MIN_VALUE

public static final long MIN_VALUE

A constant holding the minimum value a long can have, -2^63 (-9223372036854775808).
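Most of the examples below use Long.MIN_VALUE in one of two idioms: as a seed for a running maximum (every real value is larger, so the first candidate always wins) or as a sentinel meaning "absent" or "not measured". A minimal, self-contained sketch of both idioms; the class and variable names here are illustrative and do not come from the projects below:

public class LongMinValueDemo {
    public static void main(String[] args) {
        // The extremes of the 64-bit two's-complement range.
        System.out.println(Long.MIN_VALUE); // -9223372036854775808
        System.out.println(Long.MAX_VALUE); // 9223372036854775807

        // Idiom 1: seed running min/max so the first real value replaces both.
        long minSeen = Long.MAX_VALUE;
        long maxSeen = Long.MIN_VALUE;
        for (long v : new long[] { 17L, -4L, 42L }) {
            minSeen = Math.min(minSeen, v);
            maxSeen = Math.max(maxSeen, v);
        }
        System.out.println(minSeen + " .. " + maxSeen); // -4 .. 42

        // Idiom 2: sentinel for "no value yet"; only safe when a genuine
        // Long.MIN_VALUE can never occur in the data.
        long timestamp = Long.MIN_VALUE;
        if (timestamp == Long.MIN_VALUE) {
            System.out.println("timestamp not set");
        }
    }
}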
From source file:org.apache.hadoop.hbase.master.AssignmentPlan.java
/**
 * @param region
 * @return the last update time stamp for the region in the plan
 */
public synchronized long getAssignmentUpdateTS(HRegionInfo region) {
    Long updateTS = assignmentUpdateTS.get(region);
    if (updateTS == null)
        return Long.MIN_VALUE;
    else
        return updateTS.longValue();
}
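Callers have to treat the returned Long.MIN_VALUE as "no update recorded". A hypothetical caller; plan and handleUnassignedRegion are illustrative names, not part of the HBase sources:

long ts = plan.getAssignmentUpdateTS(regionInfo);
if (ts == Long.MIN_VALUE) {
    // the plan has never recorded an update for this region
    handleUnassignedRegion(regionInfo);
}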
From source file:com.xtructure.xevolution.evolution.impl.AbstractEvolutionExperiment.java
protected Population<D> createPopulation(int idNumber) throws IOException, XMLStreamException {
    if (getOutputDir() == null) {
        return getGeneticsFactory().createPopulation(idNumber);
    }
    // find the most recently modified .xml population file;
    // Long.MIN_VALUE guarantees the first candidate wins the comparison
    File populationFile = null;
    long mod = Long.MIN_VALUE;
    for (File file : getOutputDir().listFiles()) {
        if (file.lastModified() > mod && file.getName().endsWith(".xml")) {
            mod = file.lastModified();
            populationFile = file;
        }
    }
    if (populationFile == null) {
        return getGeneticsFactory().createPopulation(idNumber);
    }
    return XmlReader.read(populationFile);
}
From source file:fr.inria.soctrace.tools.importer.pajedump.core.PJDumpParser.java
private boolean parseRawTrace(IProgressMonitor monitor) throws SoCTraceException {
    try {
        boolean partialImport = false;
        numberOfEvents = 0;
        // seed the extremes so the first timestamp read replaces both
        minTimestamp = Long.MAX_VALUE;
        maxTimestamp = Long.MIN_VALUE;
        page = 0;
        elist.clear();
        // we add +1 to file size to avoid dividing by 0
        double scale = ((double) PJDumpConstants.WORK) / (getFileSize(traceFile) + 1);
        // add +1 to the byte read too to compensate
        byteRead = 1;
        BufferedReader br = new BufferedReader(
                new InputStreamReader(new DataInputStream(new FileInputStream(traceFile))));
        String[] line;
        while ((line = getLine(br)) != null) {
            if (line.length < PJDumpConstants.MIN_LINE_NUMBER_OF_PARAMETERS) {
                malformedLineException(line);
            }
            logger.debug(Arrays.toString(line));
            PJDumpLineParser parser = parserMap.get(line[PJDumpConstants.ENTITY]);
            if (parser == null) {
                malformedLineException(line);
            }
            parser.parseLine(line);
            if (elist.size() == PJDumpConstants.PAGE_SIZE)
                page++;
            if (elist.size() >= PJDumpConstants.PAGE_SIZE && endPendingLinks.isEmpty()
                    && startPendingLinks.isEmpty()) {
                saveEvents(elist);
                monitor.worked(getWorked(scale));
                byteRead = 0;
                numberOfEvents += elist.size();
                elist.clear();
                if (monitor.isCanceled()) {
                    if (getLine(br) != null) {
                        // there were other lines
                        partialImport = true;
                    }
                    break;
                }
            }
        }
        if (elist.size() > 0) {
            saveEvents(elist);
            monitor.worked(getWorked(scale));
            byteRead = 0;
            numberOfEvents += elist.size();
            elist.clear();
        }
        logger.debug("Saved {} events on {} pages", numberOfEvents, (page + 1));
        return partialImport;
    } catch (Exception e) {
        throw new SoCTraceException(e);
    }
}
From source file:org.apache.hadoop.hbase.regionserver.compactions.DateTieredCompactionPolicy.java
public boolean shouldPerformMajorCompaction(final Collection<StoreFile> filesToCompact) throws IOException {
    long mcTime = getNextMajorCompactTime(filesToCompact);
    if (filesToCompact == null || mcTime == 0) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("filesToCompact: " + filesToCompact + " mcTime: " + mcTime);
        }
        return false;
    }

    // TODO: Use better method for determining stamp of last major (HBASE-2990)
    long lowTimestamp = StoreUtils.getLowestTimestamp(filesToCompact);
    long now = EnvironmentEdgeManager.currentTime();
    if (lowTimestamp <= 0L || lowTimestamp >= (now - mcTime)) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("lowTimestamp: " + lowTimestamp + " now: " + now + " mcTime: " + mcTime);
        }
        return false;
    }

    long cfTTL = this.storeConfigInfo.getStoreFileTtl();
    HDFSBlocksDistribution hdfsBlocksDistribution = new HDFSBlocksDistribution();
    List<Long> boundaries = getCompactBoundariesForMajor(filesToCompact, now);
    boolean[] filesInWindow = new boolean[boundaries.size()];

    for (StoreFile file : filesToCompact) {
        Long minTimestamp = file.getMinimumTimestamp();
        // a null minimum timestamp maps to Long.MIN_VALUE ("age unknown"),
        // which can never reach the TTL threshold below
        long oldest = (minTimestamp == null) ? Long.MIN_VALUE : now - minTimestamp.longValue();
        if (cfTTL != Long.MAX_VALUE && oldest >= cfTTL) {
            LOG.debug("Major compaction triggered on store " + this + "; for TTL maintenance");
            return true;
        }
        if (!file.isMajorCompaction() || file.isBulkLoadResult()) {
            LOG.debug("Major compaction triggered on store " + this
                    + ", because there are new files and time since last major compaction "
                    + (now - lowTimestamp) + "ms");
            return true;
        }
        int lowerWindowIndex = Collections.binarySearch(boundaries,
                minTimestamp == null ? (Long) Long.MAX_VALUE : minTimestamp);
        int upperWindowIndex = Collections.binarySearch(boundaries,
                file.getMaximumTimestamp() == null ? (Long) Long.MAX_VALUE : file.getMaximumTimestamp());
        if (lowerWindowIndex != upperWindowIndex) {
            LOG.debug("Major compaction triggered on store " + this + "; because file " + file.getPath()
                    + " has data with timestamps cross window boundaries");
            return true;
        } else if (filesInWindow[upperWindowIndex]) {
            LOG.debug("Major compaction triggered on store " + this
                    + "; because there are more than one file in some windows");
            return true;
        } else {
            filesInWindow[upperWindowIndex] = true;
        }
        hdfsBlocksDistribution.add(file.getHDFSBlockDistribution());
    }

    float blockLocalityIndex = hdfsBlocksDistribution
            .getBlockLocalityIndex(RSRpcServices.getHostname(comConf.conf, false));
    if (blockLocalityIndex < comConf.getMinLocalityToForceCompact()) {
        LOG.debug("Major compaction triggered on store " + this
                + "; to make hdfs blocks local, current blockLocalityIndex is " + blockLocalityIndex
                + " (min " + comConf.getMinLocalityToForceCompact() + ")");
        return true;
    }

    LOG.debug("Skipping major compaction of " + this + ", because the files are already major compacted");
    return false;
}
From source file:com.kakao.helper.SharedPreferencesCache.java
public synchronized Long getLong(final String key) {
    long value = memory.getLong(key, Long.MIN_VALUE);
    if (value == Long.MIN_VALUE) {
        try {
            deserializeKey(key);
            value = memory.getLong(key, Long.MIN_VALUE);
        } catch (JSONException e) {
            Logger.getInstance().w("Error reading cached value for key: '" + key + "' -- " + e);
        }
    }
    return value;
}
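Here Long.MIN_VALUE stands for "not cached", which silently conflates a cache miss with a legitimately stored Long.MIN_VALUE. A collision-free sketch using a boxed Long instead of a sentinel; LongCache is an illustrative stand-in, not part of the Kakao SDK:

import java.util.HashMap;
import java.util.Map;

class LongCache {
    private final Map<String, Long> memory = new HashMap<>();

    // null means "not cached", so a stored Long.MIN_VALUE stays a valid value.
    synchronized Long getLong(String key) {
        return memory.get(key);
    }
}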
From source file:org.syncope.core.rest.UserTestITCase.java
@Test
public void createUserWithNoPropagation() {
    // get task list
    List<PropagationTaskTO> tasks = Arrays.asList(
            restTemplate.getForObject(BASE_URL + "task/propagation/list", PropagationTaskTO[].class));
    assertNotNull(tasks);
    assertFalse(tasks.isEmpty());

    // get max task id
    long maxId = Long.MIN_VALUE;
    for (PropagationTaskTO task : tasks) {
        if (task.getId() > maxId) {
            maxId = task.getId();
        }
    }

    // create a new user
    UserTO userTO = new UserTO();
    userTO.setUsername("xxx@xxx.xxx");

    AttributeTO attributeTO = new AttributeTO();
    attributeTO.setSchema("firstname");
    attributeTO.addValue("xxx");
    userTO.addAttribute(attributeTO);

    attributeTO = new AttributeTO();
    attributeTO.setSchema("surname");
    attributeTO.addValue("xxx");
    userTO.addAttribute(attributeTO);

    attributeTO = new AttributeTO();
    attributeTO.setSchema("userId");
    attributeTO.addValue("xxx@xxx.xxx");
    userTO.addAttribute(attributeTO);

    attributeTO = new AttributeTO();
    attributeTO.setSchema("fullname");
    attributeTO.addValue("xxx");
    userTO.addAttribute(attributeTO);

    userTO.setPassword("password123");
    userTO.addResource("ws-target-resource-nopropagation");

    restTemplate.postForObject(BASE_URL + "user/create", userTO, UserTO.class);

    // get the new task list
    tasks = Arrays.asList(
            restTemplate.getForObject(BASE_URL + "task/propagation/list", PropagationTaskTO[].class));
    assertNotNull(tasks);
    assertFalse(tasks.isEmpty());

    // get max task id
    long newMaxId = Long.MIN_VALUE;
    for (PropagationTaskTO task : tasks) {
        if (task.getId() > newMaxId) {
            newMaxId = task.getId();
        }
    }
    assertTrue(newMaxId > maxId);

    // get last task
    PropagationTaskTO taskTO = restTemplate.getForObject(BASE_URL + "task/read/{taskId}",
            PropagationTaskTO.class, newMaxId);
    assertNotNull(taskTO);
    assertTrue(taskTO.getExecutions().isEmpty());
}
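The two max-id scans could also be written with streams, with Long.MIN_VALUE again serving as the "no tasks" fallback; a sketch, not taken from the Syncope sources:

long maxId = tasks.stream()
        .mapToLong(PropagationTaskTO::getId)
        .max()
        .orElse(Long.MIN_VALUE);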
From source file:InlineSchemaValidator.java
public void validate(Validator validator, Source source, String systemId, int repetitions,
        boolean memoryUsage) {
    try {
        long timeBefore = System.currentTimeMillis();
        long memoryBefore = Runtime.getRuntime().freeMemory();
        for (int j = 0; j < repetitions; ++j) {
            validator.validate(source);
        }
        long memoryAfter = Runtime.getRuntime().freeMemory();
        long timeAfter = System.currentTimeMillis();

        long time = timeAfter - timeBefore;
        // Long.MIN_VALUE marks "memory usage not measured"
        long memory = memoryUsage ? memoryBefore - memoryAfter : Long.MIN_VALUE;
        printResults(fOut, systemId, time, memory, repetitions);
    } catch (SAXParseException e) {
        // ignore
    } catch (Exception e) {
        System.err.println("error: Parse error occurred - " + e.getMessage());
        Exception se = e;
        if (e instanceof SAXException) {
            se = ((SAXException) e).getException();
        }
        if (se != null)
            se.printStackTrace(System.err);
        else
            e.printStackTrace(System.err);
    }
}
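printResults is not shown in this listing, so whatever consumes the sentinel must skip the memory column when it sees Long.MIN_VALUE. A plausible shape, with the signature assumed from the call above rather than taken from the sample:

static void printResults(java.io.PrintWriter out, String systemId, long time, long memory, int repetitions) {
    out.print(systemId + ": " + time + " ms (" + repetitions + " runs)");
    if (memory != Long.MIN_VALUE) { // Long.MIN_VALUE means "memory not measured"
        out.print(", " + memory + " bytes");
    }
    out.println();
    out.flush();
}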
From source file:com.tilab.fiware.metaware.service.DatasetServiceTest.java
/**
 * Test of createDataset method, of class DatasetService.
 *
 * @throws com.fasterxml.jackson.core.JsonProcessingException
 */
@Test
public void testCreateDataset() throws JsonProcessingException {
    System.out.println("createDataset");
    Dataset dataset = new Dataset("dataset test name", "dataset test description", "test",
            Long.MIN_VALUE, Long.MIN_VALUE, null, null, "private", true, new DatasetStructure());
    dataset.setPermissions(Arrays.asList(perm1));
    dataset.setOwner(userId2);
    Dataset expResult = dataset;
    DatasetService instance = INSTANCE.getDatasetService();
    String id = instance.createDataset(dataset);
    Dataset result = instance.getDataset(id);
    assertEquals(expResult, result);
    instance.deleteDataset(id);
}
From source file:at.alladin.rmbt.controlServer.QualityOfServiceResultResource.java
@Post("json") public String request(final String entity) { final String secret = getContext().getParameters().getFirstValue("RMBT_SECRETKEY"); addAllowOrigin();//w w w . ja v a2s .c om JSONObject request = null; final ErrorList errorList = new ErrorList(); final JSONObject answer = new JSONObject(); System.out.println(MessageFormat.format(labels.getString("NEW_QOS_RESULT"), getIP())); if (entity != null && !entity.isEmpty()) // try parse the string to a JSON object try { request = new JSONObject(entity); final String lang = request.optString("client_language"); // Load Language Files for Client final List<String> langs = Arrays .asList(settings.getString("RMBT_SUPPORTED_LANGUAGES").split(",\\s*")); if (langs.contains(lang)) { errorList.setLanguage(lang); labels = ResourceManager.getSysMsgBundle(new Locale(lang)); } // System.out.println(request.toString(4)); if (conn != null) { ResultOptions resultOptions = new ResultOptions(new Locale(lang)); conn.setAutoCommit(false); final Test test = new Test(conn); if (request.optString("test_token").length() > 0) { final String[] token = request.getString("test_token").split("_"); try { // Check if UUID final UUID testUuid = UUID.fromString(token[0]); final String data = token[0] + "_" + token[1]; final String hmac = Helperfunctions.calculateHMAC(secret, data); if (hmac.length() == 0) errorList.addError("ERROR_TEST_TOKEN"); if (token[2].length() > 0 && hmac.equals(token[2])) { final List<String> clientNames = Arrays .asList(settings.getString("RMBT_CLIENT_NAME").split(",\\s*")); final List<String> clientVersions = Arrays .asList(settings.getString("RMBT_VERSION_NUMBER").split(",\\s*")); if (test.getTestByUuid(testUuid) > 0) if (clientNames.contains(request.optString("client_name")) && clientVersions.contains(request.optString("client_version"))) { //save qos test results: JSONArray qosResult = request.optJSONArray("qos_result"); if (qosResult != null) { QoSTestResultDao resultDao = new QoSTestResultDao(conn); Set<String> excludeTestTypeKeys = new TreeSet<>(); excludeTestTypeKeys.add("test_type"); excludeTestTypeKeys.add("qos_test_uid"); for (int i = 0; i < qosResult.length(); i++) { JSONObject testObject = qosResult.optJSONObject(i); //String hstore = Helperfunctions.json2hstore(testObject, excludeTestTypeKeys); JSONObject resultJson = new JSONObject(testObject, JSONObject.getNames(testObject)); for (String excludeKey : excludeTestTypeKeys) { resultJson.remove(excludeKey); } QoSTestResult testResult = new QoSTestResult(); //testResult.setResults(hstore); testResult.setResults(resultJson.toString()); testResult.setTestType(testObject.getString("test_type")); testResult.setTestUid(test.getUid()); long qosTestId = testObject.optLong("qos_test_uid", Long.MIN_VALUE); testResult.setQoSTestObjectiveId(qosTestId); resultDao.save(testResult); } } QoSTestResultDao resultDao = new QoSTestResultDao(conn); PreparedStatement updateCounterPs = resultDao .getUpdateCounterPreparedStatement(); List<QoSTestResult> testResultList = resultDao.getByTestUid(test.getUid()); //map that contains all test types and their result descriptions determined by the test result <-> test objectives comparison Map<TestType, TreeSet<ResultDesc>> resultKeys = new HashMap<>(); //test description set: Set<String> testDescSet = new TreeSet<>(); //test summary set: Set<String> testSummarySet = new TreeSet<>(); //iterate through all result entries for (QoSTestResult testResult : testResultList) { //reset test counters testResult.setFailureCounter(0); testResult.setSuccessCounter(0); //get the 
correct class of the result; TestType testType = TestType .valueOf(testResult.getTestType().toUpperCase()); Class<? extends AbstractResult<?>> clazz = testType.getClazz(); //parse hstore data final JSONObject resultJson = new JSONObject(testResult.getResults()); AbstractResult<?> result = QoSUtil.HSTORE_PARSER.fromJSON(resultJson, clazz); result.setResultJson(resultJson); if (result != null) { //add each test description key to the testDescSet (to fetch it later from the db) if (testResult.getTestDescription() != null) { testDescSet.add(testResult.getTestDescription()); } if (testResult.getTestSummary() != null) { testSummarySet.add(testResult.getTestSummary()); } testResult.setResult(result); } //compare test results with expected results QoSUtil.compareTestResults(testResult, result, resultKeys, testType, resultOptions); //resultList.put(testResult.toJson()); //update all test results after the success and failure counters have been set resultDao.updateCounter(testResult, updateCounterPs); //System.out.println("UPDATING: " + testResult.toString()); } } else errorList.addError("ERROR_CLIENT_VERSION"); } else errorList.addError("ERROR_TEST_TOKEN_MALFORMED"); } catch (final IllegalArgumentException e) { e.printStackTrace(); errorList.addError("ERROR_TEST_TOKEN_MALFORMED"); } catch (HstoreParseException e) { e.printStackTrace(); errorList.addError("ERROR_DB_CONNECTION"); } catch (IllegalAccessException e) { e.printStackTrace(); errorList.addError("ERROR_TEST_TOKEN_MALFORMED"); } } else errorList.addError("ERROR_TEST_TOKEN_MISSING"); conn.commit(); } else errorList.addError("ERROR_DB_CONNECTION"); } catch (final JSONException e) { errorList.addError("ERROR_REQUEST_JSON"); //System.out.println("Error parsing JSDON Data " + e.toString()); e.printStackTrace(); } catch (final SQLException e) { //System.out.println("Error while storing data " + e.toString()); e.printStackTrace(); } else errorList.addErrorString("Expected request is missing."); try { answer.putOpt("error", errorList.getList()); } catch (final JSONException e) { System.out.println("Error saving ErrorList: " + e.toString()); } return answer.toString(); }
From source file:evaluation.simulator.plugins.outputStrategy.StopAndGoMessage.java
public static long getMinTimestampForReply(int mixNumber, int maxClockDeviation, int[] sgDelays,
        int minInterMixDelay, int minClientMixDelay) {
    // the first mix has no lower bound on the reply timestamp
    if (mixNumber == 0)
        return Long.MIN_VALUE;
    int sumOfSgDelays = 0;
    for (int i = 0; i < mixNumber; i++)
        sumOfSgDelays += sgDelays[i];
    int minDelay = mixNumber * minInterMixDelay;
    int maxClockDev = mixNumber * maxClockDeviation;
    if (mixNumber == sgDelays.length - 1)
        minDelay = minDelay - minInterMixDelay + minClientMixDelay;
    return Simulator.getNow() + sumOfSgDelays + minDelay - maxClockDev;
}
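Returning Long.MIN_VALUE for the first mix disables the lower-bound check entirely, since no timestamp can be smaller. A hypothetical validity check at a mix; replyTimestamp and dropMessage are illustrative names, not part of the simulator:

long tsMin = StopAndGoMessage.getMinTimestampForReply(mixNumber, maxClockDeviation, sgDelays,
        minInterMixDelay, minClientMixDelay);
if (replyTimestamp < tsMin) {
    dropMessage(); // the reply claims to have arrived earlier than physically possible
}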