List of usage examples for java.lang Long MAX_VALUE
long MAX_VALUE
To view the source code for java.lang Long MAX_VALUE, click the Source link.
From source file:com.linkedin.pinot.core.realtime.converter.stats.RealtimeNoDictionaryColStatistics.java
/**
 * Computes the min and max long value over the given row ids and stores the
 * results in {@code _minValue} / {@code _maxValue} as boxed Longs.
 *
 * @param rows row ids to read values for; {@code _numDocIds} entries are used
 */
private void computeLongMinMax(int[] rows) {
    // Gather the long values for the requested rows into a local buffer.
    long[] values = new long[_numDocIds];
    _blockValSet.getLongValues(rows, 0, _numDocIds, values, 0);

    // Identities chosen so that any real value updates both bounds.
    long min = Long.MAX_VALUE;
    long max = Long.MIN_VALUE;
    for (int i = 0; i < _numDocIds; i++) {
        min = Math.min(min, values[i]);
        max = Math.max(max, values[i]);
    }
    _minValue = Long.valueOf(min);
    _maxValue = Long.valueOf(max);
}
From source file:com.liferay.example.servicebuilder.extdb.service.impl.UserLoginLocalServiceImpl.java
/** * updateUserLogin: Updates the user login record with the given info. * @param userId User who logged in.//from www. j ava 2 s . co m * @param loginDate Date when the user logged in. */ public void updateUserLogin(final long userId, final Date loginDate) { UserLogin login = null; // first try to get the existing record for the user try { login = getUserLogin(userId); } catch (PortalException e) { logger.error("Error getting user login for user id " + userId, e); } if (login == null) { // user has never logged in before, need a new record if (logger.isDebugEnabled()) logger.debug("User " + userId + " has never logged in before."); // create a new record login = createUserLogin(userId); // update the login date login.setLastLogin(loginDate); // initialize the values login.setTotalLogins(1); login.setShortestTimeBetweenLogins(Long.MAX_VALUE); login.setLongestTimeBetweenLogins(0); // add the login addUserLogin(login); } else { // user has logged in before, just need to update record. if (logger.isDebugEnabled()) logger.debug("User " + userId + " has logged in before, updating the record."); // increment the logins count login.setTotalLogins(login.getTotalLogins() + 1); // determine the duration time between the current and last login long duration = loginDate.getTime() - login.getLastLogin().getTime(); // if this duration is longer than last, update the longest duration. if (duration > login.getLongestTimeBetweenLogins()) { login.setLongestTimeBetweenLogins(duration); } // if this duration is shorter than last, update the shortest duration. if (duration < login.getShortestTimeBetweenLogins()) { login.setShortestTimeBetweenLogins(duration); } // update the last login timestamp login.setLastLogin(loginDate); // update the record updateUserLogin(login); } }
From source file:info.archinnov.achilles.test.integration.tests.EntityWithTypeTransformerIT.java
@Test public void should_insert_and_find_entity_with_null_transformed_types() throws Exception { //Given//from ww w .java 2 s.c o m Long id = RandomUtils.nextLong(0, Long.MAX_VALUE); Long longValue = RandomUtils.nextLong(0, Long.MAX_VALUE); final EntityWithTypeTransformer entity = new EntityWithTypeTransformer(); entity.setId(id); entity.setLongToString(longValue); manager.insert(entity); //When final EntityWithTypeTransformer found = manager.find(EntityWithTypeTransformer.class, id); //Then assertThat(found.getLongToString()).isEqualTo(longValue); assertThat(found.getMyList()).isNull(); assertThat(found.getMySet()).isNull(); assertThat(found.getKeyMap()).isNull(); assertThat(found.getValueMap()).isNull(); assertThat(found.getKeyValueMap()).isNull(); }
From source file:com.linkedin.drelephant.mapreduce.MapReduceMetricsAggregator.java
@Override public void aggregate(HadoopApplicationData hadoopData) { MapReduceApplicationData data = (MapReduceApplicationData) hadoopData; long mapTaskContainerSize = getMapContainerSize(data); long reduceTaskContainerSize = getReducerContainerSize(data); int reduceTaskSlowStartPercentage = (int) (Double .parseDouble(data.getConf().getProperty(REDUCER_SLOW_START_CONFIG)) * 100); //overwrite reduceTaskSlowStartPercentage to 100%. TODO: make use of the slow start percent reduceTaskSlowStartPercentage = 100; mapTasks = new TaskLevelAggregatedMetrics(data.getMapperData(), mapTaskContainerSize, data.getStartTime()); long reduceIdealStartTime = mapTasks.getNthPercentileFinishTime(reduceTaskSlowStartPercentage); // Mappers list is empty if (reduceIdealStartTime == -1) { // ideal start time for reducer is infinite since it cannot start reduceIdealStartTime = Long.MAX_VALUE; }// w w w . j av a 2 s . c o m reduceTasks = new TaskLevelAggregatedMetrics(data.getReducerData(), reduceTaskContainerSize, reduceIdealStartTime); _hadoopAggregatedData.setResourceUsed(mapTasks.getResourceUsed() + reduceTasks.getResourceUsed()); _hadoopAggregatedData.setTotalDelay(mapTasks.getDelay() + reduceTasks.getDelay()); _hadoopAggregatedData.setResourceWasted(mapTasks.getResourceWasted() + reduceTasks.getResourceWasted()); }
From source file:info.archinnov.achilles.it.TestEntityWithStaticColumn.java
@Test public void should_insert() throws Exception { //Given/*from www .ja v a 2s. c om*/ final long id = RandomUtils.nextLong(0L, Long.MAX_VALUE); final UUID uuid = UUIDs.timeBased(); final EntityWithStaticColumn entity = new EntityWithStaticColumn(id, uuid, "static_val", "val"); //When manager.crud().insert(entity).execute(); //Then final Row actual = session .execute("SELECT * FROM entitywithstaticcolumn WHERE id = " + id + " AND uuid = " + uuid).one(); assertThat(actual).isNotNull(); assertThat(actual.getString("static_col")).isEqualTo("static_val"); assertThat(actual.getString("value")).isEqualTo("val"); }
From source file:com.fusesource.forge.jmstest.probe.jmx.JMXProbe.java
public void setObjectNameString(String objectName) { try {// w w w. j a va 2s .c o m this.objectName = new ObjectName(objectName); } catch (Exception e) { log().error("Error seting ObjectName for Probe: " + objectName); setActive(false); lastException = Long.MAX_VALUE; } }
From source file:com.tesora.dve.common.TestDataGenerator.java
protected Object getColumnValue(ColumnMetadata cm) { Object cv = null;/*from www . ja va2 s . c o m*/ Calendar cal = Calendar.getInstance(); switch (cm.getDataType()) { case Types.BIT: case Types.BOOLEAN: cv = Boolean.TRUE; break; case Types.BIGINT: cv = Long.MAX_VALUE; break; case Types.CHAR: case Types.VARCHAR: cv = StringUtils.left(baseString, cm.getSize()); break; case Types.SMALLINT: cv = Short.MAX_VALUE; break; case Types.TINYINT: cv = Byte.MAX_VALUE; break; case Types.INTEGER: cv = Integer.MAX_VALUE; break; case Types.DOUBLE: cv = new Double(1234.5678); // TODO need to handle s,p break; case Types.FLOAT: cv = new Float(123.56); // TODO need to handle s,p break; case Types.DECIMAL: cv = new BigDecimal("12345.6789"); // TODO need to handle s,p break; case Types.DATE: cal.setTimeInMillis(123456789); cal.set(Calendar.HOUR_OF_DAY, 0); cal.set(Calendar.MINUTE, 0); cal.set(Calendar.SECOND, 0); cal.set(Calendar.MILLISECOND, 0); cv = cal.getTime(); break; case Types.TIMESTAMP: cal.setTimeInMillis(123456789); cv = cal.getTime(); break; case Types.TIME: cv = new Time(123456789); break; default: break; } return cv; }
From source file:au.id.wolfe.fxassetman.server.dao.AssetTypeDaoTest.java
/**
 * Looking up an id that cannot exist must raise {@code NoResultException}.
 */
@Test(expected = NoResultException.class)
public void testFindByIdNoResultExceptionExpected() {
    assetTypeDao.findById(Long.MAX_VALUE);
}
From source file:com.github.ukase.service.BulkRenderer.java
/**
 * Deletes the rendered PDF for {@code id} once its age exceeds the ttl, and
 * removes the per-document directory if that leaves it empty.
 *
 * @param id      identifier of the rendered document
 * @param created creation timestamp in epoch millis; null or Long.MAX_VALUE
 *                marks an entry that never expires
 */
private void checkTTL(String id, Long created) {
    // Null / MAX_VALUE mark entries that are kept forever.
    if (created == null || created == Long.MAX_VALUE) {
        return;
    }
    // Still within its time-to-live: nothing to do.
    if (System.currentTimeMillis() - created <= ttl) {
        return;
    }

    final File pdf = getPdfFile(id);
    if (!pdf.delete()) {
        log.warn("PDF for " + id + " weren't deleted");
    }
    renderedPDFs.remove(id);

    // Clean up the parent directory when it has been left empty.
    final File subDir = pdf.getParentFile();
    final File[] files = subDir.listFiles();
    if (files != null && files.length == 0) {
        log.info(subDir.getName() + " removing... " + (subDir.delete() ? "success" : "failed"));
    }
}
From source file:com.intel.databackend.api.FirstLastTimestampService.java
/**
 * Fetches at most one observation for the component by scanning the entire
 * time range [0, Long.MAX_VALUE).
 *
 * @param component component identifier to scan
 * @param first     ordering flag forwarded to the scan; presumably selects
 *                  the oldest vs. newest row — confirm against hbase.scan
 * @return an array containing at most one observation
 */
private Observation[] getTopObservation(String component, boolean first) {
    final int limit = 1; // cap the scan at a single row
    return hbase.scan(accountId, component, 0L, Long.MAX_VALUE, false, null, first, limit);
}