List of usage examples for `java.util.concurrent.TimeUnit.HOURS`.
TimeUnit.HOURS
To view the source code for `java.util.concurrent.TimeUnit.HOURS`, click the Source Link.
From source file:at.becast.youploader.youtube.GuiUploadEvent.java
/**
 * Upload progress callback, invoked as bytes of the video upload are read.
 * Refreshes the progress bar, persists progress to the local DB, and updates
 * the speed/ETA labels — all rate-limited by wall-clock checks below.
 * The {@code length} parameter is unused here; only {@code position} (bytes
 * uploaded so far) and {@code size} (total bytes) drive the display.
 * NOTE(review): assumes this is called on a thread that may touch Swing
 * components directly — confirm against the caller.
 */
@Override public void onRead(long length, long position, long size) {
    long now = System.currentTimeMillis();
    // Throttle UI refreshes to at most once every 2 seconds (this.step is the
    // timestamp of the previous refresh).
    if (this.step < now - 2000) {
        frame.getProgressBar().setString(String.format("%6.2f%%", (float) position / size * 100));
        frame.getProgressBar().setValue((int) ((float) position / size * 100));
        frame.getProgressBar().revalidate();
        frame.revalidate();
        frame.repaint();
        // Persist progress to the database at most once every 10 seconds,
        // so an interrupted upload can be resumed near where it stopped.
        if (lastdb < now - 10000) {
            SQLite.updateUploadProgress(frame.upload_id, position);
            this.lastdb = now;
        }
        this.step = now;
        // Deltas since the previous refresh, used for the speed estimate.
        this.dataDelta = position - this.lastdata;
        this.timeDelta = this.step - this.lasttime;
        this.lasttime = this.step;
        this.lastdata = position;
        // Bytes per second; "+1"s guard against division by zero and a zero
        // speed (which would make the ETA division below blow up).
        long speed = this.dataDelta / (this.timeDelta + 1) * 1000 + 1;
        // Estimated remaining time in milliseconds.
        long duration = ((size - position) / speed) * 1000;
        // Render as HH:MM:SS by subtracting the larger unit's contribution.
        String time = String.format("%02d:%02d:%02d", TimeUnit.MILLISECONDS.toHours(duration),
                TimeUnit.MILLISECONDS.toMinutes(duration)
                        - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(duration)),
                TimeUnit.MILLISECONDS.toSeconds(duration)
                        - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(duration)));
        frame.getLblKbs().setText(FileUtils.byteCountToDisplaySize(speed) + "/s");
        frame.getLblETA().setText(time);
        if (Main.debug) {
            LOG.debug("Took {} ms to refresh, Uploaded {} bytes, Speed {} ",
                    System.currentTimeMillis() - now, this.dataDelta, FileUtils.byteCountToDisplaySize(speed));
        }
    }
}
From source file:DIA_Umpire_SE.DIA_Umpire_SE.java
/**
 * DIA-Umpire signal-extraction entry point.
 * <p>
 * Usage: {@code java -jar -Xmx8G DIA_Umpire_SE.jar mzMXL_file diaumpire_se.params [-f]}
 * — reads instrument/extraction settings from the parameter file, then runs
 * signal extraction on the given spectra file ({@code -f} only fixes the DIA
 * scan index instead of processing).
 *
 * @param args [0] spectra file path, [1] parameter file path, optional [2] "-f"
 * @throws Exception any processing failure is logged and rethrown
 */
// NOTE(review): the throws clause lists specific exceptions plus Exception,
// which subsumes them all — left as-is here.
public static void main(String[] args) throws InterruptedException, FileNotFoundException, ExecutionException, IOException, ParserConfigurationException, DataFormatException, SAXException, Exception {
    System.out.println(
            "=================================================================================================");
    // NOTE(review): "singal" is a typo in this user-facing banner ("signal");
    // fixing it is a behavior change, so it is only flagged here.
    System.out.println(
            "DIA-Umpire singal extraction analysis (version: " + UmpireInfo.GetInstance().Version + ")");
    if (args.length < 2 || args.length > 3) {
        System.out.println(
                "command format error, the correct format is: java -jar -Xmx8G DIA_Umpire_SE.jar mzMXL_file diaumpire_se.params");
        System.out.println(
                "To fix DIA setting, use : java -jar -Xmx8G DIA_Umpire_SE.jar mzMXL_file diaumpire_se.params -f");
        return;
    }
    try {
        //Define logger level for console
        ConsoleLogger.SetConsoleLogger(Level.INFO);
        //Define logger level and file path for text log file
        ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_se.log");
    } catch (Exception e) {
        // NOTE(review): logger-setup failure is deliberately ignored so the
        // analysis can still run without file logging.
    }
    boolean Fix = false;
    boolean Resume = false;
    if (args.length == 3 && args[2].equals("-f")) {
        Fix = true;
    }
    String parameterfile = args[1];
    String MSFilePath = args[0];
    Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version);
    Logger.getRootLogger().info("Parameter file:" + parameterfile);
    Logger.getRootLogger().info("Spectra file:" + MSFilePath);
    // NOTE(review): reader is never closed — resource leak; a try-with-resources
    // would fix it but is a code change, so it is only flagged here.
    BufferedReader reader = new BufferedReader(new FileReader(parameterfile));
    String line = "";
    // Defaults below are overridden by the parameter file where present.
    InstrumentParameter param = new InstrumentParameter(InstrumentParameter.InstrumentType.TOF5600);
    param.DetermineBGByID = false;
    param.EstimateBG = true;
    int NoCPUs = 2;
    SpectralDataType.DataType dataType = SpectralDataType.DataType.DIA_F_Window;
    String WindowType = "";
    int WindowSize = 25;
    ArrayList<XYData> WindowList = new ArrayList<>();
    boolean ExportPrecursorPeak = false;
    boolean ExportFragmentPeak = false;
    //<editor-fold defaultstate="collapsed" desc="Read parameter file">
    while ((line = reader.readLine()) != null) {
        Logger.getRootLogger().info(line);
        if (!"".equals(line) && !line.startsWith("#")) {
            //System.out.println(line);
            // Variable-window section: tab-separated "start<TAB>end" pairs
            // between the begin/end markers.
            if (line.equals("==window setting begin")) {
                while (!(line = reader.readLine()).equals("==window setting end")) {
                    if (!"".equals(line)) {
                        WindowList.add(new XYData(Float.parseFloat(line.split("\t")[0]),
                                Float.parseFloat(line.split("\t")[1])));
                    }
                }
                continue;
            }
            if (line.split("=").length < 2) {
                continue;
            }
            // "para." keys are a legacy alias for "SE." keys.
            String type = line.split("=")[0].trim();
            if (type.startsWith("para.")) {
                type = type.replace("para.", "SE.");
            }
            String value = line.split("=")[1].trim();
            switch (type) {
            case "Thread": { NoCPUs = Integer.parseInt(value); break; }
            case "ExportPrecursorPeak": { ExportPrecursorPeak = Boolean.parseBoolean(value); break; }
            case "ExportFragmentPeak": { ExportFragmentPeak = Boolean.parseBoolean(value); break; }
            //<editor-fold defaultstate="collapsed" desc="instrument parameters">
            case "RPmax": { param.PrecursorRank = Integer.parseInt(value); break; }
            case "RFmax": { param.FragmentRank = Integer.parseInt(value); break; }
            case "CorrThreshold": { param.CorrThreshold = Float.parseFloat(value); break; }
            case "DeltaApex": { param.ApexDelta = Float.parseFloat(value); break; }
            case "RTOverlap": { param.RTOverlapThreshold = Float.parseFloat(value); break; }
            case "BoostComplementaryIon": { param.BoostComplementaryIon = Boolean.parseBoolean(value); break; }
            case "AdjustFragIntensity": { param.AdjustFragIntensity = Boolean.parseBoolean(value); break; }
            case "SE.MS1PPM": { param.MS1PPM = Float.parseFloat(value); break; }
            case "SE.MS2PPM": { param.MS2PPM = Float.parseFloat(value); break; }
            case "SE.SN": { param.SNThreshold = Float.parseFloat(value); break; }
            case "SE.MS2SN": { param.MS2SNThreshold = Float.parseFloat(value); break; }
            case "SE.MinMSIntensity": { param.MinMSIntensity = Float.parseFloat(value); break; }
            case "SE.MinMSMSIntensity": { param.MinMSMSIntensity = Float.parseFloat(value); break; }
            case "SE.MinRTRange": { param.MinRTRange = Float.parseFloat(value); break; }
            // MaxNoPeakCluster / MinNoPeakCluster intentionally set both the
            // MS1 and MS2 limits; the MS2 value can then be overridden below.
            case "SE.MaxNoPeakCluster": { param.MaxNoPeakCluster = Integer.parseInt(value); param.MaxMS2NoPeakCluster = Integer.parseInt(value); break; }
            case "SE.MinNoPeakCluster": { param.MinNoPeakCluster = Integer.parseInt(value); param.MinMS2NoPeakCluster = Integer.parseInt(value); break; }
            case "SE.MinMS2NoPeakCluster": { param.MinMS2NoPeakCluster = Integer.parseInt(value); break; }
            case "SE.MaxCurveRTRange": { param.MaxCurveRTRange = Float.parseFloat(value); break; }
            case "SE.Resolution": { param.Resolution = Integer.parseInt(value); break; }
            case "SE.RTtol": { param.RTtol = Float.parseFloat(value); break; }
            case "SE.NoPeakPerMin": { param.NoPeakPerMin = Integer.parseInt(value); break; }
            case "SE.StartCharge": { param.StartCharge = Integer.parseInt(value); break; }
            case "SE.EndCharge": { param.EndCharge = Integer.parseInt(value); break; }
            case "SE.MS2StartCharge": { param.MS2StartCharge = Integer.parseInt(value); break; }
            case "SE.MS2EndCharge": { param.MS2EndCharge = Integer.parseInt(value); break; }
            case "SE.NoMissedScan": { param.NoMissedScan = Integer.parseInt(value); break; }
            case "SE.Denoise": { param.Denoise = Boolean.valueOf(value); break; }
            case "SE.EstimateBG": { param.EstimateBG = Boolean.valueOf(value); break; }
            case "SE.RemoveGroupedPeaks": { param.RemoveGroupedPeaks = Boolean.valueOf(value); break; }
            case "SE.MinFrag": { param.MinFrag = Integer.parseInt(value); break; }
            case "SE.IsoPattern": { param.IsoPattern = Float.valueOf(value); break; }
            case "SE.StartRT": { param.startRT = Float.valueOf(value); break; }
            case "SE.EndRT": { param.endRT = Float.valueOf(value); break; }
            case "SE.RemoveGroupedPeaksRTOverlap": { param.RemoveGroupedPeaksRTOverlap = Float.valueOf(value); break; }
            case "SE.RemoveGroupedPeaksCorr": { param.RemoveGroupedPeaksCorr = Float.valueOf(value); break; }
            case "SE.MinMZ": { param.MinMZ = Float.valueOf(value); break; }
            case "SE.MinPrecursorMass": { param.MinPrecursorMass = Float.valueOf(value); break; }
            case "SE.MaxPrecursorMass": { param.MaxPrecursorMass = Float.valueOf(value); break; }
            case "SE.IsoCorrThreshold": { param.IsoCorrThreshold = Float.valueOf(value); break; }
            case "SE.MassDefectFilter": { param.MassDefectFilter = Boolean.parseBoolean(value); break; }
            case "SE.MassDefectOffset": { param.MassDefectOffset = Float.valueOf(value); break; }
            //</editor-fold>
            // Acquisition scheme: fixed SWATH windows, variable windows,
            // multiplexed (MSX), or MSe.
            case "WindowType": {
                WindowType = value;
                switch (WindowType) {
                case "SWATH": { dataType = SpectralDataType.DataType.DIA_F_Window; break; }
                case "V_SWATH": { dataType = SpectralDataType.DataType.DIA_V_Window; break; }
                case "MSX": { dataType = SpectralDataType.DataType.MSX; break; }
                case "MSE": { dataType = SpectralDataType.DataType.MSe; break; }
                }
                break;
            }
            case "WindowSize": { WindowSize = Integer.parseInt(value); break; }
            }
        }
    }
    //</editor-fold>
    try {
        File MSFile = new File(MSFilePath);
        if (MSFile.exists()) {
            long time = System.currentTimeMillis();
            Logger.getRootLogger().info(
                    "=================================================================================================");
            Logger.getRootLogger().info("Processing " + MSFilePath + "....");
            //Initialize a DIA file data structure
            DIAPack DiaFile = new DIAPack(MSFile.getAbsolutePath(), NoCPUs);
            // NOTE(review): Resume is always false here — it is declared above
            // but never set; presumably a leftover from a resume feature.
            DiaFile.Resume = Resume;
            DiaFile.SetDataType(dataType);
            DiaFile.SetParameter(param);
            //Set DIA isolation window setting
            if (dataType == SpectralDataType.DataType.DIA_F_Window) {
                DiaFile.SetWindowSize(WindowSize);
            } else if (dataType == SpectralDataType.DataType.DIA_V_Window) {
                for (XYData window : WindowList) {
                    DiaFile.AddVariableWindow(window);
                }
            }
            DiaFile.SaveDIASetting();
            DiaFile.SaveParams();
            // "-f" mode: only repair the scan index, then exit.
            if (Fix) {
                DiaFile.FixScanidx();
                return;
            }
            DiaFile.ExportPrecursorPeak = ExportPrecursorPeak;
            DiaFile.ExportFragmentPeak = ExportFragmentPeak;
            Logger.getRootLogger().info("Module A: Signal extraction");
            //Start DIA signal extraction process to generate pseudo MS/MS files
            DiaFile.process();
            time = System.currentTimeMillis() - time;
            Logger.getRootLogger().info(MSFilePath + " processed time:"
                    + String.format("%d hour, %d min, %d sec", TimeUnit.MILLISECONDS.toHours(time),
                            TimeUnit.MILLISECONDS.toMinutes(time)
                                    - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)),
                            TimeUnit.MILLISECONDS.toSeconds(time)
                                    - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time))));
        } else {
            // NOTE(review): the "?" in this message looks like a stray
            // character; left as-is (runtime string).
            throw new RuntimeException("file: " + MSFile + "? does not exist!");
        }
        Logger.getRootLogger().info("Job complete");
        Logger.getRootLogger().info(
                "=================================================================================================");
    } catch (Exception e) {
        // Log the full stack trace, then rethrow so the JVM exits non-zero.
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e));
        throw e;
    }
}
From source file:org.andstatus.app.msg.MessageViewItem.java
@Override public DuplicationLink duplicates(MessageViewItem other) { DuplicationLink link = DuplicationLink.NONE; if (other == null) { return link; }/*w w w .j a v a 2 s.c o m*/ if (getMsgId() == other.getMsgId()) { link = duplicatesByFavoritedAndReblogged(other); } if (link == DuplicationLink.NONE) { if (Math.abs(createdDate - other.createdDate) < TimeUnit.HOURS.toMillis(24)) { String thisBody = getCleanedBody(body); String otherBody = getCleanedBody(other.body); if (thisBody.length() < MIN_LENGTH_TO_COMPARE || otherBody.length() < MIN_LENGTH_TO_COMPARE) { // Too short to compare } else if (thisBody.equals(otherBody)) { if (createdDate == other.createdDate) { link = duplicatesByFavoritedAndReblogged(other); } else if (createdDate < other.createdDate) { link = DuplicationLink.IS_DUPLICATED; } else { link = DuplicationLink.DUPLICATES; } } else if (thisBody.contains(otherBody)) { link = DuplicationLink.DUPLICATES; } else if (otherBody.contains(thisBody)) { link = DuplicationLink.IS_DUPLICATED; } } } return link; }
From source file:com.linkedin.pinot.server.api.restlet.TableSizeResourceTest.java
/**
 * Builds a Pinot index segment from the bundled Avro test data and registers
 * it with the server instance's data manager, so table-size tests have a
 * real segment to measure.
 *
 * @throws Exception if segment generation or loading fails
 */
public void setupSegment() throws Exception {
    final String filePath = TestUtils.getFileFromResourceUrl(
            SegmentV1V2ToV3FormatConverter.class.getClassLoader().getResource(AVRO_DATA));
    // intentionally changed this to TimeUnit.HOURS to make it non-default for testing
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.HOURS, "testTable");
    config.setSegmentNamePostfix("1");
    config.setTimeColumnName("daysSinceEpoch");
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
    // Load the freshly built segment memory-mapped and hand it to the server.
    File segmentDirectory = new File(INDEX_DIR, driver.getSegmentName());
    indexSegment = ColumnarSegmentLoader.load(segmentDirectory, ReadMode.mmap);
    serverInstance.getInstanceDataManager().addSegment(indexSegment.getSegmentMetadata(), null, null);
}
From source file:org.apache.jackrabbit.oak.plugins.segment.CompactionAndCleanupIT.java
/**
 * Verifies that segment-store compaction does not clone binaries: after
 * removing a 5 MB blob, compaction with a "1 hour" CLEAN_OLD strategy keeps
 * the store size bounded, and once the age threshold is dropped to zero the
 * repeated compact/cleanup cycle converges (no further gain) without losing
 * the surviving blob's data.
 */
@Test
public void compactionNoBinaryClone() throws Exception {
    // 2MB data, 5MB blob
    final int blobSize = 5 * 1024 * 1024;
    final int dataNodes = 10000;
    // really long time span, no binary cloning
    FileStore fileStore = FileStore.builder(getFileStoreFolder()).withMaxFileSize(1).build();
    final SegmentNodeStore nodeStore = SegmentNodeStore.builder(fileStore).build();
    CompactionStrategy custom = new CompactionStrategy(false, false, CLEAN_OLD, TimeUnit.HOURS.toMillis(1),
            (byte) 0) {
        @Override
        public boolean compacted(@Nonnull Callable<Boolean> setHead) throws Exception {
            return nodeStore.locked(setHead);
        }
    };
    // Use in memory compaction map as gains asserted later on
    // do not take additional space of the compaction map into consideration
    custom.setPersistCompactionMap(false);
    fileStore.setCompactionStrategy(custom);
    // 1a. Create a bunch of data
    NodeBuilder extra = nodeStore.getRoot().builder();
    NodeBuilder content = extra.child("content");
    for (int i = 0; i < dataNodes; i++) {
        NodeBuilder c = content.child("c" + i);
        // NOTE(review): the inner loop rewrites the same property name ("p" + i)
        // 1000 times — presumably intended as write churn; j is unused. Confirm.
        for (int j = 0; j < 1000; j++) {
            c.setProperty("p" + i, "v" + i);
        }
    }
    nodeStore.merge(extra, EmptyHook.INSTANCE, CommitInfo.EMPTY);
    // ----
    final long dataSize = fileStore.size();
    log.debug("File store dataSize {}", byteCountToDisplaySize(dataSize));
    try {
        // 1. Create a property with 5 MB blob
        NodeBuilder builder = nodeStore.getRoot().builder();
        builder.setProperty("a1", createBlob(nodeStore, blobSize));
        builder.setProperty("b", "foo");
        nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        log.debug("File store pre removal {}, expecting {}", byteCountToDisplaySize(fileStore.size()),
                byteCountToDisplaySize(blobSize + dataSize));
        assertEquals(mb(blobSize + dataSize), mb(fileStore.size()));
        // 2. Now remove the property
        builder = nodeStore.getRoot().builder();
        builder.removeProperty("a1");
        nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        // Size remains same, no cleanup happened yet
        log.debug("File store pre compaction {}, expecting {}", byteCountToDisplaySize(fileStore.size()),
                byteCountToDisplaySize(blobSize + dataSize));
        assertEquals(mb(blobSize + dataSize), mb(fileStore.size()));
        // 3. Compact
        assertTrue(fileStore.maybeCompact(false));
        // Size doesn't shrink: ran compaction with a '1 Hour' cleanup
        // strategy
        assertSize("post compaction", fileStore.size(), blobSize + dataSize, blobSize + 2 * dataSize);
        // 4. Add some more property to flush the current TarWriter
        builder = nodeStore.getRoot().builder();
        builder.setProperty("a2", createBlob(nodeStore, blobSize));
        nodeStore.merge(builder, EmptyHook.INSTANCE, CommitInfo.EMPTY);
        // Size is double
        assertSize("pre cleanup", fileStore.size(), 2 * blobSize + dataSize, 2 * blobSize + 2 * dataSize);
        // 5. Cleanup, expecting store size:
        // no data content =>
        // fileStore.size() == blobSize
        // some data content =>
        // fileStore.size() in [blobSize + dataSize, blobSize + 2 x dataSize]
        assertTrue(fileStore.maybeCompact(false));
        fileStore.cleanup();
        assertSize("post cleanup", fileStore.size(), 0, blobSize + 2 * dataSize);
        // refresh the ts ref, to simulate a long wait time
        custom.setOlderThan(0);
        TimeUnit.MILLISECONDS.sleep(5);
        // Iterate compact/cleanup until no further gain (bounded at 3 rounds).
        boolean needsCompaction = true;
        for (int i = 0; i < 3 && needsCompaction; i++) {
            needsCompaction = fileStore.maybeCompact(false);
            fileStore.cleanup();
        }
        // gain is finally 0%
        assertFalse(fileStore.maybeCompact(false));
        // no data loss happened
        byte[] blob = ByteStreams
                .toByteArray(nodeStore.getRoot().getProperty("a2").getValue(Type.BINARY).getNewStream());
        assertEquals(blobSize, blob.length);
    } finally {
        fileStore.close();
    }
}
From source file:com.squid.kraken.v4.api.core.customer.AuthServiceImpl.java
/**
 * Starts the access-token garbage collector: a single-threaded scheduled
 * executor that runs a {@code ModelGC} pass immediately and then once every
 * hour thereafter.
 */
public void initGC() {
    modelGC = Executors.newSingleThreadScheduledExecutor();
    final ModelGC<AccessToken, AccessTokenPK> collector =
            new ModelGC<AccessToken, AccessTokenPK>(0, this, AccessToken.class);
    // Initial delay 0 (run now), then a fixed 1-hour delay between runs.
    modelGCThread = modelGC.scheduleWithFixedDelay(collector, 0, 1, TimeUnit.HOURS);
}
From source file:com.linkedin.pinot.server.api.resources.TableSizeResourceTest.java
/**
 * Builds a Pinot index segment from the bundled Avro test data and registers
 * it with the server instance's data manager, so table-size tests have a
 * real segment to measure.
 *
 * @throws Exception if segment generation or loading fails
 */
public void setupSegment() throws Exception {
    final String filePath = TestUtils.getFileFromResourceUrl(
            SegmentV1V2ToV3FormatConverter.class.getClassLoader().getResource(AVRO_DATA));
    // intentionally changed this to TimeUnit.HOURS to make it non-default for testing
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.HOURS, TABLE_NAME);
    config.setSegmentNamePostfix("1");
    config.setTimeColumnName("daysSinceEpoch");
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
    // Load the freshly built segment memory-mapped and hand it to the server.
    File segmentDirectory = new File(INDEX_DIR, driver.getSegmentName());
    indexSegment = ColumnarSegmentLoader.load(segmentDirectory, ReadMode.mmap);
    serverInstance.getInstanceDataManager().addSegment(indexSegment.getSegmentMetadata(), null, null);
}
From source file:org.aroyomc.nexus.strikes.NexusStrikesMain.java
/**
 * Writes the default configuration when no valid config file exists yet.
 * Populates the strike-action ladder (warn, mute, kick, temp-ban, ban,
 * command) and the database defaults, then persists via {@code saveConfig()}.
 * <p>
 * Fix: the original swallowed {@link IOException} silently, making a broken
 * config directory invisible; the failure is now logged (behavior otherwise
 * unchanged — it still does not propagate).
 */
@Override
public void saveDefaultConfig() {
    try {
        if (!configFile.exists()
                || BeanManager.load(BeanManager.ConfigType.YAML, configFile, MainConfigBean.class) == null) {
            configBean = new MainConfigBean();
            applyDefaultStrikeActions(configBean);
            applyDefaultDatabase(configBean);
            saveConfig();
        }
    } catch (IOException ex) {
        // Surface the failure instead of swallowing it; fully qualified to
        // avoid touching the import block.
        java.util.logging.Logger.getLogger(getClass().getName())
                .log(java.util.logging.Level.WARNING, "Could not write default configuration", ex);
    }
}

/** Populates the default escalation ladder for strikes 1 through 6. */
private void applyDefaultStrikeActions(MainConfigBean bean) {
    bean.setStrikeActions(new HashMap<Integer, MainConfigBean.Action>());
    // Strike 1: warn.
    bean.getStrikeActions().put(1, new MainConfigBean.Action());
    bean.getStrikeActions().get(1).setType(MainConfigBean.Action.ActionType.WARN);
    // Strike 2: 10-minute mute.
    bean.getStrikeActions().put(2, new MainConfigBean.Action());
    bean.getStrikeActions().get(2).setType(MainConfigBean.Action.ActionType.MUTE);
    bean.getStrikeActions().get(2).setTimeUnit(TimeUnit.MINUTES);
    bean.getStrikeActions().get(2).setDuration(10.0);
    // Strike 3: kick.
    bean.getStrikeActions().put(3, new MainConfigBean.Action());
    bean.getStrikeActions().get(3).setType(MainConfigBean.Action.ActionType.KICK);
    // Strike 4: 5-hour temporary ban.
    bean.getStrikeActions().put(4, new MainConfigBean.Action());
    bean.getStrikeActions().get(4).setType(MainConfigBean.Action.ActionType.TEMPBAN);
    bean.getStrikeActions().get(4).setTimeUnit(TimeUnit.HOURS);
    bean.getStrikeActions().get(4).setDuration(5.0);
    // Strike 5: permanent ban.
    bean.getStrikeActions().put(5, new MainConfigBean.Action());
    bean.getStrikeActions().get(5).setType(MainConfigBean.Action.ActionType.BAN);
    // Strike 6: custom command with %guilty%/%police% placeholders.
    bean.getStrikeActions().put(6, new MainConfigBean.Action());
    bean.getStrikeActions().get(6).setType(MainConfigBean.Action.ActionType.COMMAND);
    bean.getStrikeActions().get(6).setCommand("tell %guilty% Naughty-naughty! Sent by %police%");
}

/**
 * Populates sample database settings. These are placeholders the admin is
 * expected to edit (note the sample credentials — never ship real ones here).
 */
private void applyDefaultDatabase(MainConfigBean bean) {
    bean.setDatabase(new MainConfigBean.Database());
    bean.getDatabase().setType(MainConfigBean.Database.Type.FILE);
    bean.getDatabase().setUrl("jdbc:mysql://localhost:3306/mcsrv");
    bean.getDatabase().setRedisPort(6380);
    bean.getDatabase().setDriverClass("com.mysql.jdbc.Driver");
    bean.getDatabase().setUsername("root");
    bean.getDatabase().setPassword("v3rys3cr3t");
    bean.getDatabase().setMaxRedisConnections(-1);
}
From source file:org.schedoscope.metascope.util.HTMLUtil.java
/**
 * Formats the elapsed time between two epoch-millisecond timestamps as
 * {@code HH:MM:SS}.
 *
 * @param start start timestamp in milliseconds; 0 means "not available"
 * @param end   end timestamp in milliseconds; 0 means "not available"
 * @return the duration formatted as {@code %02d:%02d:%02d}, or
 *         {@code NO_DATE_AVAILABLE} if either timestamp is 0
 */
public String getDuration(long start, long end) {
    // A zero timestamp means the corresponding event never happened.
    if (end == 0 || start == 0) {
        return NO_DATE_AVAILABLE;
    }
    long millis = end - start;
    long hours = TimeUnit.MILLISECONDS.toHours(millis);
    // "% 60" replaces the original subtract-the-larger-unit arithmetic;
    // results are identical, including for negative spans.
    long minutes = TimeUnit.MILLISECONDS.toMinutes(millis) % 60;
    long seconds = TimeUnit.MILLISECONDS.toSeconds(millis) % 60;
    return String.format("%02d:%02d:%02d", hours, minutes, seconds);
}
From source file:org.dcache.util.histograms.TimeseriesHistogramTest.java
/**
 * Round-trip test: configure a 48-bin hourly queued-movers histogram, store
 * it, rebuild a fresh histogram with the same configuration, and assert the
 * rebuilt histogram equals the stored original.
 */
@Test
public void rebuiltTimeseriesHistogramShouldBeTheSameAsOriginal() throws Exception {
    givenTimeseriesHistogram();
    // 48 hourly bins covering the last two days, highest bin = now.
    givenQueueCountValuesFor(48);
    givenBinUnitOf((double) TimeUnit.HOURS.toMillis(1));
    givenBinCountOf(48);
    givenBinLabelOf(TimeUnit.HOURS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("Queued Movers");
    givenHighestBinOf(getHoursInThePastFromNow(0));
    whenConfigureIsCalled();
    whenHistogramIsStored();
    // Rebuild from scratch; presumably the given* state set above is reused
    // by the second configure call — confirm against the test fixture.
    givenTimeseriesHistogram();
    whenConfigureIsCalled();
    assertThatOriginalHistogramEqualsStored();
}