List of usage examples for java.util.concurrent TimeUnit HOURS
TimeUnit HOURS
To view the source code for java.util.concurrent TimeUnit.HOURS, click the Source Link below.
From source file:com.linkedin.pinot.common.metadata.SegmentZKMetadataTest.java
/**
 * Builds a {@link ZNRecord} holding ZK metadata for a single OFFLINE test segment,
 * populated with fixed, well-known values used by the assertions in this test class.
 */
private ZNRecord getTestOfflineSegmentZNRecord() {
    final String segmentId = "testTable_O_3000_4000";
    final ZNRecord znRecord = new ZNRecord(segmentId);

    // Fields common to all segment types.
    znRecord.setSimpleField(CommonConstants.Segment.SEGMENT_NAME, segmentId);
    znRecord.setSimpleField(CommonConstants.Segment.TABLE_NAME, "testTable");
    znRecord.setSimpleField(CommonConstants.Segment.INDEX_VERSION, "v1");
    znRecord.setEnumField(CommonConstants.Segment.SEGMENT_TYPE, CommonConstants.Segment.SegmentType.OFFLINE);
    znRecord.setLongField(CommonConstants.Segment.START_TIME, 1000);
    znRecord.setLongField(CommonConstants.Segment.END_TIME, 2000);
    znRecord.setSimpleField(CommonConstants.Segment.TIME_UNIT, TimeUnit.HOURS.toString());
    znRecord.setLongField(CommonConstants.Segment.TOTAL_DOCS, 50000);
    znRecord.setLongField(CommonConstants.Segment.CRC, 54321);
    znRecord.setLongField(CommonConstants.Segment.CREATION_TIME, 1000);

    // OFFLINE-specific fields.
    znRecord.setSimpleField(CommonConstants.Segment.Offline.DOWNLOAD_URL, "http://localhost:8000/testTable_O_3000_4000");
    znRecord.setLongField(CommonConstants.Segment.Offline.PUSH_TIME, 4000);
    znRecord.setLongField(CommonConstants.Segment.Offline.REFRESH_TIME, 8000);
    return znRecord;
}
From source file:org.structr.ldap.LDAPService.java
/**
 * Reads the LDAP service configuration from the given properties, applying
 * defaults where a key is absent.
 */
@Override
public void initialize(final StructrServices services, final Properties config)
        throws ClassNotFoundException, InstantiationException, IllegalAccessException {

    // Sync interval defaults to two hours, expressed in milliseconds.
    final String defaultUpdateInterval = Long.toString(TimeUnit.HOURS.toMillis(2));
    this.updateInterval = Long.valueOf(config.getProperty(CONFIG_KEY_UPDATE_INTERVAL, defaultUpdateInterval));

    // Bind credentials have no defaults and may be null.
    this.binddn = config.getProperty(CONFIG_KEY_LDAP_BINDDN);
    this.secret = config.getProperty(CONFIG_KEY_LDAP_SECRET);

    // Connection settings.
    this.host = config.getProperty(CONFIG_KEY_LDAP_HOST, "localhost");
    this.port = Integer.valueOf(config.getProperty(CONFIG_KEY_LDAP_PORT, "389"));
    this.useSsl = "true".equals(config.getProperty(CONFIG_KEY_LDAP_SSL, "true"));

    // Search settings.
    this.baseDn = config.getProperty(CONFIG_KEY_LDAP_BASEDN, "ou=system");
    this.filter = config.getProperty(CONFIG_KEY_LDAP_FILTER, "(objectclass=*)");
    this.scope = config.getProperty(CONFIG_KEY_LDAP_SCOPE, "SUBTREE");
}
From source file:org.dcache.util.histograms.HistogramModelTest.java
/**
 * Verifies that an update on a timeframe histogram averages in the last value
 * rather than replacing it.
 */
@Test
public void updateOnTimeframeHistogramShouldAverageLastValue()
        throws NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException {
    // Given: a 48-bin timeframe histogram, one bin per hour, labelled with queue counts.
    givenTimeframeHistogram();
    givenQueueCountValuesFor(48);
    givenBinUnitOf((double) TimeUnit.HOURS.toMillis(1)); // bin width = 1 hour, in milliseconds
    givenBinCountOf(48);
    givenBinLabelOf(TimeUnit.HOURS.name());
    givenDataLabelOf("COUNT");
    givenHistogramTypeOf("Queued Movers");
    givenHighestBinOf(getHoursInThePastFromNow(0)); // most recent bin corresponds to "now"
    // When/Then: configuring and updating should average the last value.
    whenConfigureIsCalled();
    assertThatUpdateAveragesLastValue();
}
From source file:name.persistent.behaviours.ValidatingDomainSupport.java
private boolean startResolving(Integer days) throws QueryEvaluationException { if (days == null || days < 1) return false; int count = Math.max(1, countTargets()); Resolver resolver = new Resolver(this); synchronized (resolvers) { if (resolvers.containsKey(resolver.key)) { resolver = resolvers.get(resolver.key); } else {/* w w w . j a v a 2 s .c om*/ resolvers.put(resolver.key, resolver); } } int periods = (count + 99) / 100; // check 100 targets at a time long maxHours = TimeUnit.DAYS.toHours(days); int interval = Math.max(1, (int) maxHours / periods); resolver.schedule(interval, TimeUnit.HOURS, Math.max(100, count * 2)); return true; }
From source file:DIA_Umpire_Quant.DIA_Umpire_Quant.java
/**
 * Entry point for DIA-Umpire quantitation with targeted re-extraction.
 *
 * Pipeline: read the parameter file, build the protein master list from the combined
 * prot.xml, collect DIA files (mzXML/mzML), quantify each file from untargeted IDs,
 * optionally perform targeted re-extraction against an internal and/or external
 * spectral library, select peptides/fragments across the dataset, write report
 * tables and (optionally) SAINT input files.
 *
 * @param args single argument: path to the diaumpire_quant.params parameter file
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    System.out.println(
            "=================================================================================================");
    System.out.println("DIA-Umpire quantitation with targeted re-extraction analysis (version: "
            + UmpireInfo.GetInstance().Version + ")");
    if (args.length != 1) {
        System.out.println(
                "command format error, it should be like: java -jar -Xmx10G DIA_Umpire_Quant.jar diaumpire_quant.params");
        return;
    }
    try {
        ConsoleLogger.SetConsoleLogger(Level.INFO);
        ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_quant.log");
    } catch (Exception e) {
        // Logging setup failure is deliberately non-fatal.
    }
    try {
        Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version);
        Logger.getRootLogger().info("Parameter file:" + args[0]);
        // NOTE(review): reader is never closed; consider try-with-resources.
        BufferedReader reader = new BufferedReader(new FileReader(args[0]));
        String line = "";
        // Defaults for all tunable parameters; overridden by the parameter file below.
        String WorkFolder = "";
        int NoCPUs = 2;
        String UserMod = "";
        String Combined_Prot = "";
        String InternalLibID = "";
        String ExternalLibPath = "";
        String ExternalLibDecoyTag = "DECOY";
        boolean DefaultProtFiltering = true;
        boolean DataSetLevelPepFDR = false;
        float ProbThreshold = 0.99f;
        float ExtProbThreshold = 0.99f;
        float Freq = 0f;
        int TopNPep = 6;
        int TopNFrag = 6;
        float MinFragMz = 200f;
        String FilterWeight = "GW";
        float MinWeight = 0.9f;
        float RTWindow_Int = -1f;
        float RTWindow_Ext = -1f;
        TandemParam tandemPara = new TandemParam(DBSearchParam.SearchInstrumentType.TOF5600);
        HashMap<String, File> AssignFiles = new HashMap<>();
        boolean InternalLibSearch = false;
        boolean ExternalLibSearch = false;
        boolean ExportSaint = false;
        boolean SAINT_MS1 = false;
        boolean SAINT_MS2 = true;
        HashMap<String, String[]> BaitList = new HashMap<>();
        HashMap<String, String> BaitName = new HashMap<>();
        HashMap<String, String[]> ControlList = new HashMap<>();
        HashMap<String, String> ControlName = new HashMap<>();
        //<editor-fold defaultstate="collapsed" desc="Reading parameter file">
        while ((line = reader.readLine()) != null) {
            line = line.trim();
            Logger.getRootLogger().info(line);
            if (!"".equals(line) && !line.startsWith("#")) {
                // A "==File list begin" ... "==File list end" section lists input files explicitly.
                if (line.equals("==File list begin")) {
                    do {
                        line = reader.readLine();
                        line = line.trim();
                        if (line.equals("==File list end")) {
                            continue;
                        } else if (!"".equals(line)) {
                            File newfile = new File(line);
                            if (newfile.exists()) {
                                AssignFiles.put(newfile.getAbsolutePath(), newfile);
                            } else {
                                Logger.getRootLogger().info("File: " + newfile + " does not exist.");
                            }
                        }
                    } while (!line.equals("==File list end"));
                }
                // All other lines are "key=value" pairs.
                if (line.split("=").length < 2) {
                    continue;
                }
                String type = line.split("=")[0].trim();
                String value = line.split("=")[1].trim();
                switch (type) {
                case "TargetedExtraction": { InternalLibSearch = Boolean.parseBoolean(value); break; }
                case "InternalLibSearch": { InternalLibSearch = Boolean.parseBoolean(value); break; }
                case "ExternalLibSearch": { ExternalLibSearch = Boolean.parseBoolean(value); break; }
                case "Path": { WorkFolder = value; break; }
                case "path": { WorkFolder = value; break; }
                case "Thread": { NoCPUs = Integer.parseInt(value); break; }
                case "Fasta": { tandemPara.FastaPath = value; break; }
                case "Combined_Prot": { Combined_Prot = value; break; }
                case "DefaultProtFiltering": { DefaultProtFiltering = Boolean.parseBoolean(value); break; }
                case "DecoyPrefix": { if (!"".equals(value)) { tandemPara.DecoyPrefix = value; } break; }
                case "UserMod": { UserMod = value; break; }
                case "ProteinFDR": { tandemPara.ProtFDR = Float.parseFloat(value); break; }
                case "PeptideFDR": { tandemPara.PepFDR = Float.parseFloat(value); break; }
                case "DataSetLevelPepFDR": { DataSetLevelPepFDR = Boolean.parseBoolean(value); break; }
                case "InternalLibID": { InternalLibID = value; break; }
                case "ExternalLibPath": { ExternalLibPath = value; break; }
                case "ExtProbThreshold": { ExtProbThreshold = Float.parseFloat(value); break; }
                case "RTWindow_Int": { RTWindow_Int = Float.parseFloat(value); break; }
                case "RTWindow_Ext": { RTWindow_Ext = Float.parseFloat(value); break; }
                case "ExternalLibDecoyTag": {
                    ExternalLibDecoyTag = value;
                    // Strip a trailing underscore from the decoy tag, if present.
                    if (ExternalLibDecoyTag.endsWith("_")) {
                        ExternalLibDecoyTag = ExternalLibDecoyTag.substring(0, ExternalLibDecoyTag.length() - 1);
                    }
                    break;
                }
                case "ProbThreshold": { ProbThreshold = Float.parseFloat(value); break; }
                case "ReSearchProb": {
                    //ReSearchProb = Float.parseFloat(value);
                    break;
                }
                case "FilterWeight": { FilterWeight = value; break; }
                case "MinWeight": { MinWeight = Float.parseFloat(value); break; }
                case "TopNFrag": { TopNFrag = Integer.parseInt(value); break; }
                case "TopNPep": { TopNPep = Integer.parseInt(value); break; }
                case "Freq": { Freq = Float.parseFloat(value); break; }
                case "MinFragMz": { MinFragMz = Float.parseFloat(value); break; }
                //<editor-fold defaultstate="collapsed" desc="SaintOutput">
                case "ExportSaintInput": { ExportSaint = Boolean.parseBoolean(value); break; }
                case "QuantitationType": {
                    switch (value) {
                    case "MS1": { SAINT_MS1 = true; SAINT_MS2 = false; break; }
                    case "MS2": { SAINT_MS1 = false; SAINT_MS2 = true; break; }
                    case "BOTH": { SAINT_MS1 = true; SAINT_MS2 = true; break; }
                    }
                    break;
                }
                //                    case "BaitInputFile": {
                //                        SaintBaitFile = value;
                //                        break;
                //                    }
                //                    case "PreyInputFile": {
                //                        SaintPreyFile = value;
                //                        break;
                //                    }
                //                    case "InterationInputFile": {
                //                        SaintInteractionFile = value;
                //                        break;
                //                    }
                default: {
                    // Keyed bait/control entries: suffix after the prefix is the sample key.
                    if (type.startsWith("BaitName_")) {
                        BaitName.put(type.substring(9), value);
                    }
                    if (type.startsWith("BaitFile_")) {
                        BaitList.put(type.substring(9), value.split("\t"));
                    }
                    if (type.startsWith("ControlName_")) {
                        ControlName.put(type.substring(12), value);
                    }
                    if (type.startsWith("ControlFile_")) {
                        ControlList.put(type.substring(12), value.split("\t"));
                    }
                    break;
                }
                //</editor-fold>
                }
            }
        }
        //</editor-fold>

        //Initialize PTM manager using compomics library
        PTMManager.GetInstance();
        if (!UserMod.equals("")) {
            PTMManager.GetInstance().ImportUserMod(UserMod);
        }

        //Check if the fasta file can be found
        if (!new File(tandemPara.FastaPath).exists()) {
            Logger.getRootLogger().info("Fasta file :" + tandemPara.FastaPath
                    + " cannot be found, the process will be terminated, please check.");
            System.exit(1);
        }

        //Check if the prot.xml file can be found
        if (!new File(Combined_Prot).exists()) {
            Logger.getRootLogger().info("ProtXML file: " + Combined_Prot
                    + " cannot be found, the export protein summary table will be empty.");
        }
        LCMSID protID = null;

        //Parse prot.xml and generate protein master list given an FDR
        if (Combined_Prot != null && !Combined_Prot.equals("")) {
            protID = LCMSID.ReadLCMSIDSerialization(Combined_Prot);
            if (!"".equals(Combined_Prot) && protID == null) {
                protID = new LCMSID(Combined_Prot, tandemPara.DecoyPrefix, tandemPara.FastaPath);
                ProtXMLParser protxmlparser = new ProtXMLParser(protID, Combined_Prot, 0f);
                //Use DIA-Umpire default protein FDR calculation
                if (DefaultProtFiltering) {
                    protID.RemoveLowLocalPWProtein(0.8f);
                    protID.RemoveLowMaxIniProbProtein(0.9f);
                    protID.FilterByProteinDecoyFDRUsingMaxIniProb(tandemPara.DecoyPrefix, tandemPara.ProtFDR);
                }
                //Get protein FDR calculation without other filtering
                else {
                    protID.FilterByProteinDecoyFDRUsingLocalPW(tandemPara.DecoyPrefix, tandemPara.ProtFDR);
                }
                protID.LoadSequence();
                protID.WriteLCMSIDSerialization(Combined_Prot);
            }
            Logger.getRootLogger().info("Protein No.:" + protID.ProteinList.size());
        }
        HashMap<String, HashMap<String, FragmentPeak>> IDSummaryFragments = new HashMap<>();

        //Generate DIA file list: scan the work folder (one level deep) for mzXML/mzML,
        //skipping the q1/q2/q3 quality-window files.
        ArrayList<DIAPack> FileList = new ArrayList<>();
        File folder = new File(WorkFolder);
        if (!folder.exists()) {
            Logger.getRootLogger().info("The path : " + WorkFolder + " cannot be found.");
            System.exit(1);
        }
        for (final File fileEntry : folder.listFiles()) {
            if (fileEntry.isFile()
                    && (fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                            | fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry);
            }
            if (fileEntry.isDirectory()) {
                for (final File fileEntry2 : fileEntry.listFiles()) {
                    if (fileEntry2.isFile()
                            && (fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                                    | fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                        AssignFiles.put(fileEntry2.getAbsolutePath(), fileEntry2);
                    }
                }
            }
        }

        Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size());
        for (File fileEntry : AssignFiles.values()) {
            Logger.getRootLogger().info(fileEntry.getAbsolutePath());
            String mzXMLFile = fileEntry.getAbsolutePath();
            if (mzXMLFile.toLowerCase().endsWith(".mzxml") | mzXMLFile.toLowerCase().endsWith(".mzml")) {
                DIAPack DiaFile = new DIAPack(mzXMLFile, NoCPUs);
                FileList.add(DiaFile);
                HashMap<String, FragmentPeak> FragMap = new HashMap<>();
                IDSummaryFragments.put(FilenameUtils.getBaseName(mzXMLFile), FragMap);
                Logger.getRootLogger().info(
                        "=================================================================================================");
                Logger.getRootLogger().info("Processing " + mzXMLFile);
                if (!DiaFile.LoadDIASetting()) {
                    Logger.getRootLogger().info("Loading DIA setting failed, job is incomplete");
                    System.exit(1);
                }
                if (!DiaFile.LoadParams()) {
                    Logger.getRootLogger().info("Loading parameters failed, job is incomplete");
                    System.exit(1);
                }
            }
        }

        // Optional dataset-level peptide FDR: compute once across all files and cache it.
        LCMSID combinePepID = null;
        if (DataSetLevelPepFDR) {
            combinePepID = LCMSID.ReadLCMSIDSerialization(WorkFolder + "combinePepID.SerFS");
            if (combinePepID == null) {
                FDR_DataSetLevel fdr = new FDR_DataSetLevel();
                fdr.GeneratePepIonList(FileList, tandemPara, WorkFolder + "combinePepID.SerFS");
                combinePepID = fdr.combineID;
                combinePepID.WriteLCMSIDSerialization(WorkFolder + "combinePepID.SerFS");
            }
        }

        //process each DIA file for quantification based on untargeted identifications
        for (DIAPack DiaFile : FileList) {
            long time = System.currentTimeMillis();
            Logger.getRootLogger().info("Loading identification results " + DiaFile.Filename + "....");
            //If the LCMSID serialization is found
            if (!DiaFile.ReadSerializedLCMSID()) {
                DiaFile.ParsePepXML(tandemPara, combinePepID);
                DiaFile.BuildStructure();
                if (!DiaFile.MS1FeatureMap.ReadPeakCluster()) {
                    Logger.getRootLogger().info("Loading peak and structure failed, job is incomplete");
                    System.exit(1);
                }
                DiaFile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster();
                //Generate mapping between index of precursor feature and pseudo MS/MS scan index
                DiaFile.GenerateClusterScanNomapping();
                //Doing quantification
                DiaFile.AssignQuant();
                DiaFile.ClearStructure();
            }
            DiaFile.IDsummary.ReduceMemoryUsage();
            time = System.currentTimeMillis() - time;
            Logger.getRootLogger().info(DiaFile.Filename + " processed time:"
                    + String.format("%d hour, %d min, %d sec", TimeUnit.MILLISECONDS.toHours(time),
                            TimeUnit.MILLISECONDS.toMinutes(time)
                                    - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)),
                            TimeUnit.MILLISECONDS.toSeconds(time)
                                    - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time))));
        }

        //<editor-fold defaultstate="collapsed" desc="Targete re-extraction using internal library">
        Logger.getRootLogger().info(
                "=================================================================================================");
        if (InternalLibSearch && FileList.size() > 1) {
            Logger.getRootLogger().info("Module C: Targeted extraction using internal library");

            FragmentLibManager libManager = FragmentLibManager.ReadFragmentLibSerialization(WorkFolder,
                    InternalLibID);
            if (libManager == null) {
                Logger.getRootLogger().info("Building internal spectral library");
                libManager = new FragmentLibManager(InternalLibID);
                ArrayList<LCMSID> LCMSIDList = new ArrayList<>();
                for (DIAPack dia : FileList) {
                    LCMSIDList.add(dia.IDsummary);
                }
                libManager.ImportFragLibTopFrag(LCMSIDList, Freq, TopNFrag);
                libManager.WriteFragmentLibSerialization(WorkFolder);
            }
            libManager.ReduceMemoryUsage();

            Logger.getRootLogger()
                    .info("Building retention time prediction model and generate candidate peptide list");
            for (int i = 0; i < FileList.size(); i++) {
                FileList.get(i).IDsummary.ClearMappedPep();
            }
            // Pairwise RT alignment between every pair of files.
            for (int i = 0; i < FileList.size(); i++) {
                for (int j = i + 1; j < FileList.size(); j++) {
                    RTAlignedPepIonMapping alignment = new RTAlignedPepIonMapping(WorkFolder,
                            FileList.get(i).GetParameter(), FileList.get(i).IDsummary, FileList.get(j).IDsummary);
                    alignment.GenerateModel();
                    alignment.GenerateMappedPepIon();
                }
                FileList.get(i).ExportID();
                FileList.get(i).IDsummary = null;
            }

            Logger.getRootLogger().info("Targeted matching........");
            for (DIAPack diafile : FileList) {
                if (diafile.IDsummary == null) {
                    diafile.ReadSerializedLCMSID();
                }
                if (!diafile.IDsummary.GetMappedPepIonList().isEmpty()) {
                    diafile.UseMappedIon = true;
                    diafile.FilterMappedIonByProb = false;
                    diafile.BuildStructure();
                    diafile.MS1FeatureMap.ReadPeakCluster();
                    diafile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster();
                    diafile.GenerateMassCalibrationRTMap();
                    diafile.TargetedExtractionQuant(false, libManager, 1.1f, RTWindow_Int);
                    diafile.MS1FeatureMap.ClearAllPeaks();
                    diafile.IDsummary.ReduceMemoryUsage();
                    diafile.IDsummary.RemoveLowProbMappedIon(ProbThreshold);
                    diafile.ExportID();
                    Logger.getRootLogger().info("Peptide ions: " + diafile.IDsummary.GetPepIonList().size()
                            + " Mapped ions: " + diafile.IDsummary.GetMappedPepIonList().size());
                    diafile.ClearStructure();
                }
                diafile.IDsummary = null;
                System.gc();
            }
            Logger.getRootLogger().info(
                    "=================================================================================================");
        }
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="Targeted re-extraction using external library">
        //External library search
        if (ExternalLibSearch) {
            Logger.getRootLogger().info("Module C: Targeted extraction using external library");

            //Read exteranl library
            FragmentLibManager ExlibManager = FragmentLibManager.ReadFragmentLibSerialization(WorkFolder,
                    FilenameUtils.getBaseName(ExternalLibPath));
            if (ExlibManager == null) {
                ExlibManager = new FragmentLibManager(FilenameUtils.getBaseName(ExternalLibPath));
                //Import traML file
                ExlibManager.ImportFragLibByTraML(ExternalLibPath, ExternalLibDecoyTag);
                //Check if there are decoy spectra
                ExlibManager.CheckDecoys();
                //ExlibManager.ImportFragLibBySPTXT(ExternalLibPath);
                ExlibManager.WriteFragmentLibSerialization(WorkFolder);
            }
            Logger.getRootLogger()
                    .info("No. of peptide ions in external lib:" + ExlibManager.PeptideFragmentLib.size());
            for (DIAPack diafile : FileList) {
                if (diafile.IDsummary == null) {
                    diafile.ReadSerializedLCMSID();
                }
                //Generate RT mapping
                RTMappingExtLib RTmap = new RTMappingExtLib(diafile.IDsummary, ExlibManager,
                        diafile.GetParameter());
                RTmap.GenerateModel();
                RTmap.GenerateMappedPepIon();

                diafile.BuildStructure();
                diafile.MS1FeatureMap.ReadPeakCluster();
                diafile.GenerateMassCalibrationRTMap();
                //Perform targeted re-extraction
                diafile.TargetedExtractionQuant(false, ExlibManager, ProbThreshold, RTWindow_Ext);
                diafile.MS1FeatureMap.ClearAllPeaks();
                diafile.IDsummary.ReduceMemoryUsage();
                //Remove target IDs below the defined probability threshold
                diafile.IDsummary.RemoveLowProbMappedIon(ExtProbThreshold);
                diafile.ExportID();
                diafile.ClearStructure();
                Logger.getRootLogger().info("Peptide ions: " + diafile.IDsummary.GetPepIonList().size()
                        + " Mapped ions: " + diafile.IDsummary.GetMappedPepIonList().size());
            }
        }
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="Peptide and fragment selection">
        Logger.getRootLogger().info("Peptide and fragment selection across the whole dataset");
        ArrayList<LCMSID> SummaryList = new ArrayList<>();
        for (DIAPack diafile : FileList) {
            if (diafile.IDsummary == null) {
                diafile.ReadSerializedLCMSID();
                diafile.IDsummary.ClearAssignPeakCluster();
                //diafile.IDsummary.ClearPSMs();
            }
            if (protID != null) {
                //Generate protein list according to mapping of peptide ions for each DIA file to the master protein list
                diafile.IDsummary.GenerateProteinByRefIDByPepSeq(protID, true);
                diafile.IDsummary.ReMapProPep();
            }
            if ("GW".equals(FilterWeight)) {
                diafile.IDsummary.SetFilterByGroupWeight();
            } else if ("PepW".equals(FilterWeight)) {
                diafile.IDsummary.SetFilterByWeight();
            }
            SummaryList.add(diafile.IDsummary);
        }
        FragmentSelection fragselection = new FragmentSelection(SummaryList);
        fragselection.freqPercent = Freq;
        fragselection.MinFragMZ = MinFragMz;
        fragselection.GeneratePepFragScoreMap();
        fragselection.GenerateTopFragMap(TopNFrag);
        fragselection.GenerateProtPepScoreMap(MinWeight);
        fragselection.GenerateTopPepMap(TopNPep);
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="Writing general reports">
        ExportTable export = new ExportTable(WorkFolder, SummaryList, IDSummaryFragments, protID, fragselection);
        export.Export(TopNPep, TopNFrag, Freq);
        //</editor-fold>

        //<editor-fold defaultstate="collapsed" desc="Generate SAINT input files">
        if (ExportSaint && protID != null) {
            HashMap<String, DIAPack> Filemap = new HashMap<>();
            for (DIAPack DIAfile : FileList) {
                Filemap.put(DIAfile.GetBaseName(), DIAfile);
            }

            FileWriter baitfile = new FileWriter(WorkFolder + "SAINT_Bait_" + DateTimeTag.GetTag() + ".txt");
            FileWriter preyfile = new FileWriter(WorkFolder + "SAINT_Prey_" + DateTimeTag.GetTag() + ".txt");
            FileWriter interactionfileMS1 = null;
            FileWriter interactionfileMS2 = null;
            if (SAINT_MS1) {
                interactionfileMS1 = new FileWriter(
                        WorkFolder + "SAINT_Interaction_MS1_" + DateTimeTag.GetTag() + ".txt");
            }
            if (SAINT_MS2) {
                interactionfileMS2 = new FileWriter(
                        WorkFolder + "SAINT_Interaction_MS2_" + DateTimeTag.GetTag() + ".txt");
            }
            HashMap<String, String> PreyID = new HashMap<>();

            // Controls are written with flag "C", baits with flag "T".
            for (String samplekey : ControlName.keySet()) {
                String name = ControlName.get(samplekey);
                for (String file : ControlList.get(samplekey)) {
                    baitfile.write(FilenameUtils.getBaseName(file) + "\t" + name + "\t" + "C\n");
                    LCMSID IDsummary = Filemap.get(FilenameUtils.getBaseName(file)).IDsummary;
                    if (SAINT_MS1) {
                        SaintOutput(protID, IDsummary, fragselection, interactionfileMS1, file, name, PreyID, 1);
                    }
                    if (SAINT_MS2) {
                        SaintOutput(protID, IDsummary, fragselection, interactionfileMS2, file, name, PreyID, 2);
                    }
                }
            }
            for (String samplekey : BaitName.keySet()) {
                String name = BaitName.get(samplekey);
                for (String file : BaitList.get(samplekey)) {
                    baitfile.write(FilenameUtils.getBaseName(file) + "\t" + name + "\t" + "T\n");
                    LCMSID IDsummary = Filemap.get(FilenameUtils.getBaseName(file)).IDsummary;
                    if (SAINT_MS1) {
                        SaintOutput(protID, IDsummary, fragselection, interactionfileMS1, file, name, PreyID, 1);
                    }
                    if (SAINT_MS2) {
                        SaintOutput(protID, IDsummary, fragselection, interactionfileMS2, file, name, PreyID, 2);
                    }
                }
            }
            baitfile.close();
            if (SAINT_MS1) {
                interactionfileMS1.close();
            }
            if (SAINT_MS2) {
                interactionfileMS2.close();
            }
            for (String AccNo : PreyID.keySet()) {
                preyfile.write(AccNo + "\t" + PreyID.get(AccNo) + "\n");
            }
            preyfile.close();
        }
        //</editor-fold>

        Logger.getRootLogger().info("Job done");
        Logger.getRootLogger().info(
                "=================================================================================================");
    } catch (Exception e) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e));
        throw e;
    }
}
From source file:com.l2jfree.gameserver.ThreadPoolManager.java
/**
 * Shuts down all three thread pools (scheduled, instant, long-running),
 * waiting up to ~15 seconds total for tasks to finish, then prints a summary
 * of remaining tasks. If the server has been up for more than 12 hours,
 * runnable statistics are dumped as well.
 */
public void shutdown() {
    final long begin = System.currentTimeMillis();

    System.out.println("ThreadPoolManager: Shutting down.");
    System.out.println("\t... executing " + getTaskCount(_scheduledPool) + " scheduled tasks.");
    System.out.println("\t... executing " + getTaskCount(_instantPool) + " instant tasks.");
    System.out.println("\t... executing " + getTaskCount(_longRunningPool) + " long running tasks.");

    // Stop accepting new work on all pools.
    _scheduledPool.shutdown();
    _instantPool.shutdown();
    _longRunningPool.shutdown();

    boolean success = false;
    try {
        // First grace period: let running/queued tasks finish.
        success |= awaitTermination(5000);

        // Then drop pending delayed/periodic tasks and wait once more.
        _scheduledPool.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
        _scheduledPool.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);

        success |= awaitTermination(10000);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    System.out.println("\t... success: " + success + " in " + (System.currentTimeMillis() - begin) + " msec.");
    System.out.println("\t... " + getTaskCount(_scheduledPool) + " scheduled tasks left.");
    System.out.println("\t... " + getTaskCount(_instantPool) + " instant tasks left.");
    System.out.println("\t... " + getTaskCount(_longRunningPool) + " long running tasks left.");

    // Only dump runnable statistics for uptimes over 12 hours.
    if (TimeUnit.HOURS
            .toMillis(12) < (System.currentTimeMillis() - GameServer.getStartedTime().getTimeInMillis()))
        RunnableStatsManager.dumpClassStats(SortBy.TOTAL);
}
From source file:org.specvis.logic.Functions.java
/** * Get time interval between some start and end value in "hh:mm:ss" format. * @param start/*from www . ja va 2 s . c o m*/ * @param end * @return Time in "hh:mm:ss" format. */ public String totalTime(long start, long end) { long difference = end - start; return String.format("%02d:%02d:%02d", TimeUnit.MILLISECONDS.toHours(difference), TimeUnit.MILLISECONDS.toMinutes(difference) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(difference)), TimeUnit.MILLISECONDS.toSeconds(difference) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(difference))); }
From source file:org.apache.druid.segment.realtime.plumber.RealtimePlumberSchoolTest.java
/**
 * Verifies that a failing persist (the commit runnable throws) is recorded in
 * the failedPersists metric exactly once.
 */
@Test(timeout = 60_000L)
public void testPersistFails() throws Exception {
    // One-hour sink starting at epoch 0, built from the test tuning config.
    Sink sink = new Sink(Intervals.utc(0, TimeUnit.HOURS.toMillis(1)), schema, tuningConfig.getShardSpec(),
            DateTimes.of("2014-12-01T12:34:56.789").toString(), tuningConfig.getMaxRowsInMemory(),
            TuningConfigs.getMaxBytesInMemoryOrDefault(tuningConfig.getMaxBytesInMemory()),
            tuningConfig.isReportParseExceptions(), tuningConfig.getDedupColumn());
    plumber.getSinks().put(0L, sink);
    plumber.startJob();

    // Mock a single row at timestamp 0 with no dimensions.
    final InputRow row = EasyMock.createNiceMock(InputRow.class);
    EasyMock.expect(row.getTimestampFromEpoch()).andReturn(0L);
    EasyMock.expect(row.getDimensions()).andReturn(new ArrayList<String>());
    EasyMock.replay(row);
    plumber.add(row, Suppliers.ofInstance(Committers.nil()));

    // The committer throws after signalling, so the persist must fail.
    final CountDownLatch doneSignal = new CountDownLatch(1);
    plumber.persist(Committers.supplierFromRunnable(new Runnable() {
        @Override
        public void run() {
            doneSignal.countDown();
            throw new RuntimeException();
        }
    }).get());

    doneSignal.await();

    // Exception may need time to propagate
    while (metrics.failedPersists() < 1) {
        Thread.sleep(100);
    }

    Assert.assertEquals(1, metrics.failedPersists());
}
From source file:org.commonjava.indy.httprox.handler.ProxyResponseWriter.java
/**
 * Writes the proxy response for the current request onto the given sink channel.
 *
 * Handles: reader errors (→ error response), missing request (→ 400), proxy
 * authentication (with an auth-result cache), then dispatches on the HTTP method:
 * GET/HEAD transfer content, OPTIONS reports allowed methods, CONNECT sets up an
 * SSL tunnel to a MITM server (when enabled), anything else gets 405. Once
 * {@code directed} is set, this writer steps aside and the tunnel owns the channel.
 */
private void doHandleEvent(final ConduitStreamSinkChannel sinkChannel) {
    // Once traffic is being tunnelled, this handler must not touch the channel.
    if (directed) {
        return;
    }

    HttpConduitWrapper http = new HttpConduitWrapper(sinkChannel, httpRequest, contentController, cacheProvider);
    if (httpRequest == null) {
        if (error != null) {
            logger.debug("Handling error from request reader: " + error.getMessage(), error);
            handleError(error, http);
        } else {
            logger.debug("Invalid state (no error or request) from request reader. Sending 400.");
            try {
                http.writeStatus(ApplicationStatus.BAD_REQUEST);
            } catch (final IOException e) {
                logger.error("Failed to write BAD REQUEST for missing HTTP first-line to response channel.", e);
            }
        }

        return;
    }

    restLogger.info("START {} (from: {})", httpRequest.getRequestLine(), peerAddress);

    // TODO: Can we handle this?
    final String oldThreadName = Thread.currentThread().getName();
    Thread.currentThread().setName("PROXY-" + httpRequest.getRequestLine().toString());
    sinkChannel.getCloseSetter().set((c) -> {
        restLogger.info("END {} (from: {})", httpRequest.getRequestLine(), peerAddress);
        logger.trace("Sink channel closing.");
        Thread.currentThread().setName(oldThreadName);
        if (sslTunnel != null) {
            logger.trace("Close ssl tunnel");
            sslTunnel.close();
        }
    });

    logger.debug("\n\n\n>>>>>>> Handle write\n\n\n");
    if (error == null) {
        try {
            if (repoCreator == null) {
                throw new IndyDataException("No valid instance of ProxyRepositoryCreator");
            }

            final UserPass proxyUserPass = parse(ApplicationHeader.proxy_authorization, httpRequest, null);

            mdcManager.putExtraHeaders(httpRequest);
            if (proxyUserPass != null) {
                mdcManager.putExternalID(proxyUserPass.getUser());
            }

            logger.debug("Proxy UserPass: {}\nConfig secured? {}\nConfig tracking type: {}", proxyUserPass,
                    config.isSecured(), config.getTrackingType());
            // Challenge the client when credentials are required but absent.
            if (proxyUserPass == null
                    && (config.isSecured() || TrackingType.ALWAYS == config.getTrackingType())) {

                String realmInfo = String.format(PROXY_AUTHENTICATE_FORMAT, config.getProxyRealm());

                logger.info("Not authenticated to proxy. Sending response: {} / {}: {}",
                        PROXY_AUTHENTICATION_REQUIRED, proxy_authenticate, realmInfo);

                http.writeStatus(PROXY_AUTHENTICATION_REQUIRED);
                http.writeHeader(proxy_authenticate, realmInfo);
            } else {
                RequestLine requestLine = httpRequest.getRequestLine();
                String method = requestLine.getMethod().toUpperCase();
                String trackingId = null;
                boolean authenticated = true;

                ProxyResponseHelper proxyResponseHelper = new ProxyResponseHelper(httpRequest, config,
                        contentController, repoCreator, storeManager, metricsConfig, metricRegistry, cls);
                if (proxyUserPass != null) {
                    TrackingKey trackingKey = proxyResponseHelper.getTrackingKey(proxyUserPass);
                    if (trackingKey != null) {
                        trackingId = trackingKey.getId();
                    }

                    // Cache successful auth results to avoid re-hitting Keycloak per request.
                    String authCacheKey = generateAuthCacheKey(proxyUserPass);
                    Boolean isAuthToken = proxyAuthCache.get(authCacheKey);
                    if (Boolean.TRUE.equals(isAuthToken)) {
                        authenticated = true;
                        logger.debug("Found auth key in cache");
                    } else {
                        logger.debug(
                                "Passing BASIC authentication credentials to Keycloak bearer-token translation authenticator");
                        authenticated = proxyAuthenticator.authenticate(proxyUserPass, http);
                        if (authenticated) {
                            proxyAuthCache.put(authCacheKey, Boolean.TRUE, config.getAuthCacheExpirationHours(),
                                    TimeUnit.HOURS);
                        }
                    }
                    logger.debug("Authentication done, result: {}", authenticated);
                }

                if (authenticated) {
                    switch (method) {
                    case GET_METHOD:
                    case HEAD_METHOD: {
                        final URL url = new URL(requestLine.getUri());
                        logger.debug("getArtifactStore starts, trackingId: {}, url: {}", trackingId, url);
                        ArtifactStore store = proxyResponseHelper.getArtifactStore(trackingId, url);
                        proxyResponseHelper.transfer(http, store, url.getPath(), GET_METHOD.equals(method),
                                proxyUserPass);
                        break;
                    }
                    case OPTIONS_METHOD: {
                        http.writeStatus(ApplicationStatus.OK);
                        http.writeHeader(ApplicationHeader.allow, ALLOW_HEADER_VALUE);
                        break;
                    }
                    case CONNECT_METHOD: {
                        if (!config.isMITMEnabled()) {
                            logger.debug("CONNECT method not supported unless MITM-proxying is enabled.");
                            http.writeStatus(ApplicationStatus.BAD_REQUEST);
                            break;
                        }

                        String uri = requestLine.getUri(); // e.g, github.com:443
                        logger.debug("Get CONNECT request, uri: {}", uri);

                        // NOTE(review): split(":") assumes a host:port authority — would
                        // misparse an IPv6 literal; confirm upstream guarantees.
                        String[] toks = uri.split(":");
                        String host = toks[0];
                        int port = Integer.parseInt(toks[1]);

                        directed = true;

                        // After this, the proxy simply opens a plain socket to the target server and relays
                        // everything between the initial client and the target server (including the TLS handshake).
                        SocketChannel socketChannel;

                        ProxyMITMSSLServer svr = new ProxyMITMSSLServer(host, port, trackingId, proxyUserPass,
                                proxyResponseHelper, contentController, cacheProvider, config);
                        tunnelAndMITMExecutor.submit(svr);
                        socketChannel = svr.getSocketChannel();

                        if (socketChannel == null) {
                            logger.debug("Failed to get MITM socket channel");
                            http.writeStatus(ApplicationStatus.SERVER_ERROR);
                            svr.stop();
                            break;
                        }

                        sslTunnel = new ProxySSLTunnel(sinkChannel, socketChannel, config);
                        tunnelAndMITMExecutor.submit(sslTunnel);
                        proxyRequestReader.setProxySSLTunnel(sslTunnel); // client input will be directed to target socket

                        // When all is ready, send the 200 to client. Client send the SSL handshake to reader,
                        // reader direct it to tunnel to MITM. MITM finish the handshake and read the request data,
                        // retrieve remote content and send back to tunnel to client.
                        http.writeStatus(ApplicationStatus.OK);
                        http.writeHeader("Status", "200 OK\n");
                        break;
                    }
                    default: {
                        http.writeStatus(ApplicationStatus.METHOD_NOT_ALLOWED);
                    }
                    }
                }
            }

            logger.debug("Response complete.");
        } catch (final Throwable e) {
            error = e;
        } finally {
            mdcManager.clear();
        }
    }

    if (error != null) {
        handleError(error, http);
    }

    try {
        if (directed) {
            ; // do not close sink channel
        } else {
            http.close();
        }
    } catch (final IOException e) {
        logger.error("Failed to shutdown response", e);
    }
}
From source file:com.piketec.jenkins.plugins.tpt.TptPluginSingleJobExecutor.java
/**
 * Starts TPT via command line and executes the given tests.
 *
 * @param launcher
 *          to start the process
 * @param listener
 *          to join TPT with a given timeout
 * @param cmd
 *          The command to execute via command line
 * @param timeout
 *          The maximum allowed runtime for TPT, in hours. Non-positive values fall
 *          back to the globally configured default timeout.
 * @return {@code true} if the TPT process finished with exit code 0, {@code false} otherwise
 * @throws InterruptedException
 *           if the build is aborted while TPT is running
 * @throws IOException
 *           if the TPT process could not be started or joined
 */
private boolean launchTPT(Launcher launcher, BuildListener listener, String cmd, long timeout)
        throws InterruptedException, IOException {
    boolean exitCodeWasNull = true;
    logger.info("Launching \"" + cmd + "\"");
    Launcher.ProcStarter starter = launcher.new ProcStarter();
    starter.cmdAsSingleString(cmd);
    starter.stdout(logger.getLogger());
    starter.stderr(logger.getLogger());
    Proc tpt = null;
    try {
        tpt = starter.start();
        if (timeout <= 0) {
            timeout = JenkinsConfiguration.DescriptorImpl.getDefaultTimeout();
        }
        logger.info("Waiting for TPT to complete. Timeout: " + timeout + "h");
        int exitcode = tpt.joinWithTimeout(timeout, TimeUnit.HOURS, listener);
        if (exitcode != 0) {
            logger.error("TPT process stops with exit code " + exitcode);
            exitCodeWasNull = false;
        }
    } catch (IOException e) {
        // Fix: chain the original exception instead of discarding its stack trace.
        throw new IOException("TPT launch error: " + e.getMessage(), e);
    } catch (InterruptedException e) {
        // Build aborted: best-effort kill of the TPT process before propagating the interrupt.
        try {
            if (tpt != null) { // defensive: start() may not have completed
                tpt.kill();
            }
        } catch (IOException | InterruptedException e1) {
            // Fix: preserve the kill failure as the cause.
            throw new IOException(
                    "TPT launch error: Interrupt requested, but cannot kill the TPT process. Please kill it manually.",
                    e1);
        }
        throw e;
    }
    return exitCodeWasNull;
}