List of usage examples for java.lang.System.gc()
public static void gc()
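System.gc() only suggests that the Java Virtual Machine run the garbage collector; the JVM is free to ignore the request (for example when started with -XX:+DisableExplicitGC). Before the project examples below, here is a minimal, self-contained sketch, not taken from any of the listed source files (the class name GcHintDemo is illustrative), showing the common pattern of comparing free heap memory before and after the call:

public class GcHintDemo {

    public static void main(String[] args) {
        Runtime runtime = Runtime.getRuntime();

        // Allocate some short-lived objects so there is something to collect.
        for (int i = 0; i < 1_000; i++) {
            byte[] unused = new byte[1024 * 1024];
        }

        long freeBefore = runtime.freeMemory();

        // This is only a hint; the JVM may ignore it.
        System.gc();

        long freeAfter = runtime.freeMemory();

        System.out.println("Free memory before gc(): " + freeBefore / 1024 + " KB");
        System.out.println("Free memory after  gc(): " + freeAfter / 1024 + " KB");
    }
}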
From source file:org.leo.benchmark.Benchmark.java
/**
 * Execute the current run code loop times.
 *
 * @param run code to run
 * @param loop number of times to run the code
 * @param taskName name displayed at the end of the task
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private void execute(BenchRunnable run, int loop, String taskName) {
    System.out.print(taskName + " ... ");
    // set default context
    collection.clear();
    collection.addAll(defaultCtx);
    // warmup
    warmUp();
    isTimeout = false;
    // timeout timer
    Timer timer = new Timer((int) timeout, new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            isTimeout = true;
            // to raise a ConcurrentModificationException or a
            // NoSuchElementException to interrupt internal work in the List
            collection.clear();
        }
    });
    timer.setRepeats(false);
    timer.start();
    long startTime = System.nanoTime();
    int i;
    for (i = 0; i < loop && !isTimeout; i++) {
        try {
            run.run(i);
        } catch (Exception e) {
            // raised on purpose by the timeout, so ignore it
        }
    }
    timer.stop();
    long time = isTimeout ? timeout * 1000000 : System.nanoTime() - startTime;
    System.out.println((isTimeout ? "Timeout (>" + time + "ns) after " + i + " loop(s)" : time + "ns"));
    // restore default context,
    // the collection instance might have been
    // corrupted by the timeout so create a new instance
    try {
        Constructor<? extends Collection> constructor = collection.getClass()
                .getDeclaredConstructor((Class<?>[]) null);
        constructor.setAccessible(true);
        collection = constructor.newInstance();
        // update the reference
        if (collection instanceof List) {
            list = (List<String>) collection;
        }
    } catch (Exception e1) {
        e1.printStackTrace();
    }
    // store the results for display
    Map<Class<? extends Collection<?>>, Long> currentBench = benchResults.get(taskName);
    if (currentBench == null) {
        currentBench = new HashMap<Class<? extends Collection<?>>, Long>();
        benchResults.put(taskName, currentBench);
    }
    currentBench.put((Class<? extends Collection<String>>) collection.getClass(), time);
    // little gc to clean up all the stuff
    System.gc();
}
From source file:com.flyn.net.asynchttp.AsyncHttpResponseHandler.java
/**
 * Returns byte array of response HttpEntity contents
 *
 * @param entity can be null
 * @return response entity body or null
 * @throws java.io.IOException if reading entity or creating byte array failed
 */
byte[] getResponseData(HttpEntity entity) throws IOException {
    byte[] responseBody = null;
    if (entity != null) {
        InputStream instream = entity.getContent();
        if (instream != null) {
            long contentLength = entity.getContentLength();
            if (contentLength > Integer.MAX_VALUE) {
                throw new IllegalArgumentException("HTTP entity too large to be buffered in memory");
            }
            int buffersize = (contentLength < 0) ? BUFFER_SIZE : (int) contentLength;
            try {
                ByteArrayBuffer buffer = new ByteArrayBuffer(buffersize);
                try {
                    byte[] tmp = new byte[BUFFER_SIZE];
                    int l, count = 0;
                    // do not send messages if request has been cancelled
                    while ((l = instream.read(tmp)) != -1 && !Thread.currentThread().isInterrupted()) {
                        count += l;
                        buffer.append(tmp, 0, l);
                        sendProgressMessage(count, (int) contentLength);
                    }
                } finally {
                    instream.close();
                }
                responseBody = buffer.toByteArray();
            } catch (OutOfMemoryError e) {
                System.gc();
                throw new IOException("File too large to fit into available memory");
            }
        }
    }
    return responseBody;
}
From source file:com.example.fertilizercrm.common.httpclient.AsyncHttpResponseHandler.java
/**
 * Returns byte array of response HttpEntity contents
 *
 * @param entity can be null
 * @return response entity body or null
 * @throws IOException if reading entity or creating byte array failed
 */
byte[] getResponseData(HttpEntity entity) throws IOException {
    byte[] responseBody = null;
    if (entity != null) {
        InputStream instream = entity.getContent();
        if (instream != null) {
            long contentLength = entity.getContentLength();
            if (contentLength > Integer.MAX_VALUE) {
                throw new IllegalArgumentException("HTTP entity too large to be buffered in memory");
            }
            if (contentLength < 0) {
                contentLength = BUFFER_SIZE;
            }
            try {
                ByteArrayBuffer buffer = new ByteArrayBuffer((int) contentLength);
                try {
                    byte[] tmp = new byte[BUFFER_SIZE];
                    int l, count = 0;
                    // do not send messages if request has been cancelled
                    while ((l = instream.read(tmp)) != -1 && !Thread.currentThread().isInterrupted()) {
                        count += l;
                        buffer.append(tmp, 0, l);
                        sendProgressMessage(count, (int) contentLength);
                    }
                } finally {
                    instream.close();
                }
                responseBody = buffer.toByteArray();
            } catch (OutOfMemoryError e) {
                System.gc();
                throw new IOException("File too large to fit into available memory");
            }
        }
    }
    return responseBody;
}
From source file:com.ca.dvs.app.dvs_servlet.resources.RAML.java
/**
 * Produce an EDM from an uploaded RAML file
 * <p>
 * @param uploadedInputStream the file content associated with the RAML file upload
 * @param fileDetail the file details associated with the RAML file upload
 * @return an HTTP response containing the EDM transformation for the uploaded RAML file
 */
@POST
@Path("edm")
@Consumes(MediaType.MULTIPART_FORM_DATA)
@Produces(MediaType.APPLICATION_XML)
public Response genEdm(@DefaultValue("") @FormDataParam("file") InputStream uploadedInputStream,
        @DefaultValue("") @FormDataParam("file") FormDataContentDisposition fileDetail) {
    log.info("POST raml/edm");
    Response response = null;
    File uploadedFile = null;
    File ramlFile = null;
    FileInputStream ramlFileStream = null;
    try {
        if (fileDetail == null || fileDetail.getFileName() == null || fileDetail.getName() == null) {
            throw new InvalidParameterException("file");
        }
        uploadedFile = FileUtil.getUploadedFile(uploadedInputStream, fileDetail);
        if (uploadedFile.isDirectory()) { // find RAML file in directory
            // First, look for a raml file that has the same base name as the uploaded file
            String targetName = Files.getNameWithoutExtension(fileDetail.getFileName()) + ".raml";
            ramlFile = FileUtil.selectRamlFile(uploadedFile, targetName);
        } else {
            ramlFile = uploadedFile;
        }
        List<ValidationResult> results = null;
        try {
            results = RamlUtil.validateRaml(ramlFile);
        } catch (IOException e) {
            String msg = String.format("RAML validation failed catastrophically for %s", ramlFile.getName());
            throw new Exception(msg, e.getCause());
        }
        // If the RAML file is valid, get to work...
        if (ValidationResult.areValid(results)) {
            try {
                ramlFileStream = new FileInputStream(ramlFile.getAbsolutePath());
            } catch (FileNotFoundException e) {
                String msg = String.format("Failed to open input stream from %s", ramlFile.getAbsolutePath());
                throw new Exception(msg, e.getCause());
            }
            FileResourceLoader resourceLoader = new FileResourceLoader(ramlFile.getParentFile());
            RamlDocumentBuilder rdb = new RamlDocumentBuilder(resourceLoader);
            Raml raml = rdb.build(ramlFileStream, ramlFile.getAbsolutePath());
            ramlFileStream.close();
            ramlFileStream = null;
            EDM edm = new EDM(raml);
            Document doc = null;
            try {
                doc = edm.getDocument();
                StringWriter stringWriter = new StringWriter();
                EDM.prettyPrint(doc, stringWriter);
                response = Response.status(Status.OK).entity(stringWriter.toString()).build();
            } catch (Exception e) {
                String msg = String.format("Failed to build EDM document - %s", e.getMessage());
                throw new Exception(msg, e.getCause());
            }
        } else {
            // RAML file failed validation
            response = Response.status(Status.BAD_REQUEST).entity(RamlUtil.validationMessages(results)).build();
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        String msg = ex.getMessage();
        log.error(msg, ex);
        if (ex instanceof JsonSyntaxException) {
            response = Response.status(Status.BAD_REQUEST).entity(msg).build();
        } else if (ex instanceof InvalidParameterException) {
            response = Response.status(Status.BAD_REQUEST)
                    .entity(String.format("Invalid form parameter - %s", ex.getMessage())).build();
        } else {
            response = Response.status(Status.INTERNAL_SERVER_ERROR).entity(msg).build();
        }
        return response;
    } finally {
        if (null != ramlFileStream) {
            try {
                ramlFileStream.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (null != uploadedFile) {
            if (uploadedFile.isDirectory()) {
                try {
                    // To help release files that snakeyaml abandoned open streams on -- otherwise, some files may not delete
                    System.gc();
                    // Wait a bit for the system to close abandoned streams
                    try {
                        Thread.sleep(1000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    FileUtils.deleteDirectory(uploadedFile);
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            } else {
                uploadedFile.delete();
            }
        }
    }
    return response;
}
From source file:com.all.ultrapeer.UltrapeerInvoker.java
public String gc() {
    log.info("Will invoke GarbageCollector...");
    StringBuilder sb = new StringBuilder();
    sb.append("\nTotal Memory: ");
    sb.append(Runtime.getRuntime().totalMemory() / 1024);
    sb.append("\nFree Memory Before: ");
    sb.append(Runtime.getRuntime().freeMemory() / 1024);
    System.gc();
    sb.append("\nFree Memory After: ");
    sb.append(Runtime.getRuntime().freeMemory() / 1024);
    return sb.toString();
}
From source file:eu.planets_project.pp.plato.action.project.XmlAction.java
public String export() {
    if (selectedPlan != null) {
        // convert project-name to a filename, add date:
        SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd_kkmmss");
        String planName = selectedPlan.getPlanProperties().getName();
        if ((planName == null) || "".equals(planName)) {
            planName = "export";
        }
        String normalizedPlanName = FileUtils.makeFilename(planName);
        String filename = normalizedPlanName + "-" + formatter.format(new Date());
        String binarydataTempPath = OS.getTmpPath() + normalizedPlanName + System.currentTimeMillis() + "/";
        File binarydataTempDir = new File(binarydataTempPath);
        binarydataTempDir.mkdirs();
        try {
            HttpServletResponse response = (HttpServletResponse) FacesContext.getCurrentInstance()
                    .getExternalContext().getResponse();
            response.setContentType("application/x-download");
            response.setHeader("Content-Disposition", "attachement; filename=\"" + filename + ".xml\"");
            // the length of the resulting XML file is unknown due to formatting:
            response.setContentLength(xml.length());
            try {
                BufferedOutputStream out = new BufferedOutputStream(response.getOutputStream());
                projectExport.exportComplete(selectedPlan.getPlanProperties().getId(), out, binarydataTempPath);
                out.flush();
                out.close();
            } catch (IOException e) {
                FacesMessages.instance().add(FacesMessage.SEVERITY_ERROR,
                        "An error occured while generating the export file.");
                log.error("Could not open response-outputstream: ", e);
            }
            FacesContext.getCurrentInstance().responseComplete();
        } finally {
            OS.deleteDirectory(binarydataTempDir);
        }
    }
    System.gc();
    return null;
}
From source file:juicebox.tools.utils.original.Preprocessor.java
private void writeBody(String inputFile) throws IOException {
    MatrixPP wholeGenomeMatrix = computeWholeGenomeMatrix(inputFile);
    writeMatrix(wholeGenomeMatrix);
    PairIterator iter = (inputFile.endsWith(".bin")) ? new BinPairIterator(inputFile, chromosomeIndexes)
            : new AsciiPairIterator(inputFile, chromosomeIndexes);

    int currentChr1 = -1;
    int currentChr2 = -1;
    MatrixPP currentMatrix = null;
    HashSet<String> writtenMatrices = new HashSet<String>();
    String currentMatrixKey = null;

    while (iter.hasNext()) {
        AlignmentPair pair = iter.next();
        // skip pairs that mapped to contigs
        if (!pair.isContigPair()) {
            // Flip pair if needed so chr1 < chr2
            int chr1, chr2, bp1, bp2, frag1, frag2, mapq;
            if (pair.getChr1() < pair.getChr2()) {
                bp1 = pair.getPos1();
                bp2 = pair.getPos2();
                frag1 = pair.getFrag1();
                frag2 = pair.getFrag2();
                chr1 = pair.getChr1();
                chr2 = pair.getChr2();
            } else {
                bp1 = pair.getPos2();
                bp2 = pair.getPos1();
                frag1 = pair.getFrag2();
                frag2 = pair.getFrag1();
                chr1 = pair.getChr2();
                chr2 = pair.getChr1();
            }
            mapq = Math.min(pair.getMapq1(), pair.getMapq2());

            // Filters
            if (diagonalsOnly && chr1 != chr2) continue;
            if (includedChromosomes != null && chr1 != 0) {
                String c1Name = chromosomes.get(chr1).getName();
                String c2Name = chromosomes.get(chr2).getName();
                if (!(includedChromosomes.contains(c1Name) || includedChromosomes.contains(c2Name))) {
                    continue;
                }
            }
            // only increment if not intraFragment and passes the mapq threshold
            if (mapq < mapqThreshold || (chr1 == chr2 && frag1 == frag2)) continue;

            if (!(currentChr1 == chr1 && currentChr2 == chr2)) {
                // Starting a new matrix
                if (currentMatrix != null) {
                    currentMatrix.parsingComplete();
                    writeMatrix(currentMatrix);
                    writtenMatrices.add(currentMatrixKey);
                    currentMatrix = null;
                    System.gc();
                    //System.out.println("Available memory: " + RuntimeUtils.getAvailableMemory());
                }
                // Start the next matrix
                currentChr1 = chr1;
                currentChr2 = chr2;
                currentMatrixKey = currentChr1 + "_" + currentChr2;
                if (writtenMatrices.contains(currentMatrixKey)) {
                    System.err.println("Error: the chromosome combination " + currentMatrixKey
                            + " appears in multiple blocks");
                    if (outputFile != null) outputFile.deleteOnExit();
                    System.exit(1);
                }
                currentMatrix = new MatrixPP(currentChr1, currentChr2);
            }
            currentMatrix.incrementCount(bp1, bp2, frag1, frag2, pair.getScore());
        }
    }

    if (currentMatrix != null) {
        currentMatrix.parsingComplete();
        writeMatrix(currentMatrix);
    }

    if (iter != null) iter.close();

    masterIndexPosition = los.getWrittenCount();
}
From source file:com.example.administrator.newsdaily.model.httpclient.AsyncHttpResponseHandler.java
/**
 * Returns byte array of response HttpEntity contents
 *
 * @param entity can be null
 * @return response entity body or null
 * @throws IOException if reading entity or creating byte array failed
 */
byte[] getResponseData(HttpEntity entity) throws IOException {
    byte[] responseBody = null;
    if (entity != null) {
        InputStream instream = entity.getContent();
        if (instream != null) {
            long contentLength = entity.getContentLength();
            if (contentLength > Integer.MAX_VALUE) {
                throw new IllegalArgumentException("HTTP entity too large to be buffered in memory");
            }
            int buffersize = (contentLength <= 0) ? BUFFER_SIZE : (int) contentLength;
            try {
                ByteArrayBuffer buffer = new ByteArrayBuffer(buffersize);
                try {
                    byte[] tmp = new byte[BUFFER_SIZE];
                    int l, count = 0;
                    // do not send messages if request has been cancelled
                    while ((l = instream.read(tmp)) != -1 && !Thread.currentThread().isInterrupted()) {
                        count += l;
                        buffer.append(tmp, 0, l);
                        sendProgressMessage(count, (int) (contentLength <= 0 ? 1 : contentLength));
                    }
                } finally {
                    AsyncHttpClient.silentCloseInputStream(instream);
                }
                responseBody = buffer.toByteArray();
            } catch (OutOfMemoryError e) {
                System.gc();
                throw new IOException("File too large to fit into available memory");
            }
        }
    }
    return responseBody;
}
From source file:com.mozilla.SUTAgentAndroid.SUTAgentAndroid.java
@Override
public void onLowMemory() {
    System.gc();
    logMemory("onLowMemory");
}
From source file:DIA_Umpire_Quant.DIA_Umpire_Quant.java
/** * @param args the command line arguments *//*from w w w .j a va 2 s . c o m*/ public static void main(String[] args) throws FileNotFoundException, IOException, Exception { System.out.println( "================================================================================================="); System.out.println("DIA-Umpire quantitation with targeted re-extraction analysis (version: " + UmpireInfo.GetInstance().Version + ")"); if (args.length != 1) { System.out.println( "command format error, it should be like: java -jar -Xmx10G DIA_Umpire_Quant.jar diaumpire_quant.params"); return; } try { ConsoleLogger.SetConsoleLogger(Level.INFO); ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_quant.log"); } catch (Exception e) { } try { Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version); Logger.getRootLogger().info("Parameter file:" + args[0]); BufferedReader reader = new BufferedReader(new FileReader(args[0])); String line = ""; String WorkFolder = ""; int NoCPUs = 2; String UserMod = ""; String Combined_Prot = ""; String InternalLibID = ""; String ExternalLibPath = ""; String ExternalLibDecoyTag = "DECOY"; boolean DefaultProtFiltering = true; boolean DataSetLevelPepFDR = false; float ProbThreshold = 0.99f; float ExtProbThreshold = 0.99f; float Freq = 0f; int TopNPep = 6; int TopNFrag = 6; float MinFragMz = 200f; String FilterWeight = "GW"; float MinWeight = 0.9f; float RTWindow_Int = -1f; float RTWindow_Ext = -1f; TandemParam tandemPara = new TandemParam(DBSearchParam.SearchInstrumentType.TOF5600); HashMap<String, File> AssignFiles = new HashMap<>(); boolean InternalLibSearch = false; boolean ExternalLibSearch = false; boolean ExportSaint = false; boolean SAINT_MS1 = false; boolean SAINT_MS2 = true; HashMap<String, String[]> BaitList = new HashMap<>(); HashMap<String, String> BaitName = new HashMap<>(); HashMap<String, String[]> ControlList = new HashMap<>(); HashMap<String, String> ControlName = new HashMap<>(); //<editor-fold defaultstate="collapsed" desc="Reading parameter file"> while ((line = reader.readLine()) != null) { line = line.trim(); Logger.getRootLogger().info(line); if (!"".equals(line) && !line.startsWith("#")) { //System.out.println(line); if (line.equals("==File list begin")) { do { line = reader.readLine(); line = line.trim(); if (line.equals("==File list end")) { continue; } else if (!"".equals(line)) { File newfile = new File(line); if (newfile.exists()) { AssignFiles.put(newfile.getAbsolutePath(), newfile); } else { Logger.getRootLogger().info("File: " + newfile + " does not exist."); } } } while (!line.equals("==File list end")); } if (line.split("=").length < 2) { continue; } String type = line.split("=")[0].trim(); String value = line.split("=")[1].trim(); switch (type) { case "TargetedExtraction": { InternalLibSearch = Boolean.parseBoolean(value); break; } case "InternalLibSearch": { InternalLibSearch = Boolean.parseBoolean(value); break; } case "ExternalLibSearch": { ExternalLibSearch = Boolean.parseBoolean(value); break; } case "Path": { WorkFolder = value; break; } case "path": { WorkFolder = value; break; } case "Thread": { NoCPUs = Integer.parseInt(value); break; } case "Fasta": { tandemPara.FastaPath = value; break; } case "Combined_Prot": { Combined_Prot = value; break; } case "DefaultProtFiltering": { DefaultProtFiltering = Boolean.parseBoolean(value); break; } case "DecoyPrefix": { if (!"".equals(value)) { tandemPara.DecoyPrefix = value; } break; } case "UserMod": { UserMod = value; break; } case 
"ProteinFDR": { tandemPara.ProtFDR = Float.parseFloat(value); break; } case "PeptideFDR": { tandemPara.PepFDR = Float.parseFloat(value); break; } case "DataSetLevelPepFDR": { DataSetLevelPepFDR = Boolean.parseBoolean(value); break; } case "InternalLibID": { InternalLibID = value; break; } case "ExternalLibPath": { ExternalLibPath = value; break; } case "ExtProbThreshold": { ExtProbThreshold = Float.parseFloat(value); break; } case "RTWindow_Int": { RTWindow_Int = Float.parseFloat(value); break; } case "RTWindow_Ext": { RTWindow_Ext = Float.parseFloat(value); break; } case "ExternalLibDecoyTag": { ExternalLibDecoyTag = value; if (ExternalLibDecoyTag.endsWith("_")) { ExternalLibDecoyTag = ExternalLibDecoyTag.substring(0, ExternalLibDecoyTag.length() - 1); } break; } case "ProbThreshold": { ProbThreshold = Float.parseFloat(value); break; } case "ReSearchProb": { //ReSearchProb = Float.parseFloat(value); break; } case "FilterWeight": { FilterWeight = value; break; } case "MinWeight": { MinWeight = Float.parseFloat(value); break; } case "TopNFrag": { TopNFrag = Integer.parseInt(value); break; } case "TopNPep": { TopNPep = Integer.parseInt(value); break; } case "Freq": { Freq = Float.parseFloat(value); break; } case "MinFragMz": { MinFragMz = Float.parseFloat(value); break; } //<editor-fold defaultstate="collapsed" desc="SaintOutput"> case "ExportSaintInput": { ExportSaint = Boolean.parseBoolean(value); break; } case "QuantitationType": { switch (value) { case "MS1": { SAINT_MS1 = true; SAINT_MS2 = false; break; } case "MS2": { SAINT_MS1 = false; SAINT_MS2 = true; break; } case "BOTH": { SAINT_MS1 = true; SAINT_MS2 = true; break; } } break; } // case "BaitInputFile": { // SaintBaitFile = value; // break; // } // case "PreyInputFile": { // SaintPreyFile = value; // break; // } // case "InterationInputFile": { // SaintInteractionFile = value; // break; // } default: { if (type.startsWith("BaitName_")) { BaitName.put(type.substring(9), value); } if (type.startsWith("BaitFile_")) { BaitList.put(type.substring(9), value.split("\t")); } if (type.startsWith("ControlName_")) { ControlName.put(type.substring(12), value); } if (type.startsWith("ControlFile_")) { ControlList.put(type.substring(12), value.split("\t")); } break; } //</editor-fold> } } } //</editor-fold> //Initialize PTM manager using compomics library PTMManager.GetInstance(); if (!UserMod.equals("")) { PTMManager.GetInstance().ImportUserMod(UserMod); } //Check if the fasta file can be found if (!new File(tandemPara.FastaPath).exists()) { Logger.getRootLogger().info("Fasta file :" + tandemPara.FastaPath + " cannot be found, the process will be terminated, please check."); System.exit(1); } //Check if the prot.xml file can be found if (!new File(Combined_Prot).exists()) { Logger.getRootLogger().info("ProtXML file: " + Combined_Prot + " cannot be found, the export protein summary table will be empty."); } LCMSID protID = null; //Parse prot.xml and generate protein master list given an FDR if (Combined_Prot != null && !Combined_Prot.equals("")) { protID = LCMSID.ReadLCMSIDSerialization(Combined_Prot); if (!"".equals(Combined_Prot) && protID == null) { protID = new LCMSID(Combined_Prot, tandemPara.DecoyPrefix, tandemPara.FastaPath); ProtXMLParser protxmlparser = new ProtXMLParser(protID, Combined_Prot, 0f); //Use DIA-Umpire default protein FDR calculation if (DefaultProtFiltering) { protID.RemoveLowLocalPWProtein(0.8f); protID.RemoveLowMaxIniProbProtein(0.9f); protID.FilterByProteinDecoyFDRUsingMaxIniProb(tandemPara.DecoyPrefix, 
tandemPara.ProtFDR); } //Get protein FDR calculation without other filtering else { protID.FilterByProteinDecoyFDRUsingLocalPW(tandemPara.DecoyPrefix, tandemPara.ProtFDR); } protID.LoadSequence(); protID.WriteLCMSIDSerialization(Combined_Prot); } Logger.getRootLogger().info("Protein No.:" + protID.ProteinList.size()); } HashMap<String, HashMap<String, FragmentPeak>> IDSummaryFragments = new HashMap<>(); //Generate DIA file list ArrayList<DIAPack> FileList = new ArrayList<>(); File folder = new File(WorkFolder); if (!folder.exists()) { Logger.getRootLogger().info("The path : " + WorkFolder + " cannot be found."); System.exit(1); } for (final File fileEntry : folder.listFiles()) { if (fileEntry.isFile() && (fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml") | fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzml")) && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml") && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml") && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) { AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry); } if (fileEntry.isDirectory()) { for (final File fileEntry2 : fileEntry.listFiles()) { if (fileEntry2.isFile() && (fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml") | fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzml")) && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml") && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml") && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) { AssignFiles.put(fileEntry2.getAbsolutePath(), fileEntry2); } } } } Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size()); for (File fileEntry : AssignFiles.values()) { Logger.getRootLogger().info(fileEntry.getAbsolutePath()); String mzXMLFile = fileEntry.getAbsolutePath(); if (mzXMLFile.toLowerCase().endsWith(".mzxml") | mzXMLFile.toLowerCase().endsWith(".mzml")) { DIAPack DiaFile = new DIAPack(mzXMLFile, NoCPUs); FileList.add(DiaFile); HashMap<String, FragmentPeak> FragMap = new HashMap<>(); IDSummaryFragments.put(FilenameUtils.getBaseName(mzXMLFile), FragMap); Logger.getRootLogger().info( "================================================================================================="); Logger.getRootLogger().info("Processing " + mzXMLFile); if (!DiaFile.LoadDIASetting()) { Logger.getRootLogger().info("Loading DIA setting failed, job is incomplete"); System.exit(1); } if (!DiaFile.LoadParams()) { Logger.getRootLogger().info("Loading parameters failed, job is incomplete"); System.exit(1); } } } LCMSID combinePepID = null; if (DataSetLevelPepFDR) { combinePepID = LCMSID.ReadLCMSIDSerialization(WorkFolder + "combinePepID.SerFS"); if (combinePepID == null) { FDR_DataSetLevel fdr = new FDR_DataSetLevel(); fdr.GeneratePepIonList(FileList, tandemPara, WorkFolder + "combinePepID.SerFS"); combinePepID = fdr.combineID; combinePepID.WriteLCMSIDSerialization(WorkFolder + "combinePepID.SerFS"); } } //process each DIA file for quantification based on untargeted identifications for (DIAPack DiaFile : FileList) { long time = System.currentTimeMillis(); Logger.getRootLogger().info("Loading identification results " + DiaFile.Filename + "...."); //If the LCMSID serialization is found if (!DiaFile.ReadSerializedLCMSID()) { DiaFile.ParsePepXML(tandemPara, combinePepID); DiaFile.BuildStructure(); if (!DiaFile.MS1FeatureMap.ReadPeakCluster()) { Logger.getRootLogger().info("Loading peak and structure failed, job is incomplete"); System.exit(1); } 
DiaFile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster(); //Generate mapping between index of precursor feature and pseudo MS/MS scan index DiaFile.GenerateClusterScanNomapping(); //Doing quantification DiaFile.AssignQuant(); DiaFile.ClearStructure(); } DiaFile.IDsummary.ReduceMemoryUsage(); time = System.currentTimeMillis() - time; Logger.getRootLogger().info(DiaFile.Filename + " processed time:" + String.format("%d hour, %d min, %d sec", TimeUnit.MILLISECONDS.toHours(time), TimeUnit.MILLISECONDS.toMinutes(time) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)), TimeUnit.MILLISECONDS.toSeconds(time) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time)))); } //<editor-fold defaultstate="collapsed" desc="Targete re-extraction using internal library"> Logger.getRootLogger().info( "================================================================================================="); if (InternalLibSearch && FileList.size() > 1) { Logger.getRootLogger().info("Module C: Targeted extraction using internal library"); FragmentLibManager libManager = FragmentLibManager.ReadFragmentLibSerialization(WorkFolder, InternalLibID); if (libManager == null) { Logger.getRootLogger().info("Building internal spectral library"); libManager = new FragmentLibManager(InternalLibID); ArrayList<LCMSID> LCMSIDList = new ArrayList<>(); for (DIAPack dia : FileList) { LCMSIDList.add(dia.IDsummary); } libManager.ImportFragLibTopFrag(LCMSIDList, Freq, TopNFrag); libManager.WriteFragmentLibSerialization(WorkFolder); } libManager.ReduceMemoryUsage(); Logger.getRootLogger() .info("Building retention time prediction model and generate candidate peptide list"); for (int i = 0; i < FileList.size(); i++) { FileList.get(i).IDsummary.ClearMappedPep(); } for (int i = 0; i < FileList.size(); i++) { for (int j = i + 1; j < FileList.size(); j++) { RTAlignedPepIonMapping alignment = new RTAlignedPepIonMapping(WorkFolder, FileList.get(i).GetParameter(), FileList.get(i).IDsummary, FileList.get(j).IDsummary); alignment.GenerateModel(); alignment.GenerateMappedPepIon(); } FileList.get(i).ExportID(); FileList.get(i).IDsummary = null; } Logger.getRootLogger().info("Targeted matching........"); for (DIAPack diafile : FileList) { if (diafile.IDsummary == null) { diafile.ReadSerializedLCMSID(); } if (!diafile.IDsummary.GetMappedPepIonList().isEmpty()) { diafile.UseMappedIon = true; diafile.FilterMappedIonByProb = false; diafile.BuildStructure(); diafile.MS1FeatureMap.ReadPeakCluster(); diafile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster(); diafile.GenerateMassCalibrationRTMap(); diafile.TargetedExtractionQuant(false, libManager, 1.1f, RTWindow_Int); diafile.MS1FeatureMap.ClearAllPeaks(); diafile.IDsummary.ReduceMemoryUsage(); diafile.IDsummary.RemoveLowProbMappedIon(ProbThreshold); diafile.ExportID(); Logger.getRootLogger().info("Peptide ions: " + diafile.IDsummary.GetPepIonList().size() + " Mapped ions: " + diafile.IDsummary.GetMappedPepIonList().size()); diafile.ClearStructure(); } diafile.IDsummary = null; System.gc(); } Logger.getRootLogger().info( "================================================================================================="); } //</editor-fold> //<editor-fold defaultstate="collapsed" desc="Targeted re-extraction using external library"> //External library search if (ExternalLibSearch) { Logger.getRootLogger().info("Module C: Targeted extraction using external library"); //Read exteranl library FragmentLibManager ExlibManager = FragmentLibManager.ReadFragmentLibSerialization(WorkFolder, 
FilenameUtils.getBaseName(ExternalLibPath)); if (ExlibManager == null) { ExlibManager = new FragmentLibManager(FilenameUtils.getBaseName(ExternalLibPath)); //Import traML file ExlibManager.ImportFragLibByTraML(ExternalLibPath, ExternalLibDecoyTag); //Check if there are decoy spectra ExlibManager.CheckDecoys(); //ExlibManager.ImportFragLibBySPTXT(ExternalLibPath); ExlibManager.WriteFragmentLibSerialization(WorkFolder); } Logger.getRootLogger() .info("No. of peptide ions in external lib:" + ExlibManager.PeptideFragmentLib.size()); for (DIAPack diafile : FileList) { if (diafile.IDsummary == null) { diafile.ReadSerializedLCMSID(); } //Generate RT mapping RTMappingExtLib RTmap = new RTMappingExtLib(diafile.IDsummary, ExlibManager, diafile.GetParameter()); RTmap.GenerateModel(); RTmap.GenerateMappedPepIon(); diafile.BuildStructure(); diafile.MS1FeatureMap.ReadPeakCluster(); diafile.GenerateMassCalibrationRTMap(); //Perform targeted re-extraction diafile.TargetedExtractionQuant(false, ExlibManager, ProbThreshold, RTWindow_Ext); diafile.MS1FeatureMap.ClearAllPeaks(); diafile.IDsummary.ReduceMemoryUsage(); //Remove target IDs below the defined probability threshold diafile.IDsummary.RemoveLowProbMappedIon(ExtProbThreshold); diafile.ExportID(); diafile.ClearStructure(); Logger.getRootLogger().info("Peptide ions: " + diafile.IDsummary.GetPepIonList().size() + " Mapped ions: " + diafile.IDsummary.GetMappedPepIonList().size()); } } //</editor-fold> //<editor-fold defaultstate="collapsed" desc="Peptide and fragment selection"> Logger.getRootLogger().info("Peptide and fragment selection across the whole dataset"); ArrayList<LCMSID> SummaryList = new ArrayList<>(); for (DIAPack diafile : FileList) { if (diafile.IDsummary == null) { diafile.ReadSerializedLCMSID(); diafile.IDsummary.ClearAssignPeakCluster(); //diafile.IDsummary.ClearPSMs(); } if (protID != null) { //Generate protein list according to mapping of peptide ions for each DIA file to the master protein list diafile.IDsummary.GenerateProteinByRefIDByPepSeq(protID, true); diafile.IDsummary.ReMapProPep(); } if ("GW".equals(FilterWeight)) { diafile.IDsummary.SetFilterByGroupWeight(); } else if ("PepW".equals(FilterWeight)) { diafile.IDsummary.SetFilterByWeight(); } SummaryList.add(diafile.IDsummary); } FragmentSelection fragselection = new FragmentSelection(SummaryList); fragselection.freqPercent = Freq; fragselection.MinFragMZ = MinFragMz; fragselection.GeneratePepFragScoreMap(); fragselection.GenerateTopFragMap(TopNFrag); fragselection.GenerateProtPepScoreMap(MinWeight); fragselection.GenerateTopPepMap(TopNPep); //</editor-fold> //<editor-fold defaultstate="collapsed" desc="Writing general reports"> ExportTable export = new ExportTable(WorkFolder, SummaryList, IDSummaryFragments, protID, fragselection); export.Export(TopNPep, TopNFrag, Freq); //</editor-fold> //<editor-fold defaultstate="collapsed" desc="//<editor-fold defaultstate="collapsed" desc="Generate SAINT input files"> if (ExportSaint && protID != null) { HashMap<String, DIAPack> Filemap = new HashMap<>(); for (DIAPack DIAfile : FileList) { Filemap.put(DIAfile.GetBaseName(), DIAfile); } FileWriter baitfile = new FileWriter(WorkFolder + "SAINT_Bait_" + DateTimeTag.GetTag() + ".txt"); FileWriter preyfile = new FileWriter(WorkFolder + "SAINT_Prey_" + DateTimeTag.GetTag() + ".txt"); FileWriter interactionfileMS1 = null; FileWriter interactionfileMS2 = null; if (SAINT_MS1) { interactionfileMS1 = new FileWriter( WorkFolder + "SAINT_Interaction_MS1_" + DateTimeTag.GetTag() + ".txt"); } if 
(SAINT_MS2) { interactionfileMS2 = new FileWriter( WorkFolder + "SAINT_Interaction_MS2_" + DateTimeTag.GetTag() + ".txt"); } HashMap<String, String> PreyID = new HashMap<>(); for (String samplekey : ControlName.keySet()) { String name = ControlName.get(samplekey); for (String file : ControlList.get(samplekey)) { baitfile.write(FilenameUtils.getBaseName(file) + "\t" + name + "\t" + "C\n"); LCMSID IDsummary = Filemap.get(FilenameUtils.getBaseName(file)).IDsummary; if (SAINT_MS1) { SaintOutput(protID, IDsummary, fragselection, interactionfileMS1, file, name, PreyID, 1); } if (SAINT_MS2) { SaintOutput(protID, IDsummary, fragselection, interactionfileMS2, file, name, PreyID, 2); } } } for (String samplekey : BaitName.keySet()) { String name = BaitName.get(samplekey); for (String file : BaitList.get(samplekey)) { baitfile.write(FilenameUtils.getBaseName(file) + "\t" + name + "\t" + "T\n"); LCMSID IDsummary = Filemap.get(FilenameUtils.getBaseName(file)).IDsummary; if (SAINT_MS1) { SaintOutput(protID, IDsummary, fragselection, interactionfileMS1, file, name, PreyID, 1); } if (SAINT_MS2) { SaintOutput(protID, IDsummary, fragselection, interactionfileMS2, file, name, PreyID, 2); } } } baitfile.close(); if (SAINT_MS1) { interactionfileMS1.close(); } if (SAINT_MS2) { interactionfileMS2.close(); } for (String AccNo : PreyID.keySet()) { preyfile.write(AccNo + "\t" + PreyID.get(AccNo) + "\n"); } preyfile.close(); } //</editor-fold> Logger.getRootLogger().info("Job done"); Logger.getRootLogger().info( "================================================================================================="); } catch (Exception e) { Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e)); throw e; } }