Usage examples for java.util.List.subList
List<E> subList(int fromIndex, int toIndex);
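Before the longer project examples below, here is a minimal sketch (not taken from any of the listed projects) of the subList contract: the returned list is a view of the range [fromIndex, toIndex) — fromIndex inclusive, toIndex exclusive — backed by the original list, so changes made through the view are reflected in the parent list and vice versa.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SubListDemo {
        public static void main(String[] args) {
            List<String> letters = new ArrayList<>(Arrays.asList("a", "b", "c", "d", "e"));

            // fromIndex is inclusive, toIndex is exclusive: elements at index 1 and 2.
            List<String> middle = letters.subList(1, 3);
            System.out.println(middle);            // [b, c]

            // The sub-list is a view backed by the original list:
            // writing through the view changes the parent ...
            middle.set(0, "B");
            System.out.println(letters);           // [a, B, c, d, e]

            // ... and clearing a view removes that range from the parent.
            letters.subList(3, letters.size()).clear();
            System.out.println(letters);           // [a, B, c]
        }
    }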
From source file:diffhunter.DiffHunter.java
/**
 * @param args the command line arguments
 * @throws org.apache.commons.cli.ParseException
 * @throws java.io.IOException
 */
public static void main(String[] args) throws ParseException, IOException {
    // Example invocations kept from the original source:
    // args = new String[] { "-i", "-b", "J:\\VishalData\\additional\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted.bed",
    //         "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-o", "J:\\VishalData" };
    // args = new String[] { "-c", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt",
    //         "-1", "J:\\VishalData\\Ptbp2_Adult_testis_CLIP_mm9_plus_strand_sorted_BDB",
    //         "-2", "J:\\VishalData\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted_BDB",
    //         "-w", "200", "-s", "50", "-o", "J:\\VishalData" };

    Options options = new Options();
    options.addOption("i", "index", false, "Indexing BED files.");
    options.addOption("b", "bed", true, "BED file to be indexed");
    options.addOption("o", "output", true, "Folder in which the index/comparison file will be created");
    options.addOption("r", "reference", true, "Reference annotation file to be used for indexing");
    options.addOption("c", "compare", false, "Find differences between two conditions");
    options.addOption("1", "first", true, "First sample index location");
    options.addOption("2", "second", true, "Second sample index location");
    options.addOption("w", "window", true, "Length of the window used for identifying differences");
    options.addOption("s", "sliding", true, "Length of the sliding step");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);

    boolean indexing = false;
    boolean comparing = false;
    if (cmd.hasOption("i")) {
        indexing = true;
    } else if (cmd.hasOption("c")) {
        comparing = true;
    } else {
        // Neither mode was selected: print usage and exit.
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("diffhunter", options);
        return;
    }

    if (indexing) {
        // Indexing requires an output folder, a reference annotation and a BED file.
        if (!(cmd.hasOption("o") && cmd.hasOption("r") && cmd.hasOption("b"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String bedfile_ = cmd.getOptionValue("b");
        String reference_file = cmd.getOptionValue("r");
        String folder_loc = cmd.getOptionValue("o");
        String sample_name = FilenameUtils.getBaseName(bedfile_);
        try (Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(
                Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString(), true, sample_name)) {
            Indexer indexing_ = new Indexer(reference_file);
            indexing_.Make_Index(B2, bedfile_,
                    Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString());
            B2.close();
        }
    } else if (comparing) {
        // Comparison requires the output folder, window and slide lengths, and both sample indexes.
        if (!(cmd.hasOption("o") && cmd.hasOption("w") && cmd.hasOption("s")
                && cmd.hasOption("1") && cmd.hasOption("2"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String folder_loc = cmd.getOptionValue("o");
        int window_ = Integer.parseInt(cmd.getOptionValue("w"));
        int slide_ = Integer.parseInt(cmd.getOptionValue("s"));
        String first = cmd.getOptionValue("1").replace("_BDB", "");
        String second = cmd.getOptionValue("2").replace("_BDB", "");
        String reference_file = cmd.getOptionValue("r");
        String sample_name_first = FilenameUtils.getBaseName(first);
        String sample_name_second = FilenameUtils.getBaseName(second);

        Database B1 = BerkeleyDB_Box.Get_BerkeleyDB(first + "_BDB", false, sample_name_first);
        Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(second + "_BDB", false, sample_name_second);

        List<String> first_condition_genes = Files
                .lines(Paths.get(first + "_BDB", sample_name_first + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        List<String> second_condition_genes = Files
                .lines(Paths.get(second + "_BDB", sample_name_second + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        System.out.println("First and second conditions are loaded.");

        // Genes present in both conditions.
        List<String> intersection_ = new ArrayList<>(first_condition_genes);
        intersection_.retainAll(second_condition_genes);

        BufferedWriter output = new BufferedWriter(new FileWriter(
                Paths.get(folder_loc, "differences_" + window_ + "_s" + slide_ + "_c" + ".txt")
                        .toAbsolutePath().toString(), false));
        List<Result_Window> final_results = Collections.synchronizedList(new ArrayList<>());
        Worker_New worker_class = new Worker_New();
        worker_class.Read_Reference(reference_file);

        while (!intersection_.isEmpty()) {
            // Take the next batch of at most 10,000 genes. (The original listing compared
            // intersection_.size() with itself, which made both branches identical; this
            // restores the intent of the commented-out batching code.)
            int batchSize = Math.min(10000, intersection_.size());
            List<String> selected_genes = new ArrayList<>(intersection_.subList(0, batchSize));
            intersection_.removeAll(selected_genes);

            IntStream.range(0, selected_genes.size()).parallel().forEach(i -> {
                System.out.println(selected_genes.get(i) + "\tprocessing......");
                String gene_of_interest = selected_genes.get(i);
                int start = worker_class.dic_genes.get(gene_of_interest).start_loc;
                int end = worker_class.dic_genes.get(gene_of_interest).end_loc;

                Map<Integer, Integer> first_ = Collections.emptyMap();
                try {
                    first_ = BerkeleyDB_Box.Get_Coord_Read(B1, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }
                Map<Integer, Integer> second_ = Collections.emptyMap();
                try {
                    second_ = BerkeleyDB_Box.Get_Coord_Read(B2, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }

                List<Window> top_windows_first = worker_class.Get_Top_Windows(window_, first_, slide_);
                List<Window> top_windows_second = worker_class.Get_Top_Windows(window_, second_, slide_);
                if (top_windows_first.isEmpty() && top_windows_second.isEmpty()) {
                    return;
                }
                List<Result_Window> res_temp = new Worker_New().Get_Significant_Windows(gene_of_interest,
                        start, end, top_windows_first, top_windows_second, second_, first_,
                        sample_name_first, sample_name_second, 0.01);
                if (!res_temp.isEmpty()) {
                    final_results.addAll(res_temp);
                }
            });

            // Benjamini-Hochberg correction of the collected p-values, then write the batch out.
            List<Double> pvals = new ArrayList<>();
            for (int i = 0; i < final_results.size(); i++) {
                pvals.add(final_results.get(i).p_value);
            }
            List<Double> qvals = MultipleTestCorrection.benjaminiHochberg(pvals);
            System.out.println("Writing to file...");
            output.append("Gene_Symbol\tContributing_Sample\tStart\tEnd\tOddsRatio\tp_Value\tFDR");
            output.newLine();
            for (int i = 0; i < final_results.size(); i++) {
                Result_Window item = final_results.get(i);
                output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t"
                        + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t"
                        + item.p_value + "\t" + qvals.get(i));
                output.newLine();
            }
            final_results.clear();
        }
        output.close();
    }
    System.out.println("Done.");
}
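The loop above walks the gene intersection in batches taken with subList. Below is a stripped-down sketch of that batching idiom on its own, so the subList usage is easier to see; BATCH_SIZE and processBatch are hypothetical stand-ins and are not part of DiffHunter.

    import java.util.ArrayList;
    import java.util.List;

    public final class BatchProcessor {
        private static final int BATCH_SIZE = 10_000;

        public static void processInBatches(List<String> work) {
            List<String> remaining = new ArrayList<>(work);   // copy so the caller's list is untouched
            while (!remaining.isEmpty()) {
                int end = Math.min(BATCH_SIZE, remaining.size());
                // Copy the view before mutating the backing list.
                List<String> batch = new ArrayList<>(remaining.subList(0, end));
                processBatch(batch);
                // Clearing the prefix view drops the processed range from the backlog.
                remaining.subList(0, end).clear();
            }
        }

        private static void processBatch(List<String> batch) {
            batch.forEach(item -> System.out.println("processing " + item));
        }
    }

Clearing the prefix view is also cheaper than the removeAll call used in the original, since it removes a contiguous range instead of searching for each element.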
From source file:com.example.bigtable.simplecli.HBaseCLI.java
/**
 * The main method for the CLI. This method takes the command line
 * arguments and runs the appropriate commands.
 */
public static void main(String[] args) {
    // We use Apache commons-cli to check for a help option.
    Options options = new Options();
    Option help = new Option("help", "print this message");
    options.addOption(help);

    // Create the parser.
    CommandLineParser parser = new BasicParser();
    CommandLine line = null;
    try {
        // Parse the command line arguments.
        line = parser.parse(options, args);
    } catch (ParseException exp) {
        // Oops, something went wrong.
        System.err.println(exp.getMessage());
        usage();
        System.exit(1);
    }

    // Create a list of commands that are supported. Each command defines
    // a run method and some methods for printing help.
    // See the definition of each command below.
    HashMap<String, Command> commands = new HashMap<String, Command>();
    commands.put("create", new CreateCommand("create"));
    commands.put("scan", new ScanCommand("scan"));
    commands.put("get", new GetCommand("get"));
    commands.put("put", new PutCommand("put"));
    commands.put("list", new ListCommand("list"));

    Command command = null;
    List<String> argsList = Arrays.asList(args);
    if (argsList.size() > 0) {
        command = commands.get(argsList.get(0));
    }

    // Check for the help option and if it's there display the appropriate help.
    if (line.hasOption("help")) {
        // If there is a command listed (e.g. create -help)
        // then show the help for that command.
        if (command == null) {
            help(commands.values());
        } else {
            help(command);
        }
        System.exit(0);
    } else if (command == null) {
        // No valid command was given so print the usage.
        usage();
        System.exit(0);
    }

    try {
        Connection connection = ConnectionFactory.createConnection();
        try {
            try {
                // Run the command with the arguments after the command name.
                command.run(connection, argsList.subList(1, argsList.size()));
            } catch (InvalidArgsException e) {
                System.out.println("ERROR: Invalid arguments");
                usage(command);
                System.exit(0);
            }
        } finally {
            // Make sure the connection is closed even if an exception occurs.
            connection.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
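HBaseCLI passes everything after the command name to the command with argsList.subList(1, argsList.size()). The self-contained snippet below (illustrative only; the hard-coded table and value strings are made up) shows the same split in isolation.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;

    public class ArgSplitter {
        public static void main(String[] args) {
            List<String> argsList = Arrays.asList("put", "mytable", "row1", "cf:col", "value");
            String command = argsList.isEmpty() ? null : argsList.get(0);
            // Everything after the command name, or an empty list when no args were given.
            List<String> commandArgs = argsList.isEmpty()
                    ? Collections.<String>emptyList()
                    : argsList.subList(1, argsList.size());
            System.out.println("command = " + command);        // put
            System.out.println("arguments = " + commandArgs);  // [mytable, row1, cf:col, value]
        }
    }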
From source file:edu.isi.karma.research.modeling.ModelLearner_LOD.java
public static void main(String[] args) throws Exception {

    ServletContextParameterMap contextParameters = ContextParametersRegistry.getInstance().getDefault();
    contextParameters.setParameterValue(ContextParameter.USER_CONFIG_DIRECTORY, "/Users/mohsen/karma/config");

    OntologyManager ontologyManager = new OntologyManager(contextParameters.getId());
    File ff = new File(Params.ONTOLOGY_DIR);
    File[] files = ff.listFiles();
    if (files == null) {
        logger.error("no ontology to import at " + ff.getAbsolutePath());
        return;
    }
    for (File f : files) {
        if (f.getName().endsWith(".owl") || f.getName().endsWith(".rdf") || f.getName().endsWith(".n3")
                || f.getName().endsWith(".ttl") || f.getName().endsWith(".xml")) {
            logger.info("Loading ontology file: " + f.getAbsolutePath());
            ontologyManager.doImport(f, "UTF-8");
        }
    }
    ontologyManager.updateCache();

    String outputPath = Params.OUTPUT_DIR;
    String graphPath = Params.GRAPHS_DIR;
    FileUtils.cleanDirectory(new File(graphPath));

    List<SemanticModel> semanticModels = ModelReader.importSemanticModelsFromJsonFiles(Params.MODEL_DIR,
            Params.MODEL_MAIN_FILE_EXT);

    ModelLearner_LOD modelLearner = null;

    boolean onlyGenerateSemanticTypeStatistics = false;
    boolean onlyUseOntology = false;
    boolean useCorrectType = false;
    int numberOfCandidates = 4;
    boolean onlyEvaluateInternalLinks = false;
    int maxPatternSize = 3;

    if (onlyGenerateSemanticTypeStatistics) {
        getStatistics(semanticModels);
        return;
    }

    String filePath = Params.RESULTS_DIR + "temp/";
    String filename = "";
    filename += "lod-results";
    filename += useCorrectType ? "-correct" : "-k=" + numberOfCandidates;
    filename += onlyUseOntology ? "-ontology" : "-p" + maxPatternSize;
    filename += onlyEvaluateInternalLinks ? "-internal" : "-all";
    filename += ".csv";

    PrintWriter resultFile = new PrintWriter(new File(filePath + filename));
    resultFile.println("source \t p \t r \t t \n");

    for (int i = 0; i < semanticModels.size(); i++) {
    // for (int i = 0; i <= 10; i++) {
    // int i = 1; {

        int newSourceIndex = i;
        SemanticModel newSource = semanticModels.get(newSourceIndex);

        logger.info("======================================================");
        logger.info(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        System.out.println(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        logger.info("======================================================");

        SemanticModel correctModel = newSource;
        List<ColumnNode> columnNodes = correctModel.getColumnNodes();
        List<Node> steinerNodes = new LinkedList<Node>(columnNodes);

        String graphName = graphPath + "lod" + Params.GRAPH_FILE_EXT;

        if (onlyUseOntology) {
            modelLearner = new ModelLearner_LOD(new GraphBuilder(ontologyManager, false), steinerNodes);
        } else if (new File(graphName).exists()) {
            // Read the graph from file.
            try {
                logger.info("loading the graph ...");
                DirectedWeightedMultigraph<Node, DefaultLink> graph = GraphUtil.importJson(graphName);
                modelLearner = new ModelLearner_LOD(new GraphBuilderTopK(ontologyManager, graph), steinerNodes);
            } catch (Exception e) {
                e.printStackTrace();
                resultFile.close();
                return;
            }
        } else {
            logger.info("building the graph ...");
            // Create and save the graph to file.
            // GraphBuilder_Popularity b = new GraphBuilder_Popularity(ontologyManager,
            //         Params.LOD_OBJECT_PROPERIES_FILE, Params.LOD_DATA_PROPERIES_FILE);
            GraphBuilder_LOD_Pattern b = new GraphBuilder_LOD_Pattern(ontologyManager, Params.PATTERNS_DIR,
                    maxPatternSize);
            modelLearner = new ModelLearner_LOD(b.getGraphBuilder(), steinerNodes);
        }

        long start = System.currentTimeMillis();

        List<SortableSemanticModel> hypothesisList = modelLearner.hypothesize(useCorrectType, numberOfCandidates);

        long elapsedTimeMillis = System.currentTimeMillis() - start;
        float elapsedTimeSec = elapsedTimeMillis / 1000F;

        List<SortableSemanticModel> topHypotheses = null;
        if (hypothesisList != null) {
            // Keep at most the ten highest-ranked hypotheses.
            topHypotheses = hypothesisList.size() > 10 ? hypothesisList.subList(0, 10) : hypothesisList;
        }

        Map<String, SemanticModel> models = new TreeMap<String, SemanticModel>();
        ModelEvaluation me;
        models.put("1-correct model", correctModel);
        if (topHypotheses != null)
            for (int k = 0; k < topHypotheses.size(); k++) {
                SortableSemanticModel m = topHypotheses.get(k);
                me = m.evaluate(correctModel, onlyEvaluateInternalLinks, false);

                String label = "candidate " + k + "\n" +
                        // (m.getSteinerNodes() == null ? "" : m.getSteinerNodes().getScoreDetailsString()) +
                        "link coherence:"
                        + (m.getLinkCoherence() == null ? "" : m.getLinkCoherence().getCoherenceValue()) + "\n";
                label += (m.getSteinerNodes() == null || m.getSteinerNodes().getCoherence() == null) ? ""
                        : "node coherence:" + m.getSteinerNodes().getCoherence().getCoherenceValue() + "\n";
                label += "confidence:" + m.getConfidenceScore() + "\n";
                label += m.getSteinerNodes() == null ? ""
                        : "mapping score:" + m.getSteinerNodes().getScore() + "\n";
                label += "cost:" + roundDecimals(m.getCost(), 6) + "\n" +
                        // "-distance:" + me.getDistance() +
                        "-precision:" + me.getPrecision() + "-recall:" + me.getRecall();

                models.put(label, m);

                if (k == 0) { // first-ranked model
                    System.out.println("precision: " + me.getPrecision() + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                    logger.info("precision: " + me.getPrecision() + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                    String s = newSource.getName() + "\t" + me.getPrecision() + "\t" + me.getRecall() + "\t"
                            + elapsedTimeSec;
                    resultFile.println(s);
                }
            }

        String outName = outputPath + newSource.getName() + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT;
        GraphVizUtil.exportSemanticModelsToGraphviz(models, newSource.getName(), outName,
                GraphVizLabelType.LocalId, GraphVizLabelType.LocalUri, true, true);
    }

    resultFile.close();
}
From source file:edu.stanford.epad.common.pixelmed.TIFFMasksToDSOConverter.java
/**
 * @param args
 */
public static void main(String[] args) {
    if (args.length < 3) {
        System.out.println(
                "\n\nInvalid arguments.\n\nUsage: java -classpath epad-ws-1.1-jar-with-dependencies.jar edu.stanford.epad.common.pixelmed.TIFFMasksToDSOConverter tiffFolder dicomFolder outputDSO.dcm");
        return;
    }
    String maskFilesDirectory = args[0];
    String dicomFilesDirectory = args[1];
    String outputFileName = args[2];
    // String maskFilesDirectory = "/Stanford/rlabs/data/tiffmasks";
    // String dicomFilesDirectory = "/Stanford/rlabs/data/dicoms";
    // String outputFileName = "/Stanford/rlabs/data/output/dso.dcm";

    // Keep only the .dcm files.
    List<String> dicomFilePaths = listFilesInAlphabeticOrder(dicomFilesDirectory);
    for (int i = 0; i < dicomFilePaths.size(); i++) {
        if (!dicomFilePaths.get(i).toLowerCase().endsWith(".dcm")) {
            System.out.println("Removing DICOM file " + dicomFilePaths.get(i));
            dicomFilePaths.remove(i);
            i--;
        }
    }
    if (dicomFilePaths.size() == 0) {
        System.out.println("No DICOM files found");
        return;
    }

    // Keep only the .tif/.tiff mask files.
    List<String> maskFilePaths = listFilesInAlphabeticOrder(maskFilesDirectory);
    for (int i = 0; i < maskFilePaths.size(); i++) {
        if (!maskFilePaths.get(i).toLowerCase().endsWith(".tif")
                && !maskFilePaths.get(i).toLowerCase().endsWith(".tiff")) {
            System.out.println("Removing tif file " + maskFilePaths.get(i));
            maskFilePaths.remove(i);
            i--;
        }
    }

    // Reverse the mask order because the converter expects it that way.
    List<String> reverseMaskFilePaths = new ArrayList<String>();
    for (int i = maskFilePaths.size(); i > 0; i--) {
        reverseMaskFilePaths.add(maskFilePaths.get(i - 1));
    }
    if (maskFilePaths.size() == 0) {
        System.out.println("No Tif Mask files found");
        return;
    }

    // Truncate whichever list is longer so both have the same number of entries.
    if (dicomFilePaths.size() > maskFilePaths.size())
        dicomFilePaths = dicomFilePaths.subList(0, reverseMaskFilePaths.size());
    else if (reverseMaskFilePaths.size() > dicomFilePaths.size())
        reverseMaskFilePaths = reverseMaskFilePaths.subList(0, dicomFilePaths.size());

    try {
        TIFFMasksToDSOConverter converter = new TIFFMasksToDSOConverter();
        String[] uids = null;
        if (args.length > 3)
            uids = converter.generateDSO(reverseMaskFilePaths, dicomFilePaths, outputFileName, args[3]);
        else
            uids = converter.generateDSO(reverseMaskFilePaths, dicomFilePaths, outputFileName);
        System.out.println("DICOM Segmentation Object created. SeriesUID:" + uids[0] + " InstanceUID:" + uids[1]);
    } catch (Exception e) {
        System.err.println(e);
        e.printStackTrace(System.err);
        System.exit(0);
    }
}
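The converter above truncates whichever of the two parallel lists is longer so masks and DICOM slices pair up one-to-one. A hedged, generic variant of that alignment step is sketched below; ListAlignment and alignLengths are made-up names, and unlike the original, which reassigns the variables to subList views, this helper trims the backing lists in place by clearing the trailing view.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ListAlignment {
        // Trims the longer of two modifiable lists so both end up the same length.
        static <T> void alignLengths(List<T> a, List<T> b) {
            int common = Math.min(a.size(), b.size());
            // subList(...).clear() removes the trailing elements from the backing list in place.
            if (a.size() > common) {
                a.subList(common, a.size()).clear();
            }
            if (b.size() > common) {
                b.subList(common, b.size()).clear();
            }
        }

        public static void main(String[] args) {
            List<String> dicoms = new ArrayList<>(Arrays.asList("a.dcm", "b.dcm", "c.dcm"));
            List<String> masks = new ArrayList<>(Arrays.asList("a.tif", "b.tif"));
            alignLengths(dicoms, masks);
            System.out.println(dicoms); // [a.dcm, b.dcm]
            System.out.println(masks);  // [a.tif, b.tif]
        }
    }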
From source file:Main.java
// Counts the distinct elements among the `count` elements starting at `position`.
// (Generics added; behaviour is identical to the raw-typed original.)
static <T> int countDistinctElements(List<T> source, int position, int count) {
    List<T> list = source.subList(position, position + count);
    Set<T> set = new HashSet<>(list);
    return set.size();
}
From source file:Main.java
/**
 * Removes the elements from {@code fromIdx} (inclusive) to the end of the list.
 * Works because subList returns a view backed by the original list.
 */
public static <E> void removeLastElements(List<E> list, int fromIdx) {
    list.subList(fromIdx, list.size()).clear();
}
From source file:Main.java
/** Returns a view of {@code l} from {@code fromIndex} (inclusive) to the end of the list. */
public static <T> List<T> slice(List<T> l, int fromIndex) {
    return l.subList(fromIndex, l.size());
}
From source file:Main.java
/** Removes the first {@code deleteCount} elements from {@code list}. */
public static <T> void removeHeadItems(List<T> list, int deleteCount) {
    list.subList(0, deleteCount).clear();
}
From source file:Main.java
/**
 * Returns a view of the first {@code size} elements of {@code source};
 * {@code size} must not exceed the list size, or subList throws IndexOutOfBoundsException.
 * (Raw return type replaced with {@code List<? extends T>}; behaviour is unchanged.)
 */
public static <T> List<? extends T> limit(List<? extends T> source, int size) {
    return source.subList(0, size);
}
From source file:Main.java
/** Returns a view of {@code col} without its last element; returns {@code col} itself when it is empty. */
public static <T> List<T> head(List<T> col) {
    return col.isEmpty() ? col : col.subList(0, col.size() - 1);
}
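All of the small helpers above (slice, limit, head, removeHeadItems, removeLastElements) either return or operate on subList views. The closing sketch below is not part of any project listed here; it shows the one caveat worth remembering when a view is kept around: structurally modifying the backing list afterwards makes the view unusable, so copy it if an independent result is needed.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.ConcurrentModificationException;
    import java.util.List;

    public class ViewCaveatDemo {
        public static void main(String[] args) {
            List<Integer> numbers = new ArrayList<>(Arrays.asList(1, 2, 3, 4, 5));
            List<Integer> tail = numbers.subList(2, numbers.size()); // same shape as slice(numbers, 2)
            System.out.println(tail);      // [3, 4, 5]

            // Structurally modifying the backing list invalidates the view.
            numbers.add(6);
            try {
                System.out.println(tail.size());
            } catch (ConcurrentModificationException e) {
                System.out.println("view invalidated after backing-list change");
            }

            // If an independent result is needed, copy the view immediately.
            List<Integer> copy = new ArrayList<>(numbers.subList(0, 3));
            System.out.println(copy);      // [1, 2, 3]
        }
    }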