List of usage examples for java.util.stream.Collectors.toList()
public static <T> Collector<T, ?, List<T>> toList()
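Before the full-program examples below, here is a minimal, self-contained sketch of the collector on its own. The class name and sample data are hypothetical, chosen only for illustration:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToListExample {
    public static void main(String[] args) {
        // Map each name to upper case and collect the results into a List.
        List<String> names = Stream.of("alice", "bob", "carol")
                .map(String::toUpperCase)
                .collect(Collectors.toList());
        System.out.println(names); // prints [ALICE, BOB, CAROL]
    }
}

Note that the Javadoc makes no guarantee about the type, mutability, serializability, or thread safety of the returned List; wrap it (for example with Collections.unmodifiableList) if callers must not modify it.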
From source file:com.khartec.waltz.jobs.sample.InvolvementGenerator.java
public static void main(String[] args) {
    AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext(DIConfiguration.class);
    DSLContext dsl = ctx.getBean(DSLContext.class);

    List<String> developers = getEmployeeIdsByTitle(dsl, "%developer%");
    List<String> managers = getEmployeeIdsByTitle(dsl, "%manager%");
    List<String> analysts = getEmployeeIdsByTitle(dsl, "%analyst%");
    List<String> administrators = getEmployeeIdsByTitle(dsl, "%administrator%");
    List<String> qa = getEmployeeIdsByTitle(dsl, "%qa%");
    List<String> directors = getEmployeeIdsByTitle(dsl, "%director%");

    List<Long> orgUnitIds = dsl.select(ORGANISATIONAL_UNIT.ID)
            .from(ORGANISATIONAL_UNIT)
            .fetch(ORGANISATIONAL_UNIT.ID);

    List<Long> inHouseApps = getAppIdsByKind(dsl, ApplicationKind.IN_HOUSE);
    List<Long> hostedApps = getAppIdsByKind(dsl, ApplicationKind.INTERNALLY_HOSTED);
    List<Long> externalApps = getAppIdsByKind(dsl, ApplicationKind.EXTERNALLY_HOSTED);
    List<Long> eucApps = getAppIdsByKind(dsl, ApplicationKind.EUC);

    List<InvolvementRecord> devInvolvements = inHouseApps.stream()
            .map(id -> toAppRef(id))
            .flatMap(appRef -> mkInvolvments(appRef, developers, InvolvementKind.DEVELOPER, 7))
            .collect(Collectors.toList());

    List<InvolvementRecord> qaInvolvements = concat(inHouseApps, hostedApps).stream()
            .map(id -> toAppRef(id))
            .flatMap(appRef -> mkInvolvments(appRef, qa, InvolvementKind.QA, 3))
            .collect(Collectors.toList());

    List<InvolvementRecord> projectManagerInvolvements = concat(inHouseApps, externalApps, hostedApps, eucApps)
            .stream()
            .map(id -> toAppRef(id))
            .flatMap(appRef -> mkInvolvments(appRef, managers, InvolvementKind.PROJECT_MANAGER, 1))
            .collect(Collectors.toList());

    List<InvolvementRecord> supportManagerInvolvments = concat(inHouseApps, externalApps, hostedApps).stream()
            .map(id -> toAppRef(id))
            .flatMap(appRef -> mkInvolvments(appRef, managers, InvolvementKind.SUPPORT_MANAGER, 1))
            .collect(Collectors.toList());

    List<InvolvementRecord> analystInvolvments = concat(inHouseApps, externalApps, hostedApps).stream()
            .map(id -> toAppRef(id))
            .flatMap(appRef -> mkInvolvments(appRef, analysts, InvolvementKind.BUSINESS_ANALYST, 3))
            .collect(Collectors.toList());

    List<InvolvementRecord> ouArchitects = orgUnitIds.stream()
            .map(id -> new InvolvementRecord(EntityKind.ORG_UNIT.name(), id,
                    InvolvementKind.IT_ARCHITECT.name(), randomPick(directors), "RANDOM_GENERATOR"))
            .collect(Collectors.toList());

    List<InvolvementRecord> ouSponsors = orgUnitIds.stream()
            .map(id -> new InvolvementRecord(EntityKind.ORG_UNIT.name(), id,
                    InvolvementKind.BUSINESS_SPONSOR.name(), randomPick(directors), "RANDOM_GENERATOR"))
            .collect(Collectors.toList());

    dsl.delete(INVOLVEMENT).execute();

    dsl.batchInsert(devInvolvements).execute();
    dsl.batchInsert(qaInvolvements).execute();
    dsl.batchInsert(supportManagerInvolvments).execute();
    dsl.batchInsert(projectManagerInvolvements).execute();
    dsl.batchInsert(analystInvolvments).execute();
    dsl.batchInsert(ouArchitects).execute();
    dsl.batchInsert(ouSponsors).execute();

    System.out.println("Done");
}
From source file:de.peran.DependencyReadingStarter.java
public static void main(final String[] args) throws ParseException, FileNotFoundException {
    final Options options = OptionConstants.createOptions(OptionConstants.FOLDER,
            OptionConstants.STARTVERSION, OptionConstants.ENDVERSION, OptionConstants.OUT);
    final CommandLineParser parser = new DefaultParser();
    final CommandLine line = parser.parse(options, args);

    final File projectFolder = new File(line.getOptionValue(OptionConstants.FOLDER.getName()));

    final File dependencyFile;
    if (line.hasOption(OptionConstants.OUT.getName())) {
        dependencyFile = new File(line.getOptionValue(OptionConstants.OUT.getName()));
    } else {
        dependencyFile = new File("dependencies.xml");
    }

    File outputFile = projectFolder.getParentFile();
    if (outputFile.isDirectory()) {
        outputFile = new File(projectFolder.getParentFile(), "ausgabe.txt");
    }

    LOG.debug("Reading {}", projectFolder.getAbsolutePath());

    final VersionControlSystem vcs = VersionControlSystem.getVersionControlSystem(projectFolder);

    System.setOut(new PrintStream(outputFile));
    // System.setErr(new PrintStream(outputFile));

    final DependencyReader reader;
    if (vcs.equals(VersionControlSystem.SVN)) {
        final String url = SVNUtils.getInstance().getWCURL(projectFolder);
        final List<SVNLogEntry> entries = getSVNCommits(line, url);
        LOG.debug("SVN commits: "
                + entries.stream().map(entry -> entry.getRevision()).collect(Collectors.toList()));
        reader = new DependencyReader(projectFolder, url, dependencyFile, entries);
    } else if (vcs.equals(VersionControlSystem.GIT)) {
        final List<GitCommit> commits = getGitCommits(line, projectFolder);
        reader = new DependencyReader(projectFolder, dependencyFile, commits);
        LOG.debug("Reader initialized");
    } else {
        throw new RuntimeException("Unknown version control system");
    }

    reader.readDependencies();
}
From source file:com.github.fhuss.kafka.streams.cep.demo.CEPStockKStreamsDemo.java
public static void main(String[] args) {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-cep");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "localhost:2181");
    props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, StockEventSerDe.class);
    props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, StockEventSerDe.class);

    // Setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data.
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    // Build the query.
    final Pattern<Object, StockEvent> pattern = new QueryBuilder<Object, StockEvent>()
            .select()
                .where((k, v, ts, store) -> v.volume > 1000)
                .<Long>fold("avg", (k, v, curr) -> v.price)
                .then()
            .select()
                .zeroOrMore()
                .skipTillNextMatch()
                .where((k, v, ts, state) -> v.price > (long) state.get("avg"))
                .<Long>fold("avg", (k, v, curr) -> (curr + v.price) / 2)
                .<Long>fold("volume", (k, v, curr) -> v.volume)
                .then()
            .select()
                .skipTillNextMatch()
                .where((k, v, ts, state) -> v.volume < 0.8 * state.getOrElse("volume", 0L))
                .within(1, TimeUnit.HOURS)
            .build();

    KStreamBuilder builder = new KStreamBuilder();

    CEPStream<Object, StockEvent> stream = new CEPStream<>(builder.stream("StockEvents"));
    KStream<Object, Sequence<Object, StockEvent>> stocks = stream.query("Stocks", pattern);

    stocks.mapValues(seq -> {
        JSONObject json = new JSONObject();
        seq.asMap().forEach((k, v) -> {
            JSONArray events = new JSONArray();
            json.put(k, events);
            List<String> collect = v.stream().map(e -> e.value.name).collect(Collectors.toList());
            Collections.reverse(collect);
            collect.forEach(e -> events.add(e));
        });
        return json.toJSONString();
    }).through(null, Serdes.String(), "Matches").print();

    // Use the topology builder and streaming config to start the Kafka Streams process.
    KafkaStreams streaming = new KafkaStreams(builder, props);
    //streaming.cleanUp();
    streaming.start();
}
From source file:csv.sorting.PrepareWeatherData.java
public static void main(String[] args) throws Exception {

    // Paths to read the CSV data from:
    final Path csvStationDataFilePath = FileSystems.getDefault()
            .getPath("C:\\Users\\philipp\\Downloads\\csv\\201503station.txt");
    final Path csvLocalWeatherDataUnsortedFilePath = FileSystems.getDefault()
            .getPath("C:\\Users\\philipp\\Downloads\\csv\\201503hourly.txt");
    final Path csvLocalWeatherDataSortedFilePath = FileSystems.getDefault()
            .getPath("C:\\Users\\philipp\\Downloads\\csv\\201503hourly_sorted.txt");

    // A map between the WBAN and Station for faster lookups:
    final Map<String, Station> stationMap = getStationMap(csvStationDataFilePath);

    // Holds the list of sorted DateTimes (including ZoneOffset):
    List<Integer> indices = new ArrayList<>();

    // Comparator for sorting the file:
    Comparator<OffsetDateTime> byMeasurementTime = (e1, e2) -> e1.compareTo(e2);

    // Get the sorted indices from the stream of LocalWeatherData elements:
    try (Stream<CsvMappingResult<csv.model.LocalWeatherData>> stream = getLocalWeatherData(
            csvLocalWeatherDataUnsortedFilePath)) {

        // Holds the current line index, when processing the input Stream:
        AtomicInteger currentIndex = new AtomicInteger(1);

        // We want to get a list of indices, which sorts the CSV file by measurement time:
        indices = stream
                // Skip the CSV header:
                .skip(1)
                // Start by enumerating ALL mapping results:
                .map(x -> new ImmutablePair<>(currentIndex.getAndAdd(1), x))
                // Then only take those lines that are actually valid:
                .filter(x -> x.getRight().isValid())
                // Now take the parsed entity from the CsvMappingResult:
                .map(x -> new ImmutablePair<>(x.getLeft(), x.getRight().getResult()))
                // Take only those measurements that are also available in the list of stations:
                .filter(x -> stationMap.containsKey(x.getRight().getWban()))
                // Get the OffsetDateTime from the LocalWeatherData, which includes the ZoneOffset of the Station:
                .map(x -> {
                    // Get the matching station:
                    csv.model.Station station = stationMap.get(x.getRight().getWban());
                    // Calculate the OffsetDateTime from the given measurement:
                    OffsetDateTime measurementTime = OffsetDateTime.of(x.getRight().getDate(),
                            x.getRight().getTime(), ZoneOffset.ofHours(0));
                    // Build the immutable pair with the index again:
                    return new ImmutablePair<>(x.getLeft(), measurementTime);
                })
                // Now sort the measurements by their timestamp:
                .sorted((x, y) -> byMeasurementTime.compare(x.getRight(), y.getRight()))
                // Take only the index:
                .map(x -> x.getLeft())
                // And turn it into a List:
                .collect(Collectors.toList());
    }

    // Now sort the file by line number:
    writeSortedFileByIndices(csvLocalWeatherDataUnsortedFilePath, indices, csvLocalWeatherDataSortedFilePath);
}
From source file:com.datastax.sparql.ConsoleCompiler.java
public static void main(final String[] args) throws IOException {
    //args = "/examples/modern1.sparql";
    final Options options = new Options();
    options.addOption("f", "file", true, "a file that contains a SPARQL query");
    options.addOption("g", "graph", true,
            "the graph that's used to execute the query [classic|modern|crew|kryo file]");
    // TODO: add an OLAP option (perhaps: "--olap spark"?)

    final CommandLineParser parser = new DefaultParser();
    final CommandLine commandLine;
    try {
        commandLine = parser.parse(options, args);
    } catch (ParseException e) {
        System.out.println(e.getMessage());
        printHelp(1);
        return;
    }

    final InputStream inputStream = commandLine.hasOption("file")
            ? new FileInputStream(commandLine.getOptionValue("file"))
            : System.in;

    final BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
    final StringBuilder queryBuilder = new StringBuilder();

    if (!reader.ready()) {
        printHelp(1);
    }

    String line;
    while (null != (line = reader.readLine())) {
        queryBuilder.append(System.lineSeparator()).append(line);
    }

    final String queryString = queryBuilder.toString();

    final Graph graph;
    if (commandLine.hasOption("graph")) {
        switch (commandLine.getOptionValue("graph").toLowerCase()) {
        case "classic":
            graph = TinkerFactory.createClassic();
            break;
        case "modern":
            graph = TinkerFactory.createModern();
            System.out.println("Modern Graph Created");
            break;
        case "crew":
            graph = TinkerFactory.createTheCrew();
            break;
        default:
            graph = TinkerGraph.open();
            System.out.println("Graph Created");
            long startTime = System.nanoTime();
            graph.io(IoCore.gryo()).readGraph(commandLine.getOptionValue("graph"));
            long endTime = System.nanoTime();
            System.out.println("Time taken to load graph from kryo file: "
                    + (endTime - startTime) / 1000000 + " milliseconds");
            break;
        }
    } else {
        graph = TinkerFactory.createModern();
    }

    final Traversal<Vertex, ?> traversal = SparqlToGremlinCompiler.convertToGremlinTraversal(graph,
            queryString);

    printWithHeadline("SPARQL Query", queryString);
    printWithHeadline("Traversal (prior execution)", traversal);

    Bytecode traversalByteCode = traversal.asAdmin().getBytecode();
    //JavaTranslator.of(graph.traversal()).translate(traversalByteCode);
    System.out.println("Traversal bytecode: " + traversalByteCode.toString());

    printWithHeadline("Result",
            String.join(System.lineSeparator(), JavaTranslator.of(graph.traversal())
                    .translate(traversalByteCode).toStream().map(Object::toString)
                    .collect(Collectors.toList())));
    printWithHeadline("Traversal (after execution)", traversal);
}
From source file:com.act.biointerpretation.mechanisminspection.ReactionValidator.java
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(ReactionDesalter.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    NoSQLAPI api = new NoSQLAPI(cl.getOptionValue(OPTION_READ_DB), cl.getOptionValue(OPTION_READ_DB));
    MechanisticValidator validator = new MechanisticValidator(api);
    validator.init();

    Map<Integer, List<Ero>> results = validator
            .validateOneReaction(Long.parseLong(cl.getOptionValue(OPTION_RXN_ID)));

    if (results == null) {
        System.out.format("ERROR: validation results are null.\n");
    } else if (results.size() == 0) {
        System.out.format("No matching EROs were found for the specified reaction.\n");
    } else {
        for (Map.Entry<Integer, List<Ero>> entry : results.entrySet()) {
            List<String> eroIds = entry.getValue().stream().map(x -> x.getId().toString())
                    .collect(Collectors.toList());
            System.out.format("%d: %s\n", entry.getKey(), StringUtils.join(eroIds, ", "));
        }
    }
}
From source file:com.github.horrorho.inflatabledonkey.Main.java
/**
 * @param args the command line arguments
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    try {
        if (!PropertyLoader.instance().test(args)) {
            return;
        }
    } catch (IllegalArgumentException ex) {
        System.out.println("Argument error: " + ex.getMessage());
        System.out.println("Try '" + Property.APP_NAME.value() + " --help' for more information.");
        System.exit(-1);
    }

    // SystemDefault HttpClient.
    // TODO concurrent
    CloseableHttpClient httpClient = HttpClients.custom().setUserAgent("CloudKit/479 (13A404)")
            .useSystemProperties().build();

    // Auth
    // TODO rework when we have UncheckedIOException for Authenticator
    Auth auth = Property.AUTHENTICATION_TOKEN.value().map(Auth::new).orElse(null);

    if (auth == null) {
        auth = Authenticator.authenticate(httpClient, Property.AUTHENTICATION_APPLEID.value().get(),
                Property.AUTHENTICATION_PASSWORD.value().get());
    }

    logger.debug("-- main() - auth: {}", auth);
    logger.info("-- main() - dsPrsID:mmeAuthToken: {}:{}", auth.dsPrsID(), auth.mmeAuthToken());

    if (Property.ARGS_TOKEN.booleanValue().orElse(false)) {
        System.out.println("DsPrsID:mmeAuthToken " + auth.dsPrsID() + ":" + auth.mmeAuthToken());
        return;
    }

    logger.info("-- main() - Apple ID: {}", Property.AUTHENTICATION_APPLEID.value());
    logger.info("-- main() - password: {}", Property.AUTHENTICATION_PASSWORD.value());
    logger.info("-- main() - token: {}", Property.AUTHENTICATION_TOKEN.value());

    // Account
    Account account = Accounts.account(httpClient, auth);

    // Backup
    Backup backup = Backup.create(httpClient, account);

    // BackupAccount
    BackupAccount backupAccount = backup.backupAccount(httpClient);
    logger.debug("-- main() - backup account: {}", backupAccount);

    // Devices
    List<Device> devices = backup.devices(httpClient, backupAccount.devices());
    logger.debug("-- main() - device count: {}", devices.size());

    // Snapshots
    List<SnapshotID> snapshotIDs = devices.stream().map(Device::snapshots).flatMap(Collection::stream)
            .collect(Collectors.toList());
    logger.info("-- main() - total snapshot count: {}", snapshotIDs.size());

    Map<String, Snapshot> snapshots = backup.snapshot(httpClient, snapshotIDs).stream().collect(
            Collectors.toMap(s -> s.record().getRecordIdentifier().getValue().getName(), Function.identity()));

    boolean repeat = false;
    do {
        for (int i = 0; i < devices.size(); i++) {
            Device device = devices.get(i);
            List<SnapshotID> deviceSnapshotIDs = device.snapshots();
            System.out.println(i + " " + device.info());
            for (int j = 0; j < deviceSnapshotIDs.size(); j++) {
                SnapshotID sid = deviceSnapshotIDs.get(j);
                System.out.println("\t" + j + snapshots.get(sid.id()).info() + " " + sid.timestamp());
            }
        }

        if (Property.PRINT_SNAPSHOTS.booleanValue().orElse(false)) {
            return;
        }

        // Selection
        Scanner input = new Scanner(System.in);

        int deviceIndex;
        int snapshotIndex = Property.SELECT_SNAPSHOT_INDEX.intValue().get();

        if (devices.size() > 1) {
            System.out.printf("Select a device [0 - %d]: ", devices.size() - 1);
            deviceIndex = input.nextInt();
        } else {
            deviceIndex = Property.SELECT_DEVICE_INDEX.intValue().get();
        }

        if (deviceIndex >= devices.size() || deviceIndex < 0) {
            System.out.println("No such device: " + deviceIndex);
            System.exit(-1);
        }

        Device device = devices.get(deviceIndex);
        System.out.println("Selected device: " + deviceIndex + ", " + device.info());

        if (device.snapshots().size() > 1) {
            System.out.printf("Select a snapshot [0 - %d]: ", device.snapshots().size() - 1);
            snapshotIndex = input.nextInt();
        } else {
            snapshotIndex = Property.SELECT_SNAPSHOT_INDEX.intValue().get();
        }

        if (snapshotIndex >= devices.get(deviceIndex).snapshots().size() || snapshotIndex < 0) {
            System.out.println("No such snapshot for selected device: " + snapshotIndex);
            System.exit(-1);
        }

        logger.info("-- main() - arg device index: {}", deviceIndex);
        logger.info("-- main() - arg snapshot index: {}", snapshotIndex);

        String selected = devices.get(deviceIndex).snapshots().get(snapshotIndex).id();
        Snapshot snapshot = snapshots.get(selected);
        System.out.println("Selected snapshot: " + snapshotIndex + ", " + snapshot.info());

        // Asset list.
        List<Assets> assetsList = backup.assetsList(httpClient, snapshot);
        logger.info("-- main() - assets count: {}", assetsList.size());

        // Domains filter --domain option
        String chosenDomain = Property.FILTER_DOMAIN.value().orElse("").toLowerCase(Locale.US);
        logger.info("-- main() - arg domain substring filter: {}", Property.FILTER_DOMAIN.value());

        // Output domains --domains option
        if (Property.PRINT_DOMAIN_LIST.booleanValue().orElse(false)) {
            System.out.println("Domains / file count:");
            assetsList.stream().filter(a -> a.domain().isPresent())
                    .map(a -> a.domain().get() + " / " + a.files().size()).sorted()
                    .forEach(System.out::println);
            System.out.print("Type a domain ('null' to exit): ");
            chosenDomain = input.next().toLowerCase(Locale.US);
            if (chosenDomain.equals("null")) {
                return;
            }
            // TODO check Assets without domain information.
        }

        String domainSubstring = chosenDomain;
        Predicate<Optional<String>> domainFilter = domain -> domain.map(d -> d.toLowerCase(Locale.US))
                .map(d -> d.contains(domainSubstring)).orElse(false);

        List<String> files = Assets.files(assetsList, domainFilter);
        logger.info("-- main() - domain filtered file count: {}", files.size());

        // Output folders.
        Path outputFolder = Paths.get(Property.OUTPUT_FOLDER.value().orElse("output"));
        Path assetOutputFolder = outputFolder.resolve("assets"); // TODO assets value injection
        Path chunkOutputFolder = outputFolder.resolve("chunks"); // TODO chunks value injection
        logger.info("-- main() - output folder chunks: {}", chunkOutputFolder);
        logger.info("-- main() - output folder assets: {}", assetOutputFolder);

        // Download tools.
        AuthorizeAssets authorizeAssets = AuthorizeAssets.backupd();
        DiskChunkStore chunkStore = new DiskChunkStore(chunkOutputFolder);
        StandardChunkEngine chunkEngine = new StandardChunkEngine(chunkStore);
        AssetDownloader assetDownloader = new AssetDownloader(chunkEngine);
        KeyBagManager keyBagManager = backup.newKeyBagManager();

        // Mystery Moo.
        Moo moo = new Moo(authorizeAssets, assetDownloader, keyBagManager);

        // Filename extension filter.
        String filenameExtension = Property.FILTER_EXTENSION.value().orElse("").toLowerCase(Locale.US);
        logger.info("-- main() - arg filename extension filter: {}", Property.FILTER_EXTENSION.value());

        Predicate<Asset> assetFilter = asset -> asset.relativePath().map(d -> d.toLowerCase(Locale.US))
                .map(d -> d.endsWith(filenameExtension)).orElse(false);

        // Batch process files in groups of 100.
        // TODO group files into batches based on file size.
        List<List<String>> batches = ListUtils.partition(files, 100);

        for (List<String> batch : batches) {
            List<Asset> assets = backup.assets(httpClient, batch).stream().filter(assetFilter::test)
                    .collect(Collectors.toList());
            logger.info("-- main() - filtered asset count: {}", assets.size());
            moo.download(httpClient, assets, assetOutputFolder);
        }

        System.out.print("Download other snapshot (Y/N)? ");
        repeat = input.next().toLowerCase(Locale.US).charAt(0) == 'y';
    } while (repeat == true);
}
From source file:diffhunter.DiffHunter.java
/**
 * @param args the command line arguments
 * @throws org.apache.commons.cli.ParseException
 * @throws java.io.IOException
 */
public static void main(String[] args) throws ParseException, IOException {
    //String test_ = Paths.get("J:\\VishalData\\additional\\", "Sasan" + "_BDB").toAbsolutePath().toString();
    /*args = new String[] { "-i", "-b",
            "J:\\VishalData\\additional\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted.bed", "-r",
            "J:\\VishalData\\additional\\mouse_mm9.txt", "-o", "J:\\VishalData" };*/
    /*args = new String[] { "-c", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-1",
            "J:\\VishalData\\Ptbp2_Adult_testis_CLIP_mm9_plus_strand_sorted_BDB", "-2",
            "J:\\VishalData\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted_BDB", "-w", "200", "-s", "50",
            "-o", "J:\\VishalData" };*/

    Options options = new Options();
    options.addOption("i", "index", false, "Indexing BED files.");
    options.addOption("b", "bed", true, "bed file to be indexed");
    options.addOption("o", "output", true, "Folder that the index/comparison file will be created.");
    options.addOption("r", "reference", true, "Reference annotation file to be used for indexing");
    options.addOption("c", "compare", false, "Finding differences between two conditions");
    options.addOption("1", "first", true, "First sample index location");
    options.addOption("2", "second", true, "Second sample index location");
    options.addOption("w", "window", true, "Length of window for identifying differences");
    options.addOption("s", "sliding", true, "Length of sliding");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);

    boolean indexing = false;
    boolean comparing = false;

    if (cmd.hasOption("i")) {
        indexing = true;
    } else if (cmd.hasOption("c")) {
        comparing = true;
    } else {
        // No mode option was detected; print usage and exit.
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("diffhunter", options);
        return;
    }

    if (indexing == true) {
        // Since indexing is selected, the user has to provide a file for indexing.
        if (!(cmd.hasOption("o") || cmd.hasOption("r") || cmd.hasOption("b"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }

        String bedfile_ = cmd.getOptionValue("b");
        String reference_file = cmd.getOptionValue("r");
        String folder_loc = cmd.getOptionValue("o");
        String sample_name = FilenameUtils.getBaseName(bedfile_);

        try (Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(
                Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString(), true, sample_name)) {
            Indexer indexing_ = new Indexer(reference_file);
            indexing_.Make_Index(B2, bedfile_,
                    Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString());
            B2.close();
        }
    } else if (comparing == true) {
        if (!(cmd.hasOption("o") || cmd.hasOption("w") || cmd.hasOption("s") || cmd.hasOption("1")
                || cmd.hasOption("2"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }

        String folder_loc = cmd.getOptionValue("o");
        int window_ = Integer.parseInt(cmd.getOptionValue("w"));
        int slide_ = Integer.parseInt(cmd.getOptionValue("s"));
        String first = cmd.getOptionValue("1").replace("_BDB", "");
        String second = cmd.getOptionValue("2").replace("_BDB", "");
        String reference_file = cmd.getOptionValue("r");
        String sample_name_first = FilenameUtils.getBaseName(first);
        String sample_name_second = FilenameUtils.getBaseName(second);

        Database B1 = BerkeleyDB_Box.Get_BerkeleyDB(first + "_BDB", false, sample_name_first);
        Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(second + "_BDB", false, sample_name_second);

        List<String> first_condition_genes = Files
                .lines(Paths.get(first + "_BDB", sample_name_first + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        List<String> second_condition_genes = Files
                .lines(Paths.get(second + "_BDB", sample_name_second + ".txt").toAbsolutePath())
                .collect(Collectors.toList());

        System.out.println("First and second conditions are loaded.");

        List<String> intersection_ = new ArrayList<>(first_condition_genes);
        intersection_.retainAll(second_condition_genes);

        BufferedWriter output = new BufferedWriter(new FileWriter(
                Paths.get(folder_loc, "differences_" + window_ + "_s" + slide_ + "_c" + ".txt")
                        .toAbsolutePath().toString(), false));

        List<Result_Window> final_results = Collections.synchronizedList(new ArrayList<>());

        Worker_New worker_class = new Worker_New();
        worker_class.Read_Reference(reference_file);

        while (!intersection_.isEmpty()) {
            List<String> selected_genes = new ArrayList<>();
            // NOTE: this condition is always true, so every remaining gene is selected in one pass;
            // the commented-out code in the original source suggests batching in groups of 10,000 was intended.
            if (intersection_.size() <= intersection_.size()) {
                selected_genes.addAll(intersection_.subList(0, intersection_.size()));
            } else {
                selected_genes.addAll(intersection_.subList(0, intersection_.size()));
            }
            intersection_.removeAll(selected_genes);

            IntStream.range(0, selected_genes.size()).parallel().forEach(i -> {
                System.out.println(selected_genes.get(i) + "\tprocessing......");
                String gene_of_interest = selected_genes.get(i);
                int start = worker_class.dic_genes.get(gene_of_interest).start_loc;
                int end = worker_class.dic_genes.get(gene_of_interest).end_loc;

                Map<Integer, Integer> first_ = Collections.EMPTY_MAP;
                try {
                    first_ = BerkeleyDB_Box.Get_Coord_Read(B1, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }
                Map<Integer, Integer> second_ = Collections.EMPTY_MAP;
                try {
                    second_ = BerkeleyDB_Box.Get_Coord_Read(B2, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }

                List<Window> top_windows_first = worker_class.Get_Top_Windows(window_, first_, slide_);
                List<Window> top_windows_second = worker_class.Get_Top_Windows(window_, second_, slide_);

                if (top_windows_first.isEmpty() && top_windows_second.isEmpty()) {
                    return;
                }

                List<Result_Window> res_temp = new Worker_New().Get_Significant_Windows(gene_of_interest,
                        start, end, top_windows_first, top_windows_second, second_, first_,
                        sample_name_first, sample_name_second, 0.01);
                if (!res_temp.isEmpty()) {
                    final_results.addAll(res_temp);
                }
            });

            List<Double> pvals = new ArrayList<>();
            for (int i = 0; i < final_results.size(); i++) {
                pvals.add(final_results.get(i).p_value);
            }
            List<Double> qvals = MultipleTestCorrection.benjaminiHochberg(pvals);

            System.out.println("Writing to file...");
            output.append("Gene_Symbol\tContributing_Sample\tStart\tEnd\tOddsRatio\tp_Value\tFDR");
            output.newLine();
            for (int i = 0; i < final_results.size(); i++) {
                Result_Window item = final_results.get(i);
                output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t"
                        + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t" + item.p_value
                        + "\t" + qvals.get(i));
                output.newLine();
            }
            final_results.clear();
        }
        output.close();
    }
    System.out.println("Done.");
}
From source file:acmi.l2.clientmod.l2_version_switcher.Main.java
public static void main(String[] args) {
    if (args.length != 3 && args.length != 4) {
        System.out.println("USAGE: l2_version_switcher.jar host game version <--splash> <filter>");
        System.out.println("EXAMPLE: l2_version_switcher.jar " + L2.NCWEST_HOST + " " + L2.NCWEST_GAME
                + " 1 \"system\\*\"");
        System.out.println("         l2_version_switcher.jar " + L2.PLAYNC_TEST_HOST + " "
                + L2.PLAYNC_TEST_GAME + " 48");
        System.exit(0);
    }

    List<String> argsList = new ArrayList<>(Arrays.asList(args));
    String host = argsList.get(0);
    String game = argsList.get(1);
    int version = Integer.parseInt(argsList.get(2));
    Helper helper = new Helper(host, game, version);
    boolean available = false;

    try {
        available = helper.isAvailable();
    } catch (IOException e) {
        System.err.print(e.getClass().getSimpleName());
        if (e.getMessage() != null) {
            System.err.print(": " + e.getMessage());
        }
        System.err.println();
    }

    System.out.println(String.format("Version %d available: %b", version, available));
    if (!available) {
        System.exit(0);
    }

    List<FileInfo> fileInfoList = null;
    try {
        fileInfoList = helper.getFileInfoList();
    } catch (IOException e) {
        System.err.println("Couldn't get file info map");
        System.exit(1);
    }

    boolean splash = argsList.remove("--splash");
    if (splash) {
        Optional<FileInfo> splashObj = fileInfoList.stream()
                .filter(fi -> fi.getPath().contains("sp_32b_01.bmp")).findAny();

        if (splashObj.isPresent()) {
            try (InputStream is = new FilterInputStream(
                    Util.getUnzipStream(helper.getDownloadStream(splashObj.get().getPath()))) {
                @Override
                public int read() throws IOException {
                    int b = super.read();
                    if (b >= 0)
                        b ^= 0x36;
                    return b;
                }

                @Override
                public int read(byte[] b, int off, int len) throws IOException {
                    int r = super.read(b, off, len);
                    if (r >= 0) {
                        for (int i = 0; i < r; i++)
                            b[off + i] ^= 0x36;
                    }
                    return r;
                }
            }) {
                new DataInputStream(is).readFully(new byte[28]);

                BufferedImage bi = ImageIO.read(is);

                JFrame frame = new JFrame("Lineage 2 [" + version + "] " + splashObj.get().getPath());
                frame.setContentPane(new JComponent() {
                    {
                        setPreferredSize(new Dimension(bi.getWidth(), bi.getHeight()));
                    }

                    @Override
                    protected void paintComponent(Graphics g) {
                        g.drawImage(bi, 0, 0, null);
                    }
                });
                frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
                frame.pack();
                frame.setLocationRelativeTo(null);
                frame.setVisible(true);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else {
            System.out.println("Splash not found");
        }
        return;
    }

    String filter = argsList.size() > 3 ? separatorsToSystem(argsList.get(3)) : null;

    File l2Folder = new File(System.getProperty("user.dir"));
    List<FileInfo> toUpdate = fileInfoList.parallelStream().filter(fi -> {
        String filePath = separatorsToSystem(fi.getPath());

        if (filter != null && !wildcardMatch(filePath, filter, IOCase.INSENSITIVE))
            return false;

        File file = new File(l2Folder, filePath);

        try {
            if (file.exists() && file.length() == fi.getSize() && Util.hashEquals(file, fi.getHash())) {
                System.out.println(filePath + ": OK");
                return false;
            }
        } catch (IOException e) {
            System.out.println(filePath + ": couldn't check hash: " + e);
            return true;
        }

        System.out.println(filePath + ": need update");
        return true;
    }).collect(Collectors.toList());

    List<String> errors = Collections.synchronizedList(new ArrayList<>());
    ExecutorService executor = Executors.newFixedThreadPool(16);
    CompletableFuture[] tasks = toUpdate.stream().map(fi -> CompletableFuture.runAsync(() -> {
        String filePath = separatorsToSystem(fi.getPath());
        File file = new File(l2Folder, filePath);
        File folder = file.getParentFile();
        if (!folder.exists()) {
            if (!folder.mkdirs()) {
                errors.add(filePath + ": couldn't create parent dir");
                return;
            }
        }

        try (InputStream input = Util
                .getUnzipStream(new BufferedInputStream(helper.getDownloadStream(fi.getPath())));
                OutputStream output = new BufferedOutputStream(new FileOutputStream(file))) {
            byte[] buffer = new byte[Math.min(fi.getSize(), 1 << 24)];
            int pos = 0;
            int r;
            while ((r = input.read(buffer, pos, buffer.length - pos)) >= 0) {
                pos += r;
                if (pos == buffer.length) {
                    output.write(buffer, 0, pos);
                    pos = 0;
                }
            }
            if (pos != 0) {
                output.write(buffer, 0, pos);
            }
            System.out.println(filePath + ": OK");
        } catch (IOException e) {
            String msg = filePath + ": FAIL: " + e.getClass().getSimpleName();
            if (e.getMessage() != null) {
                msg += ": " + e.getMessage();
            }
            errors.add(msg);
        }
    }, executor)).toArray(CompletableFuture[]::new);

    CompletableFuture.allOf(tasks).thenRun(() -> {
        for (String err : errors)
            System.err.println(err);
        executor.shutdown();
    });
}
From source file:act.installer.reachablesexplorer.WikiWebServicesExporter.java
public static void main(String[] args) throws Exception {
    CLIUtil cliUtil = new CLIUtil(WikiWebServicesExporter.class, HELP_MESSAGE, OPTION_BUILDERS);
    CommandLine cl = cliUtil.parseCommandLine(args);

    String host = cl.getOptionValue(OPTION_INPUT_DB_HOST, DEFAULT_HOST);
    Integer port = Integer.parseInt(cl.getOptionValue(OPTION_INPUT_DB_PORT, DEFAULT_PORT));
    String dbName = cl.getOptionValue(OPTION_INPUT_DB, DEFAULT_DB);
    String collection = cl.getOptionValue(OPTION_INPUT_DB_COLLECTION, DEFAULT_COLLECTION);
    String sequenceCollection = cl.getOptionValue(OPTION_INPUT_SEQUENCE_COLLECTION,
            DEFAULT_SEQUENCES_COLLECTION);

    LOGGER.info("Attempting to connect to DB %s:%d/%s, collection %s", host, port, dbName, collection);
    Loader loader = new Loader(host, port, UNUSED_SOURCE_DB, dbName, collection, sequenceCollection,
            DEFAULT_RENDERING_CACHE);

    JacksonDBCollection<Reachable, String> reachables = loader.getJacksonReachablesCollection();

    LOGGER.info("Connected to DB, reading reachables");

    List<Long> exportIds = !cl.hasOption(OPTION_EXPORT_SOME) ? Collections.emptyList()
            : Arrays.stream(cl.getOptionValues(OPTION_EXPORT_SOME)).map(Long::valueOf)
                    .collect(Collectors.toList());

    TSVWriter<String, String> tsvWriter = new TSVWriter<>(HEADER);
    tsvWriter.open(new File(cl.getOptionValue(OPTION_OUTPUT_FILE)));
    try {
        DBCursor<Reachable> cursor = exportIds.isEmpty() ? reachables.find()
                : reachables.find(DBQuery.in("_id", exportIds));

        int written = 0;
        while (cursor.hasNext()) {
            final Reachable r = cursor.next();

            Map<String, String> row = new HashMap<String, String>() {
                {
                    put("inchi", r.getInchi());
                    put("inchi_key", r.getInchiKey());
                    put("display_name", r.getPageName());
                    put("image_name", r.getStructureFilename());
                }
            };
            tsvWriter.append(row);
            tsvWriter.flush();
            written++;
        }
        LOGGER.info("Wrote %d reachables to output TSV", written);
    } finally {
        tsvWriter.close();
    }
}