List of usage examples for java.util.Set.toArray(T[])
<T> T[] toArray(T[] a);
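Every example below uses this overload to copy the elements of a Set into a typed array, either by handing toArray a presized array or by letting it allocate one. As a quick orientation, here is a minimal, self-contained sketch of both idioms (the class name and values are illustrative only); on Java 11 and later, set.toArray(String[]::new) does the same job via Collection.toArray(IntFunction).

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class SetToArrayIdioms {
    public static void main(String[] args) {
        Set<String> set = new HashSet<>(Arrays.asList("x", "y", "z"));

        // Pass a presized array: toArray fills it and returns that same array.
        String[] presized = set.toArray(new String[set.size()]);

        // Pass a zero-length array: toArray allocates a new String[] of the right size.
        String[] allocated = set.toArray(new String[0]);

        System.out.println(Arrays.toString(presized));
        System.out.println(Arrays.toString(allocated));
    }
}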
From source file:Main.java
public static void main(String[] args) {
    String[] elements = { "M", "N", "O", "P", "Q" };
    Set<String> set = new HashSet<>(Arrays.asList(elements));
    String[] strObj = new String[set.size()];
    strObj = set.toArray(strObj);
    for (int i = 0; i < strObj.length; i++) {
        System.out.println(strObj[i]);
    }
    System.out.println(set);
}
From source file:MainClass.java
public static void main(String[] a) {
    String[] elements = { "A", "B", "C", "D", "E" };
    Set<String> set = new HashSet<>(Arrays.asList(elements));
    String[] strObj = new String[set.size()];
    strObj = set.toArray(strObj);
    for (int i = 0; i < strObj.length; i++) {
        System.out.println(strObj[i]);
    }
    System.out.println(set);
}
From source file:Main.java
public static void main(final String[] args) throws Exception {
    Random random = new Random();
    Set<Integer> intSet = new HashSet<>();
    while (intSet.size() < 6) {
        intSet.add(random.nextInt(49) + 1);
    }
    Integer[] ints = intSet.toArray(new Integer[intSet.size()]);
    System.out.println(Arrays.toString(ints));
}
From source file:Main.java
public static void main(String[] args) {
    // A string array with duplicate values
    String[] data = { "A", "C", "B", "D", "A", "B", "E", "D", "B", "C" };
    System.out.println("Original array : " + Arrays.toString(data));
    List<String> list = Arrays.asList(data);
    Set<String> set = new HashSet<String>(list);
    System.out.print("Remove duplicate result: ");
    String[] result = new String[set.size()];
    set.toArray(result);
    for (String s : result) {
        System.out.print(s + ", ");
    }
}
From source file:Main.java
public static void main(String[] argv) {
    // Create the sorted set
    Set<String> set = new TreeSet<String>();
    set.add("b");
    set.add("c");
    set.add("a");
    Iterator<String> it = set.iterator();
    while (it.hasNext()) {
        String element = it.next();
        System.out.println(element);
    }
    // Create an array containing the elements in the set (no cast needed)
    String[] array = set.toArray(new String[set.size()]);
    System.out.println(Arrays.toString(array));
}
From source file:de.unisb.cs.st.javaslicer.slicing.Slicer.java
public static void main(String[] args) throws InterruptedException {
    Options options = createOptions();
    CommandLineParser parser = new GnuParser();
    CommandLine cmdLine;
    try {
        cmdLine = parser.parse(options, args, true);
    } catch (ParseException e) {
        System.err.println("Error parsing the command line arguments: " + e.getMessage());
        return;
    }
    if (cmdLine.hasOption('h')) {
        printHelp(options, System.out);
        System.exit(0);
    }
    String[] additionalArgs = cmdLine.getArgs();
    if (additionalArgs.length != 2) {
        printHelp(options, System.err);
        System.exit(-1);
    }
    // positional arguments: 1. the trace file, 2. the slicing criterion
    File traceFile = new File(additionalArgs[0]);
    String slicingCriterionString = additionalArgs[1];

    Long threadId = null;
    if (cmdLine.hasOption('t')) {
        // the thread id of interest for slicing
        try {
            threadId = Long.parseLong(cmdLine.getOptionValue('t'));
        } catch (NumberFormatException e) {
            System.err.println("Illegal thread id: " + cmdLine.getOptionValue('t'));
            System.exit(-1);
        }
    }

    TraceResult trace;
    try {
        trace = TraceResult.readFrom(traceFile);
    } catch (IOException e) {
        System.err.format("Could not read the trace file \"%s\": %s%n", traceFile, e);
        System.exit(-1);
        return;
    }

    // the slicing criteria parsed from slicingCriterionString (additionalArgs[1])
    List<SlicingCriterion> sc = null;
    try {
        sc = StaticSlicingCriterion.parseAll(slicingCriterionString, trace.getReadClasses());
    } catch (IllegalArgumentException e) {
        System.err.println("Error parsing slicing criterion: " + e.getMessage());
        System.exit(-1);
        return;
    }

    List<ThreadId> threads = trace.getThreads(); // the threads that produced the trace
    if (threads.size() == 0) {
        System.err.println("The trace file contains no tracing information.");
        System.exit(-1);
    }

    // select the thread of interest: threadId if given, otherwise the main thread
    ThreadId tracing = null;
    for (ThreadId t : threads) {
        if (threadId == null) {
            if ("main".equals(t.getThreadName())
                    && (tracing == null || t.getJavaThreadId() < tracing.getJavaThreadId()))
                tracing = t;
        } else if (t.getJavaThreadId() == threadId.longValue()) {
            tracing = t;
        }
    }
    if (tracing == null) {
        System.err.println(threadId == null ? "Couldn't find the main thread."
                : "The thread you specified was not found.");
        System.exit(-1);
        return;
    }

    long startTime = System.nanoTime();
    Slicer slicer = new Slicer(trace);
    // --progress: report slicing progress on the console
    if (cmdLine.hasOption("progress"))
        slicer.addProgressMonitor(new ConsoleProgressMonitor());

    boolean multithreaded;
    if (cmdLine.hasOption("multithreaded")) {
        String multithreadedStr = cmdLine.getOptionValue("multithreaded");
        multithreaded = ("1".equals(multithreadedStr) || "true".equals(multithreadedStr));
    } else {
        multithreaded = Runtime.getRuntime().availableProcessors() > 1;
    }

    // warn when untraced methods are encountered
    boolean warnUntracedMethods = cmdLine.hasOption("warn-untraced");

    // SliceInstructionsCollector implements the slice-visitor interface; it collects
    // the instructions of the dependence graph that belong to the slice.
    SliceInstructionsCollector collector = new SliceInstructionsCollector();
    slicer.addSliceVisitor(collector);
    if (warnUntracedMethods)
        slicer.addUntracedCallVisitor(new PrintUniqueUntracedMethods()); // report untraced methods if requested

    // the actual slicing step
    slicer.process(tracing, sc, multithreaded);
    Set<InstructionInstance> slice = collector.getDynamicSlice(); // the slice computed by the collector
    long endTime = System.nanoTime();

    Instruction[] sliceArray = slice.toArray(new Instruction[slice.size()]); // convert the set to an array
    Arrays.sort(sliceArray); // restore a stable instruction order

    // print the slicing result
    System.out.println("The dynamic slice for criterion " + sc + ":");
    for (Instruction insn : sliceArray) {
        System.out.format((Locale) null, "%s.%s:%d %s%n", insn.getMethod().getReadClass().getName(),
                insn.getMethod().getName(), insn.getLineNumber(), insn.toString());
    }
    System.out.format((Locale) null, "%nSlice consists of %d bytecode instructions.%n", sliceArray.length);
    System.out.format((Locale) null, "Computation took %.2f seconds.%n", 1e-9 * (endTime - startTime));
}
From source file:Inmemantlr.java
public static void main(String[] args) {
    LOGGER.info("Inmemantlr tool");

    HelpFormatter hformatter = new HelpFormatter();
    Options options = new Options();

    // Binary arguments
    options.addOption("h", "print this message");
    Option grmr = Option.builder().longOpt("grmrfiles").hasArgs()
            .desc("comma-separated list of ANTLR files").required(true)
            .argName("grmrfiles").type(String.class).valueSeparator(',').build();
    Option infiles = Option.builder().longOpt("infiles").hasArgs()
            .desc("comma-separated list of files to parse").required(true)
            .argName("infiles").type(String.class).valueSeparator(',').build();
    Option utilfiles = Option.builder().longOpt("utilfiles").hasArgs()
            .desc("comma-separated list of utility files to be added for compilation").required(false)
            .argName("utilfiles").type(String.class).valueSeparator(',').build();
    Option odir = Option.builder().longOpt("outdir")
            .desc("output directory in which the dot files will be created").required(false)
            .hasArg(true).argName("outdir").type(String.class).build();

    options.addOption(infiles);
    options.addOption(grmr);
    options.addOption(utilfiles);
    options.addOption(odir);

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
        if (cmd.hasOption('h')) {
            hformatter.printHelp("java -jar inmemantlr.jar", options);
            System.exit(0);
        }
    } catch (ParseException e) {
        hformatter.printHelp("java -jar inmemantlr.jar", options);
        LOGGER.error(e.getMessage());
        System.exit(-1);
    }

    // input files
    Set<File> ins = getFilesForOption(cmd, "infiles");
    // grammar files
    Set<File> gs = getFilesForOption(cmd, "grmrfiles");
    // utility files
    Set<File> uf = getFilesForOption(cmd, "utilfiles");
    // output dir
    Set<File> od = getFilesForOption(cmd, "outdir");

    if (od.size() > 1) {
        LOGGER.error("output directories must be less than or equal to 1");
        System.exit(-1);
    }
    if (ins.size() <= 0) {
        LOGGER.error("no input files were specified");
        System.exit(-1);
    }
    if (gs.size() <= 0) {
        LOGGER.error("no grammar files were specified");
        System.exit(-1);
    }

    LOGGER.info("create generic parser");
    GenericParser gp = null;
    try {
        gp = new GenericParser(gs.toArray(new File[gs.size()]));
    } catch (FileNotFoundException e) {
        LOGGER.error(e.getMessage());
        System.exit(-1);
    }

    if (!uf.isEmpty()) {
        try {
            // uf holds File objects, so they cannot be copied into a String[];
            // convert them to paths before handing them to addUtilityJavaFiles(String...).
            gp.addUtilityJavaFiles(uf.stream().map(File::getAbsolutePath).toArray(String[]::new));
        } catch (FileNotFoundException e) {
            LOGGER.error(e.getMessage());
            System.exit(-1);
        }
    }

    LOGGER.info("create and add parse tree listener");
    DefaultTreeListener dt = new DefaultTreeListener();
    gp.setListener(dt);

    LOGGER.info("compile generic parser");
    try {
        gp.compile();
    } catch (CompilationException e) {
        LOGGER.error("cannot compile generic parser: {}", e.getMessage());
        System.exit(-1);
    }

    String fpfx = "";
    for (File of : od) {
        if (!of.exists() || !of.isDirectory()) {
            LOGGER.error("output directory does not exist or is not a directory");
            System.exit(-1);
        }
        fpfx = of.getAbsolutePath();
    }

    Ast ast;
    for (File f : ins) {
        try {
            gp.parse(f);
        } catch (IllegalWorkflowException | FileNotFoundException e) {
            LOGGER.error(e.getMessage());
            System.exit(-1);
        }
        ast = dt.getAst();

        if (!fpfx.isEmpty()) {
            String of = fpfx + "/" + FilenameUtils.removeExtension(f.getName()) + ".dot";
            LOGGER.info("write file {}", of);
            try {
                FileUtils.writeStringToFile(new File(of), ast.toDot(), "UTF-8");
            } catch (IOException e) {
                LOGGER.error(e.getMessage());
                System.exit(-1);
            }
        } else {
            LOGGER.info("Tree for {} \n {}", f.getName(), ast.toDot());
        }
    }
    System.exit(0);
}
From source file:org.ala.hbase.RepoDataLoader.java
/**
 * This takes a list of infosource ids...
 * <p/>
 * Usage: -stats or -reindex or -gList and list of infosourceId
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    //RepoDataLoader loader = new RepoDataLoader();
    ApplicationContext context = SpringUtils.getContext();
    RepoDataLoader loader = (RepoDataLoader) context.getBean(RepoDataLoader.class);
    long start = System.currentTimeMillis();
    loader.loadInfoSources();
    String filePath = repositoryDir;

    if (args.length > 0) {
        if (args[0].equalsIgnoreCase("-stats")) {
            loader.statsOnly = true;
            args = (String[]) ArrayUtils.subarray(args, 1, args.length);
        }
        if (args[0].equalsIgnoreCase("-reindex")) {
            loader.reindex = true;
            loader.indexer = context.getBean(PartialIndex.class);
            args = (String[]) ArrayUtils.subarray(args, 1, args.length);
            logger.info("**** -reindex: " + loader.reindex);
            logger.debug("reindex url: " + loader.reindexUrl);
        }
        if (args[0].equalsIgnoreCase("-gList")) {
            loader.gList = true;
            args = (String[]) ArrayUtils.subarray(args, 1, args.length);
            logger.info("**** -gList: " + loader.gList);
        }
        if (args[0].equalsIgnoreCase("-biocache")) {
            Hashtable<String, String> hashTable = new Hashtable<String, String>();
            hashTable.put("accept", "application/json");
            ObjectMapper mapper = new ObjectMapper();
            mapper.getDeserializationConfig().set(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
            RestfulClient restfulClient = new RestfulClient(0);
            String fq = "&fq=";
            if (args.length > 1) {
                java.util.Date date = new java.util.Date();
                if (args[1].equals("-lastWeek")) {
                    date = DateUtils.addWeeks(date, -1);
                } else if (args[1].equals("-lastMonth")) {
                    date = DateUtils.addMonths(date, -1);
                } else if (args[1].equals("-lastYear")) {
                    date = DateUtils.addYears(date, -1);
                } else
                    date = null;
                if (date != null) {
                    SimpleDateFormat sfd = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
                    fq += "last_load_date:%5B" + sfd.format(date) + "%20TO%20*%5D";
                }
            }
            Object[] resp = restfulClient
                    .restGet("http://biocache.ala.org.au/ws/occurrences/search?q=multimedia:Image" + fq
                            + "&facets=data_resource_uid&pageSize=0", hashTable);
            logger.info("The URL: " + "http://biocache.ala.org.au/ws/occurrences/search?q=multimedia:Image"
                    + fq + "&facets=data_resource_uid&pageSize=0");
            if ((Integer) resp[0] == HttpStatus.SC_OK) {
                String content = resp[1].toString();
                logger.debug(resp[1]);
                if (content != null && content.length() > "[]".length()) {
                    Map map = mapper.readValue(content, Map.class);
                    try {
                        List<java.util.LinkedHashMap<String, String>> list =
                                ((List<java.util.LinkedHashMap<String, String>>) ((java.util.LinkedHashMap) ((java.util.ArrayList) map
                                        .get("facetResults")).get(0)).get("fieldResult"));
                        Set<String> arg = new LinkedHashSet<String>();
                        for (int i = 0; i < list.size(); i++) {
                            java.util.LinkedHashMap<String, String> value = list.get(i);
                            String dataResource = getDataResource(value.get("fq"));
                            Object provider = (loader.getUidInfoSourceMap().get(dataResource));
                            if (provider != null) {
                                arg.add(provider.toString());
                            }
                        }
                        logger.info("Set of biocache infosource ids to load: " + arg);
                        args = new String[] {};
                        args = arg.toArray(args);
                        // handle the situation where biocache-service reports no data resources
                        if (args.length < 1) {
                            logger.error("No biocache data resources found. Unable to load.");
                            System.exit(0);
                        }
                    } catch (Exception e) {
                        logger.error("ERROR: exit process....." + e);
                        e.printStackTrace();
                        System.exit(0);
                    }
                }
            } else {
                logger.warn("Unable to process url: ");
            }
        }
    }

    int filesRead = loader.load(filePath, args); //FIX ME - move to config
    long finish = System.currentTimeMillis();
    logger.info(filesRead + " files scanned/loaded in: " + ((finish - start) / 60000) + " minutes "
            + ((finish - start) / 1000) + " seconds.");
    System.exit(1);
}
From source file:com.lightboxtechnologies.spectrum.SequenceFileExport.java
public static void main(String[] args) throws Exception {
    final Configuration conf = new Configuration();
    final String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

    String imageID;
    String outpath;
    String friendlyname;
    final Set<String> exts = new HashSet<String>();

    if ("-f".equals(otherArgs[0])) {
        // "-f" mode expects: -f <extensions file> <imageID> <friendlyname> <outpath>
        if (otherArgs.length != 5) {
            die();
        }

        // load extensions from file
        final Path extpath = new Path(otherArgs[1]);

        InputStream in = null;
        try {
            in = extpath.getFileSystem(conf).open(extpath);

            Reader r = null;
            try {
                r = new InputStreamReader(in);

                BufferedReader br = null;
                try {
                    br = new BufferedReader(r);

                    String line;
                    while ((line = br.readLine()) != null) {
                        exts.add(line.trim().toLowerCase());
                    }

                    br.close();
                } finally {
                    IOUtils.closeQuietly(br);
                }

                r.close();
            } finally {
                IOUtils.closeQuietly(r);
            }

            in.close();
        } finally {
            IOUtils.closeQuietly(in);
        }

        imageID = otherArgs[2];
        friendlyname = otherArgs[3];
        outpath = otherArgs[4];
    } else {
        if (otherArgs.length < 3) {
            die();
        }

        imageID = otherArgs[0];
        friendlyname = otherArgs[1];
        outpath = otherArgs[2];

        // read extensions from the trailing args, lowercasing each one
        for (int i = 3; i < otherArgs.length; ++i) {
            exts.add(otherArgs[i].toLowerCase());
        }
    }

    conf.setStrings("extensions", exts.toArray(new String[exts.size()]));

    final Job job = SKJobFactory.createJobFromConf(imageID, friendlyname, "SequenceFileExport", conf);
    job.setJarByClass(SequenceFileExport.class);
    job.setMapperClass(SequenceFileExportMapper.class);
    job.setNumReduceTasks(0);

    job.setOutputKeyClass(BytesWritable.class);
    job.setOutputValueClass(MapWritable.class);

    job.setInputFormatClass(FsEntryHBaseInputFormat.class);
    FsEntryHBaseInputFormat.setupJob(job, imageID);

    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setOutputCompressionType(job, SequenceFile.CompressionType.BLOCK);

    FileOutputFormat.setOutputPath(job, new Path(outpath));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
From source file:com.act.lcms.db.io.PrintConstructInfo.java
public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY));
    if (!lcmsDir.isDirectory()) {
        System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    try (DB db = DB.openDBFromCLI(cl)) {
        System.out.print("Loading/updating LCMS scan files into DB\n");
        ScanFile.insertOrUpdateScanFilesInDirectory(db, lcmsDir);

        String construct = cl.getOptionValue(OPTION_CONSTRUCT);
        List<LCMSWell> lcmsWells = LCMSWell.getInstance().getByConstructID(db, construct);
        Collections.sort(lcmsWells, new Comparator<LCMSWell>() {
            @Override
            public int compare(LCMSWell o1, LCMSWell o2) {
                return o1.getId().compareTo(o2.getId());
            }
        });

        Set<String> uniqueMSIDs = new HashSet<>();
        Map<Integer, Plate> platesById = new HashMap<>();

        System.out.format("\n\n-- Construct %s --\n\n", construct);

        List<ChemicalAssociatedWithPathway> pathwayChems = ChemicalAssociatedWithPathway.getInstance()
                .getChemicalsAssociatedWithPathwayByConstructId(db, construct);
        System.out.print("Chemicals associated with pathway:\n");
        System.out.format("  %-8s%-15s%-45s\n", "index", "kind", "chemical");
        for (ChemicalAssociatedWithPathway chem : pathwayChems) {
            System.out.format("  %-8d%-15s%-45s\n", chem.getIndex(), chem.getKind(), chem.getChemical());
        }

        System.out.print("\nLCMS wells:\n");
        System.out.format("  %-15s%-6s%-15s%-15s%-15s\n", "barcode", "well", "msid", "fed", "lcms_count");
        for (LCMSWell well : lcmsWells) {
            uniqueMSIDs.add(well.getMsid());

            Plate p = platesById.get(well.getPlateId());
            if (p == null) {
                // TODO: migrate Plate to be a subclass of BaseDBModel.
                p = Plate.getPlateById(db, well.getPlateId());
                platesById.put(p.getId(), p);
            }

            String chem = well.getChemical();
            List<ScanFile> scanFiles = ScanFile.getScanFileByPlateIDRowAndColumn(db, p.getId(),
                    well.getPlateRow(), well.getPlateColumn());

            System.out.format("  %-15s%-6s%-15s%-15s%-15d\n", p.getBarcode(), well.getCoordinatesString(),
                    well.getMsid(), chem == null || chem.isEmpty() ? "--" : chem, scanFiles.size());
            System.out.flush();
        }

        List<Integer> plateIds = Arrays.asList(platesById.keySet().toArray(new Integer[platesById.size()]));
        Collections.sort(plateIds);
        System.out.print("\nAppears in plates:\n");
        for (Integer id : plateIds) {
            Plate p = platesById.get(id);
            System.out.format("  %s: %s\n", p.getBarcode(), p.getName());
        }

        List<String> msids = Arrays.asList(uniqueMSIDs.toArray(new String[uniqueMSIDs.size()]));
        Collections.sort(msids);
        System.out.format("\nMSIDS: %s\n", StringUtils.join(msids, ", "));

        Set<String> availableNegativeControls = new HashSet<>();
        for (Map.Entry<Integer, Plate> entry : platesById.entrySet()) {
            List<LCMSWell> wells = LCMSWell.getInstance().getByPlateId(db, entry.getKey());
            for (LCMSWell well : wells) {
                if (!construct.equals(well.getComposition())) {
                    availableNegativeControls.add(well.getComposition());
                }
            }
        }

        // Print available standards for each step w/ plate barcodes and coordinates.
        System.out.format("\nAvailable Standards:\n");
        Map<Integer, Plate> plateCache = new HashMap<>();
        for (ChemicalAssociatedWithPathway chem : pathwayChems) {
            List<StandardWell> matchingWells = StandardWell.getInstance().getStandardWellsByChemical(db,
                    chem.getChemical());
            for (StandardWell well : matchingWells) {
                if (!plateCache.containsKey(well.getPlateId())) {
                    Plate p = Plate.getPlateById(db, well.getPlateId());
                    plateCache.put(p.getId(), p);
                }
            }

            Map<Integer, List<StandardWell>> standardWellsByPlateId = new HashMap<>();
            for (StandardWell well : matchingWells) {
                List<StandardWell> plateWells = standardWellsByPlateId.get(well.getPlateId());
                if (plateWells == null) {
                    plateWells = new ArrayList<>();
                    standardWellsByPlateId.put(well.getPlateId(), plateWells);
                }
                plateWells.add(well);
            }

            List<Pair<String, Integer>> plateBarcodes = new ArrayList<>(plateCache.size());
            for (Plate p : plateCache.values()) {
                if (p.getBarcode() == null) {
                    plateBarcodes.add(Pair.of("(no barcode)", p.getId()));
                } else {
                    plateBarcodes.add(Pair.of(p.getBarcode(), p.getId()));
                }
            }
            Collections.sort(plateBarcodes);

            System.out.format("  %s:\n", chem.getChemical());
            for (Pair<String, Integer> barcodePair : plateBarcodes) {
                // TODO: hoist this whole sorting/translation step into a utility class.
                List<StandardWell> wells = standardWellsByPlateId.get(barcodePair.getRight());
                if (wells == null) {
                    // Don't print plates that don't apply to this chemical, which can happen because we're caching the plates.
                    continue;
                }
                Collections.sort(wells, new Comparator<StandardWell>() {
                    @Override
                    public int compare(StandardWell o1, StandardWell o2) {
                        int c = o1.getPlateRow().compareTo(o2.getPlateRow());
                        if (c != 0) return c;
                        return o1.getPlateColumn().compareTo(o2.getPlateColumn());
                    }
                });

                List<String> descriptions = new ArrayList<>(wells.size());
                for (StandardWell well : wells) {
                    descriptions.add(String.format("%s in %s%s", well.getCoordinatesString(), well.getMedia(),
                            well.getConcentration() == null ? "" : String.format(" c. %f", well.getConcentration())));
                }
                System.out.format("    %s: %s\n", barcodePair.getLeft(), StringUtils.join(descriptions, ", "));
            }
        }

        List<String> negativeControlStrains = Arrays
                .asList(availableNegativeControls.toArray(new String[availableNegativeControls.size()]));
        Collections.sort(negativeControlStrains);
        System.out.format("\nAvailable negative controls: %s\n", StringUtils.join(negativeControlStrains, ","));
        System.out.print("\n----------\n");
        System.out.print("\n\n");
    }
}