List of usage examples for com.google.common.collect Multimap get
Collection<V> get(@Nullable K key);
From source file:com.ironiacorp.network.tool.nf.NetworkFreud.java
/**
 * Parses arpwatch output, loads every (MAC, IP) pair into an ARP database and
 * prints each suspicious IP followed by the MAC addresses that claimed it.
 */
public static void main(String[] args) {
    ArpDatabase database = new ArpDatabase();
    ArpwatchParser parser = new ArpwatchParser();
    // Feed every parsed (MAC, IP) tuple into the database.
    for (Tuple<MacAddress, InetAddress> pair : parser.parse()) {
        database.add(pair.getT(), pair.getU());
    }
    // One line per bogus IP, then one indented line per MAC bound to it.
    Multimap<InetAddress, MacAddress> bogusIP = database.findBogusIP();
    for (InetAddress ip : bogusIP.keySet()) {
        System.out.println(ip.toString());
        for (MacAddress mac : bogusIP.get(ip)) {
            System.out.println("\t" + mac.toString());
        }
    }
}
From source file:org.apache.cassandra.tools.SSTableExpiredBlockers.java
/**
 * Command-line tool entry point: reports which live sstables are blocking fully
 * expired sstables from being dropped for a given table.
 *
 * Usage: sstableexpiredblockers &lt;keyspace&gt; &lt;table&gt;
 *
 * @param args last two arguments are keyspace and table name
 * @throws IOException if sstable metadata cannot be read
 */
public static void main(String[] args) throws IOException {
    PrintStream out = System.out;
    if (args.length < 2) {
        out.println("Usage: sstableexpiredblockers <keyspace> <table>");
        System.exit(1);
    }
    Util.initDatabaseDescriptor();
    String keyspace = args[args.length - 2];
    String columnfamily = args[args.length - 1];
    Schema.instance.loadFromDisk(false);
    CFMetaData metadata = Schema.instance.getCFMetaData(keyspace, columnfamily);
    if (metadata == null)
        throw new IllegalArgumentException(
                String.format("Unknown keyspace/table %s.%s", keyspace, columnfamily));
    Keyspace ks = Keyspace.openWithoutSSTables(keyspace);
    ColumnFamilyStore cfs = ks.getColumnFamilyStore(columnfamily);
    Directories.SSTableLister lister = cfs.directories.sstableLister().skipTemporary(true);
    Set<SSTableReader> sstables = new HashSet<>();
    for (Map.Entry<Descriptor, Set<Component>> sstable : lister.list().entrySet()) {
        if (sstable.getKey() != null) {
            try {
                SSTableReader reader = SSTableReader.open(sstable.getKey());
                sstables.add(reader);
            } catch (Throwable t) {
                // Best-effort: a single unreadable sstable should not abort the whole report.
                out.println("Couldn't open sstable: " + sstable.getKey().filenameFor(Component.DATA)
                        + " (" + t.getMessage() + ")");
            }
        }
    }
    if (sstables.isEmpty()) {
        out.println("No sstables for " + keyspace + "." + columnfamily);
        System.exit(1);
    }
    // Anything whose max deletion time is before gcBefore is a candidate for dropping.
    int gcBefore = (int) (System.currentTimeMillis() / 1000) - metadata.getGcGraceSeconds();
    Multimap<SSTableReader, SSTableReader> blockers = checkForExpiredSSTableBlockers(sstables, gcBefore);
    for (SSTableReader blocker : blockers.keySet()) {
        // Fetch the value view once instead of twice per key (original called get() twice).
        Collection<SSTableReader> blocked = blockers.get(blocker);
        out.println(String.format("%s blocks %d expired sstables from getting dropped: %s%n",
                formatForExpiryTracing(Collections.singleton(blocker)), blocked.size(),
                formatForExpiryTracing(blocked)));
    }
    System.exit(0);
}
From source file:uk.ac.ebi.caf.component.factory.PreferencePanelFactory.java
public static void main(String[] args) { JFrame frame = new JFrame(); Multimap<String, Preference> map = ComponentPreferences.getInstance().getCategoryMap(); ComponentPreferences.getInstance().list(System.out); System.out.println(map);//from w ww. j a v a 2 s. c o m frame.add(PreferencePanelFactory.getPreferencePanel(map.get("Rendering"))); frame.setVisible(true); frame.pack(); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); }
From source file:com.github.rinde.vanlon15prima.PerformExperiment.java
/**
 * Runs the PDPTW experiment locally (central cheapest-insertion, central random and
 * an auction-based MAS configuration), then writes one CSV file of per-simulation
 * statistics per configuration under RESULTS.
 *
 * @param args forwarded to the experiment builder's command-line handling
 */
public static void main(String[] args) {
    final long time = System.currentTimeMillis();
    final Experiment.Builder experimentBuilder = Experiment.build(SUM).computeLocal().withRandomSeed(123)
            .withThreads(Runtime.getRuntime().availableProcessors()).repeat(1)
            .addScenarios(FileProvider.builder().add(Paths.get(DATASET)).filter("glob:**[09].scen"))
            .addResultListener(new CommandLineProgress(System.out))
            .usePostProcessor(PostProcessors.statisticsPostProcessor())
            // central: cheapest insertion configuration
            .addConfiguration(
                    Central.solverConfiguration(CheapestInsertionHeuristic.supplier(SUM), "CheapInsert"))
            // central: random
            .addConfiguration(Central.solverConfiguration(RandomSolver.supplier()))
            // mas: auction cheapest insertion with 2-opt per vehicle
            .addConfiguration(MASConfiguration.pdptwBuilder().setName("Auction-R-opt2cih-B-cih")
                    .addEventHandler(AddVehicleEvent.class, new VehicleHandler(
                            SolverRoutePlanner.supplier(
                                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(SUM), SUM)),
                            SolverBidder.supplier(SUM, CheapestInsertionHeuristic.supplier(SUM))))
                    .addModel(SolverModel.builder()).addModel(AuctionCommModel.builder()).build());
    final Optional<ExperimentResults> results = experimentBuilder.perform(System.out, args);
    final long duration = System.currentTimeMillis() - time;
    if (!results.isPresent()) {
        return;
    }
    System.out.println("Done, computed " + results.get().getResults().size() + " simulations in "
            + duration / 1000d + "s");
    // Group results per configuration; LinkedHashMultimap preserves insertion order.
    final Multimap<MASConfiguration, SimulationResult> groupedResults = LinkedHashMultimap.create();
    for (final SimulationResult sr : results.get().sortedResults()) {
        groupedResults.put(sr.getSimArgs().getMasConfig(), sr);
    }
    // One CSV file per configuration.
    for (final MASConfiguration config : groupedResults.keySet()) {
        final Collection<SimulationResult> group = groupedResults.get(config);
        final File configResult = new File(RESULTS + config.getName() + ".csv");
        try {
            Files.createParentDirs(configResult);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        // deletes the file in case it already exists
        configResult.delete();
        try {
            Files.append(
                    "dynamism,urgency,scale,cost,travel_time,tardiness,over_time,is_valid,scenario_id,random_seed,comp_time,num_vehicles,num_orders\n",
                    configResult, Charsets.UTF_8);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        for (final SimulationResult sr : group) {
            final String pc = sr.getSimArgs().getScenario().getProblemClass().getId();
            final String id = sr.getSimArgs().getScenario().getProblemInstanceId();
            final int numVehicles = FluentIterable.from(sr.getSimArgs().getScenario().getEvents())
                    .filter(AddVehicleEvent.class).size();
            try {
                // Scenario metadata is read from a sibling .properties file in DATASET.
                final String scenarioName = Joiner.on("-").join(pc, id);
                final List<String> propsStrings = Files
                        .readLines(new File(DATASET + scenarioName + ".properties"), Charsets.UTF_8);
                final Map<String, String> properties = Splitter.on("\n").withKeyValueSeparator(" = ")
                        .split(Joiner.on("\n").join(propsStrings));
                final double dynamism = Double.parseDouble(properties.get("dynamism_bin"));
                final long urgencyMean = Long.parseLong(properties.get("urgency"));
                final double scale = Double.parseDouble(properties.get("scale"));
                final StatisticsDTO stats = (StatisticsDTO) sr.getResultObject();
                final double cost = SUM.computeCost(stats);
                final double travelTime = SUM.travelTime(stats);
                final double tardiness = SUM.tardiness(stats);
                final double overTime = SUM.overTime(stats);
                final boolean isValidResult = SUM.isValidResult(stats);
                final long computationTime = stats.computationTime;
                final long numOrders = Long.parseLong(properties.get("AddParcelEvent"));
                final String line = Joiner.on(",")
                        .appendTo(new StringBuilder(),
                                asList(dynamism, urgencyMean, scale, cost, travelTime, tardiness, overTime,
                                        isValidResult, scenarioName, sr.getSimArgs().getRandomSeed(),
                                        computationTime, numVehicles, numOrders))
                        .append(System.lineSeparator()).toString();
                if (!isValidResult) {
                    System.err.println("WARNING: FOUND AN INVALID RESULT: ");
                    System.err.println(line);
                }
                Files.append(line, configResult, Charsets.UTF_8);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
    }
}
From source file:PCC.java
/** * @param args the command line arguments * @throws java.io.IOException//w w w .j av a 2 s .com */ public static void main(String[] args) throws IOException { // TODO code application logic here PearsonsCorrelation corel = new PearsonsCorrelation(); PCC method = new PCC(); ArrayList<String> name = new ArrayList<>(); Multimap<String, String> genes = ArrayListMultimap.create(); BufferedWriter bw = new BufferedWriter(new FileWriter(args[1])); BufferedReader br = new BufferedReader(new FileReader(args[0])); String str; while ((str = br.readLine()) != null) { String[] a = str.split("\t"); name.add(a[0]); for (int i = 1; i < a.length; i++) { genes.put(a[0], a[i]); } } for (String key : genes.keySet()) { double[] first = new double[genes.get(key).size()]; int element1 = 0; for (String value : genes.get(key)) { double d = Double.parseDouble(value); first[element1] = d; element1++; } for (String key1 : genes.keySet()) { if (!key.equals(key1)) { double[] second = new double[genes.get(key1).size()]; int element2 = 0; for (String value : genes.get(key1)) { double d = Double.parseDouble(value); second[element2] = d; element2++; } double corrlation = corel.correlation(first, second); if (corrlation > 0.5) { bw.write(key + "\t" + key1 + "\t" + corrlation + "\t" + method.pvalue(corrlation, second.length) + "\n"); } } } } br.close(); bw.close(); }
From source file:com.github.rinde.dynurg.Experimentation.java
/**
 * Distributed experiment runner: evaluates cheapest-insertion and 2-opt (BFS/DFS)
 * solver configurations under three objectives (SUM, TARDINESS, DISTANCE) and writes
 * one CSV of per-simulation statistics per configuration under RESULTS.
 *
 * @param args command-line options parsed by the experiment CLI menu
 */
public static void main(String[] args) {
    System.out.println(System.getProperty("jppf.config"));
    final long time = System.currentTimeMillis();
    final Experiment.Builder experimentBuilder = Experiment.build(SUM).computeDistributed().withRandomSeed(123)
            .repeat(10).numBatches(10)
            .addScenarios(
                    FileProvider.builder().add(Paths.get(DATASET)).filter("glob:**[01].[0-9]0#[0-5].scen"))
            .addResultListener(new CommandLineProgress(System.out))
            .addConfiguration(
                    Central.solverConfiguration(CheapestInsertionHeuristic.supplier(SUM), "-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(CheapestInsertionHeuristic.supplier(TARDINESS),
                    "-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(CheapestInsertionHeuristic.supplier(DISTANCE),
                    "-CheapInsert-Dist"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(SUM), SUM),
                    "-bfsOpt2-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(TARDINESS), TARDINESS),
                    "-bfsOpt2-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(DISTANCE), DISTANCE),
                    "-bfsOpt2-CheapInsert-Dist"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(SUM), SUM),
                    "-dfsOpt2-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(TARDINESS), TARDINESS),
                    "-dfsOpt2-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(DISTANCE), DISTANCE),
                    "-dfsOpt2-CheapInsert-Dist"));
    // Optional CLI flag -nv/--number-of-vehicles overrides the vehicle count in all scenarios.
    final Menu m = ExperimentCli.createMenuBuilder(experimentBuilder)
            .add(Option.builder("nv", ArgumentParser.INTEGER).longName("number-of-vehicles")
                    .description("Changes the number of vehicles in all scenarios.").build(),
                    experimentBuilder, new ArgHandler<Experiment.Builder, Integer>() {
                        @Override
                        public void execute(Experiment.Builder subject, Optional<Integer> argument) {
                            subject.setScenarioReader(new NumVehiclesScenarioParser(argument.get()));
                        }
                    })
            .build();
    final Optional<String> error = m.safeExecute(args);
    if (error.isPresent()) {
        System.err.println(error.get());
        return;
    }
    final ExperimentResults results = experimentBuilder.perform();
    final long duration = System.currentTimeMillis() - time;
    System.out
            .println("Done, computed " + results.results.size() + " simulations in " + duration / 1000d + "s");
    // Group results per configuration; LinkedHashMultimap preserves insertion order.
    final Multimap<MASConfiguration, SimulationResult> groupedResults = LinkedHashMultimap.create();
    for (final SimulationResult sr : results.sortedResults()) {
        groupedResults.put(sr.masConfiguration, sr);
    }
    // One CSV file per configuration.
    for (final MASConfiguration config : groupedResults.keySet()) {
        final Collection<SimulationResult> group = groupedResults.get(config);
        final File configResult = new File(RESULTS + config.toString() + ".csv");
        try {
            Files.createParentDirs(configResult);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        // deletes the file in case it already exists
        configResult.delete();
        try {
            Files.append(
                    "dynamism,urgency_mean,urgency_sd,cost,travel_time,tardiness,over_time,is_valid,scenario_id,random_seed,comp_time,num_vehicles\n",
                    configResult, Charsets.UTF_8);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        for (final SimulationResult sr : group) {
            final String pc = sr.scenario.getProblemClass().getId();
            final String id = sr.scenario.getProblemInstanceId();
            final int numVehicles = FluentIterable.from(sr.scenario.asList()).filter(AddVehicleEvent.class)
                    .size();
            try {
                // Scenario metadata lives next to the dataset in .properties files.
                final List<String> propsStrings = Files
                        .readLines(new File("files/dataset/" + pc + id + ".properties"), Charsets.UTF_8);
                final Map<String, String> properties = Splitter.on("\n").withKeyValueSeparator(" = ")
                        .split(Joiner.on("\n").join(propsStrings));
                final double dynamism = Double.parseDouble(properties.get("dynamism"));
                final double urgencyMean = Double.parseDouble(properties.get("urgency_mean"));
                final double urgencySd = Double.parseDouble(properties.get("urgency_sd"));
                final double cost = SUM.computeCost(sr.stats);
                final double travelTime = SUM.travelTime(sr.stats);
                final double tardiness = SUM.tardiness(sr.stats);
                final double overTime = SUM.overTime(sr.stats);
                final boolean isValidResult = SUM.isValidResult(sr.stats);
                final long computationTime = sr.stats.computationTime;
                final String line = Joiner.on(",")
                        .appendTo(new StringBuilder(),
                                asList(dynamism, urgencyMean, urgencySd, cost, travelTime, tardiness, overTime,
                                        isValidResult, pc + id, sr.seed, computationTime, numVehicles))
                        .append(System.lineSeparator()).toString();
                if (!isValidResult) {
                    System.err.println("WARNING: FOUND AN INVALID RESULT: ");
                    System.err.println(line);
                }
                Files.append(line, configResult, Charsets.UTF_8);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
    }
}
From source file:org.apache.ctakes.temporal.data.analysis.PrintInconsistentAnnotations.java
/**
 * Scans gold-standard temporal annotations and reports events whose DocTimeRel is
 * inconsistent with the DocTimeRel of their narrative container, printing the
 * surrounding text (windowSize chars on each side) for manual inspection, plus an
 * overall inconsistency percentage at the end.
 *
 * @param args parsed into Options (patient sets, raw text dir, XMI dir)
 * @throws Exception on reader/pipeline failures
 */
public static void main(String[] args) throws Exception {
    Options options = CliFactory.parseArguments(Options.class, args);
    int windowSize = 50;
    List<Integer> patientSets = options.getPatients().getList();
    List<Integer> trainItems = THYMEData.getPatientSets(patientSets, THYMEData.TRAIN_REMAINDERS);
    List<File> files = THYMEData.getFilesFor(trainItems, options.getRawTextDirectory());
    CollectionReader reader = UriCollectionReader.getCollectionReaderFromFiles(files);
    AggregateBuilder aggregateBuilder = new AggregateBuilder();
    aggregateBuilder.add(UriToDocumentTextAnnotator.getDescription());
    aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(XMIReader.class,
            XMIReader.PARAM_XMI_DIRECTORY, options.getXMIDirectory()));
    int totalDocTimeRels = 0;
    int totalInconsistentDocTimeRels = 0;
    for (Iterator<JCas> casIter = new JCasIterator(reader, aggregateBuilder.createAggregate()); casIter
            .hasNext();) {
        JCas jCas = casIter.next();
        String text = jCas.getDocumentText();
        JCas goldView = jCas.getView("GoldView");
        // group events by their narrative container
        Multimap<Annotation, EventMention> containers = HashMultimap.create();
        for (TemporalTextRelation relation : JCasUtil.select(goldView, TemporalTextRelation.class)) {
            if (relation.getCategory().equals("CONTAINS")) {
                Annotation arg1 = relation.getArg1().getArgument();
                Annotation arg2 = relation.getArg2().getArgument();
                if (arg2 instanceof EventMention) {
                    EventMention event = (EventMention) arg2;
                    containers.put(arg1, event);
                }
            }
        }
        // check each container for inconsistent DocTimeRels
        for (Annotation container : containers.keySet()) {
            Set<String> docTimeRels = Sets.newHashSet();
            for (EventMention event : containers.get(container)) {
                docTimeRels.add(event.getEvent().getProperties().getDocTimeRel());
            }
            totalDocTimeRels += docTimeRels.size();
            boolean inconsistentDocTimeRels;
            if (container instanceof EventMention) {
                EventMention mention = ((EventMention) container);
                String containerDocTimeRel = mention.getEvent().getProperties().getDocTimeRel();
                inconsistentDocTimeRels = false;
                for (String docTimeRel : docTimeRels) {
                    if (docTimeRel.equals(containerDocTimeRel)) {
                        continue;
                    }
                    // "BEFORE/OVERLAP" on the container is treated as subsuming BEFORE and OVERLAP.
                    if (containerDocTimeRel.equals("BEFORE/OVERLAP")
                            && (docTimeRel.equals("BEFORE") || docTimeRel.equals("OVERLAP"))) {
                        continue;
                    }
                    inconsistentDocTimeRels = true;
                    break;
                }
            } else {
                if (docTimeRels.size() == 1) {
                    inconsistentDocTimeRels = false;
                } else if (docTimeRels.contains("BEFORE/OVERLAP")) {
                    // NOTE(review): this branch is only reached when size() != 1, so the
                    // `size() == 1 && ...` expression below is always false. The intent was
                    // presumably a different size check (e.g. that the remaining relations are
                    // only BEFORE/OVERLAP) — confirm against upstream before changing.
                    inconsistentDocTimeRels = docTimeRels.size() == 1
                            && (docTimeRels.contains("BEFORE") || docTimeRels.contains("OVERLAP"));
                } else {
                    inconsistentDocTimeRels = true;
                }
            }
            // if inconsistent: print events, DocTimeRels and surrounding context
            if (inconsistentDocTimeRels) {
                totalInconsistentDocTimeRels += docTimeRels.size();
                List<Integer> offsets = Lists.newArrayList();
                offsets.add(container.getBegin());
                offsets.add(container.getEnd());
                for (EventMention event : containers.get(container)) {
                    offsets.add(event.getBegin());
                    offsets.add(event.getEnd());
                }
                Collections.sort(offsets);
                // Window is clamped to the document's bounds.
                int begin = Math.max(offsets.get(0) - windowSize, 0);
                int end = Math.min(offsets.get(offsets.size() - 1) + windowSize, text.length());
                System.err.printf("Inconsistent DocTimeRels in %s, ...%s...\n",
                        new File(ViewUriUtil.getURI(jCas)).getName(),
                        text.substring(begin, end).replaceAll("([\r\n])[\r\n]+", "$1"));
                if (container instanceof EventMention) {
                    System.err.printf("Container: \"%s\" (docTimeRel=%s)\n", container.getCoveredText(),
                            ((EventMention) container).getEvent().getProperties().getDocTimeRel());
                } else {
                    System.err.printf("Container: \"%s\"\n", container.getCoveredText());
                }
                // Print contained events sorted by their begin offset.
                Ordering<EventMention> byBegin = Ordering.natural()
                        .onResultOf(new Function<EventMention, Integer>() {
                            @Override
                            public Integer apply(@Nullable EventMention event) {
                                return event.getBegin();
                            }
                        });
                for (EventMention event : byBegin.sortedCopy(containers.get(container))) {
                    System.err.printf("* \"%s\" (docTimeRel=%s)\n", event.getCoveredText(),
                            event.getEvent().getProperties().getDocTimeRel());
                }
                System.err.println();
            }
        }
    }
    System.err.printf("Inconsistent DocTimeRels: %.1f%% (%d/%d)\n",
            100.0 * totalInconsistentDocTimeRels / totalDocTimeRels, totalInconsistentDocTimeRels,
            totalDocTimeRels);
}
From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java
/**
 * Measures genotype-to-phenotype mapping properties (invalidity, redundancy,
 * locality, non-uniformity, non-synonymousity — each also in a "valid only"
 * variant that excludes unmappable genotypes) for several mappers across benchmark
 * problems, emitting lines of the form:
 * problem;mapper;genotypeSize;param;property;value
 *
 * @param args optional args[0] = output file (defaults to stdout)
 */
public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000; // genotypes sampled per (problem, mapper) pair
    final int nDist = 10000; // cap on sampled distance pairs
    //prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    // NOTE(review): clear() discards the entire mapper grid built above; only the
    // five mappers added on the next line are actually evaluated.
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    // Output goes to args[0] if given, otherwise stdout.
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    //prepare distances
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    //iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            //build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            //build configuration and evolver; mapper names follow "<family>-<genotypeSize>-<param>"
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperName.split("-")[1]);
            int mapperMainParam = Integer.parseInt(mapperName.split("-")[2]);
            if (mapperName.split("-")[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            //prepare things; fixed seed so every mapper sees the same genotype sample
            Random random = new Random(1);
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            //build genotypes
            if (mapperName.split("-")[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            //build and fill map: phenotype -> all genotypes that map to it
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperName.split("-")[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    // All failed mappings are bucketed under the empty tree ("invalid").
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                if (progress % Math.round(n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            //compute distances
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            // Sample up to nDist intra-phenotype genotype distances per phenotype.
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            // Sample up to nDist (genotype distance, phenotype distance) pairs over shuffled entries.
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            //compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            if (multimap.keySet().contains(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1; // position of the empty-tree phenotype, -1 if absent
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances)
                    / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            // If the invalid phenotype exists, recompute the "valid" stats with it spliced out.
            if (invalidIndex != -1) {
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex,
                        sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances,
                        invalidIndex, meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            // emit one output line per computed property
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, locality);
            filePrintStream.printf("%s;%s;%d;%d;validLLocality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    if (filePrintStream != null) {
        filePrintStream.close();
    }
}
From source file:org.commoncrawl.mapred.ec2.postprocess.deduper.DeduperUtils.java
/**
 * Self-test harness for the dedupe pipeline: benchmarks bloom-filter clears, dumps
 * the simhash pattern tables, sanity-checks hamming distances on hand-built keys,
 * and verifies that SimhashMatcher / SetUnionFinder group near-duplicate URLs as
 * expected.
 *
 * @param args unused
 */
public static void main(String[] args) throws IOException {
    URLFPBloomFilter filter = new URLFPBloomFilter(JSONSetBuilder.NUM_ELEMENTS,
            JSONSetBuilder.NUM_HASH_FUNCTIONS, JSONSetBuilder.NUM_BITS);
    // Time 1000 filter clears and report the mean (nanoseconds).
    DescriptiveStatistics filterClearStats = new DescriptiveStatistics();
    for (int i = 0; i < 1000; ++i) {
        long timeStart = System.nanoTime();
        filter.clear();
        long timeEnd = System.nanoTime();
        filterClearStats.addValue(timeEnd - timeStart);
    }
    System.out.println("Mean Clear Time:" + filterClearStats.getMean());
    System.out.println("size:" + BINOMIAL_COFF);
    // Dump each pattern as a 6-bit binary string plus its key MSB length.
    for (int j = 0; j < BINOMIAL_COFF; ++j) {
        int value = patternArray[j];
        System.out.print("value:" + value + " ");
        for (int i = 5; i >= 0; --i) {
            System.out.print(((value & (1 << i)) != 0) ? '1' : '0');
        }
        System.out.print(" Key MSBLen:" + Integer.toString(patternKeyMSBits[j]) + "\n");
    }
    validateGenerator();
    // Hand-built 64-bit keys with known pairwise hamming distances.
    long key1 = new BitBuilder().on(10).off(1).on(53).bits();
    long key2 = new BitBuilder().on(10).off(4).on(50).bits();
    long key3 = new BitBuilder().on(10).off(4).on(47).off(3).bits();
    long key4 = new BitBuilder().off(10).on(4).off(47).on(3).bits();
    long key5 = new BitBuilder().off(10).on(4).off(47).on(1).off(2).bits();
    Assert.assertTrue(SimHash.hammingDistance(key1, key2) == 3);
    Assert.assertTrue(SimHash.hammingDistance(key1, key3) != 3);
    Assert.assertTrue(SimHash.hammingDistance(key2, key3) == 3);
    Assert.assertTrue(SimHash.hammingDistance(key1, key4) > 3);
    Assert.assertTrue(SimHash.hammingDistance(key2, key4) > 3);
    Assert.assertTrue(SimHash.hammingDistance(key3, key4) > 3);
    Assert.assertTrue(SimHash.hammingDistance(key4, key5) <= 3);
    ImmutableList<DeduperValue> values = new ImmutableList.Builder<DeduperValue>()
            .add(new DeduperValue(key1, 1000, 2000, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.1"), 1000,
                    new TextBytes("http://adomain.com/")))
            .add(new DeduperValue(key2, 1001, 2001, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.2"), 1000,
                    new TextBytes("http://bdomain.com/")))
            .add(new DeduperValue(key3, 1002, 2002, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.3"), 1000,
                    new TextBytes("http://cdomain.com/")))
            .add(new DeduperValue(key4, 1003, 2003, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.4"), 1000,
                    new TextBytes("http://ddomain.com/")))
            .add(new DeduperValue(key5, 1004, 2004, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.5"), 1000,
                    new TextBytes("http://edomain.com/")))
            .build();
    SimhashMatcher unionFinder = new SimhashMatcher();
    final Multimap<String, Long> rootDomainToDupes = TreeMultimap.create();
    // collect all json set representations ...
    final ArrayList<TextBytes> jsonSets = new ArrayList<TextBytes>();
    unionFinder.emitMatches(3, values.iterator(), new OutputCollector<TextBytes, TextBytes>() {
        @Override
        public void collect(TextBytes key, TextBytes value) throws IOException {
            System.out.println("Root:" + key + " JSON: " + value.toString());
            populateTestJSONSetData(rootDomainToDupes, key, value);
            // collect all json sets for later disjoint-set join
            jsonSets.add(value);
        }
    }, null);
    // Expected clusters: hashes 2000-2002 (keys 1-3) together, 2003-2004 (keys 4-5) together.
    ImmutableList<Long> hashSuperSet1 = ImmutableList.of(2000L, 2001L, 2002L);
    ImmutableList<Long> hashSuperSet2 = ImmutableList.of(2003L, 2004L);
    Assert.assertTrue(rootDomainToDupes.get("adomain.com").containsAll(hashSuperSet1));
    Assert.assertTrue(rootDomainToDupes.get("bdomain.com").containsAll(hashSuperSet1));
    Assert.assertTrue(rootDomainToDupes.get("cdomain.com").containsAll(hashSuperSet1));
    Assert.assertTrue(rootDomainToDupes.get("ddomain.com").containsAll(hashSuperSet2));
    Assert.assertTrue(rootDomainToDupes.get("edomain.com").containsAll(hashSuperSet2));
    // Second batch: same keys, different URLs, to exercise the disjoint-set join below.
    ImmutableList<DeduperValue> secondSetValues = new ImmutableList.Builder<DeduperValue>()
            .add(new DeduperValue(key1, 1000, 2000, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.2"), 1000,
                    new TextBytes("http://adomain.com/")))
            .add(new DeduperValue(key1, 1007, 2007, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.2"), 1000,
                    new TextBytes("http://z1domain.com/")))
            .add(new DeduperValue(key2, 1008, 2008, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.2"), 1000,
                    new TextBytes("http://z2domain.com/")))
            .add(new DeduperValue(key3, 1009, 2009, IPAddressUtils.IPV4AddressStrToInteger("10.0.0.2"), 1000,
                    new TextBytes("http://z3domain.com/")))
            .build();
    unionFinder.emitMatches(3, secondSetValues.iterator(), new OutputCollector<TextBytes, TextBytes>() {
        @Override
        public void collect(TextBytes key, TextBytes value) throws IOException {
            System.out.println("Root:" + key + " JSON: " + value.toString());
            // collect all json sets for later disjoint-set join
            jsonSets.add(value);
        }
    }, null);
    SetUnionFinder unionFinder2 = new SetUnionFinder();
    // union all json sets ...
    unionFinder2.union(jsonSets.iterator());
    // ok emit union of sets ...
    unionFinder2.emit(new TextBytes("test"), new OutputCollector<TextBytes, TextBytes>() {
        @Override
        public void collect(TextBytes key, TextBytes value) throws IOException {
            System.out.println("Root:" + key + " JSON: " + value.toString());
        }
    }, null);
}
From source file:com.cloudbees.api.Main.java
public static void main(String[] args) throws Exception { File beesCredentialsFile = new File(System.getProperty("user.home"), ".bees/bees.config"); Preconditions.checkArgument(beesCredentialsFile.exists(), "File %s not found", beesCredentialsFile); Properties beesCredentials = new Properties(); beesCredentials.load(new FileInputStream(beesCredentialsFile)); String apiUrl = "https://api.cloudbees.com/api"; String apiKey = beesCredentials.getProperty("bees.api.key"); String secret = beesCredentials.getProperty("bees.api.secret"); BeesClient client = new BeesClient(apiUrl, apiKey, secret, "xml", "1.0"); client.setVerbose(false);/* ww w . ja va 2 s .co m*/ URL databasesUrl = Thread.currentThread().getContextClassLoader().getResource("databases.txt"); Preconditions.checkNotNull(databasesUrl, "File 'databases.txt' NOT found in the classpath"); Collection<String> databaseNames; try { databaseNames = Sets.newTreeSet(Resources.readLines(databasesUrl, Charsets.ISO_8859_1)); } catch (Exception e) { throw Throwables.propagate(e); } databaseNames = Collections2.transform(databaseNames, new Function<String, String>() { @Nullable @Override public String apply(@Nullable String input) { // {host_db_create,<<"tco_q5rm">>,<<"TCO_q5rm">>, if (input == null) return null; if (input.startsWith("#")) return null; if (input.indexOf('"') == -1) { logger.warn("Skip invalid line {}", input); return null; } input = input.substring(input.indexOf('"') + 1); if (input.indexOf('"') == -1) { logger.warn("Skip invalid line {}", input); return null; } return input.substring(0, input.indexOf('"')); } }); databaseNames = Collections2.filter(databaseNames, new Predicate<String>() { @Override public boolean apply(@Nullable String s) { return !Strings.isNullOrEmpty(s); } }); Multimap<String, String> databasesByAccount = ArrayListMultimap.create(); Class.forName("com.mysql.jdbc.Driver"); for (String databaseName : databaseNames) { try { DatabaseInfo databaseInfo = client.databaseInfo(databaseName, true); 
databasesByAccount.put(databaseInfo.getOwner(), databaseInfo.getName()); logger.debug("Evaluate " + databaseInfo.getName()); if (true == false) { // Hibernate logger.info("Hibernate {}", databaseName); Map<String, String> params = new HashMap<String, String>(); params.put("database_id", databaseName); String url = client.getRequestURL("database.hibernate", params); String response = client.executeRequest(url); DatabaseInfoResponse apiResponse = (DatabaseInfoResponse) client.readResponse(response); logger.info("DB {} status: {}", apiResponse.getDatabaseInfo().getName(), apiResponse.getDatabaseInfo().getStatus()); } if (true == false) { // Hibernate logger.info("Activate {}", databaseName); Map<String, String> params = new HashMap<String, String>(); params.put("database_id", databaseName); String url = client.getRequestURL("database.activate", params); String response = client.executeRequest(url); DatabaseInfoResponse apiResponse = (DatabaseInfoResponse) client.readResponse(response); logger.info("DB {} status: {}", apiResponse.getDatabaseInfo().getName(), apiResponse.getDatabaseInfo().getStatus()); } String dbUrl = "jdbc:mysql://" + databaseInfo.getMaster() + "/" + databaseInfo.getName(); logger.info("Connect to {} user={}", dbUrl, databaseInfo.getUsername()); Connection cnn = DriverManager.getConnection(dbUrl, databaseInfo.getUsername(), databaseInfo.getPassword()); cnn.setAutoCommit(false); cnn.close(); } catch (Exception e) { logger.warn("Exception for {}", databaseName, e); } } System.out.println("OWNERS"); for (String account : databasesByAccount.keySet()) { System.out.println(account + ": " + Joiner.on(", ").join(databasesByAccount.get(account))); } }