Example usage for java.util Set add

List of usage examples for java.util Set add

Introduction

On this page you can find example usages of java.util.Set.add.

Prototype

boolean add(E e);

Document

Adds the specified element to this set if it is not already present (optional operation).
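
The example below is a minimal, self-contained sketch of that contract: add returns true if the set did not already contain the element and false otherwise, which makes it handy for detecting duplicates. The class and variable names are illustrative, not from any of the projects listed under Usage.

import java.util.HashSet;
import java.util.Set;

public class SetAddExample {
    public static void main(String[] args) {
        Set<String> names = new HashSet<String>();
        System.out.println(names.add("alice")); // true: "alice" was not present
        System.out.println(names.add("bob"));   // true: the set changes again
        System.out.println(names.add("alice")); // false: "alice" is already in the set
        System.out.println(names.size());       // 2: the duplicate was not added
    }
}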

Usage

From source file:dkpro.similarity.algorithms.vsm.store.convert.ConvertLuceneToVectorIndex.java

public static void main(String[] args) throws Exception {
    File inputPath = new File(args[0]);
    File outputPath = new File(args[1]);

    deleteQuietly(outputPath);
    outputPath.mkdirs();

    boolean ignoreNumerics = true;
    boolean ignoreCardinal = true;
    boolean ignoreMonetary = true;
    int minTermLength = 3;
    int minDocFreq = 5;

    System.out.println("Quality criteria");
    System.out.println("Minimum term length            : " + minTermLength);
    System.out.println("Minimum document frequency     : " + minDocFreq);
    System.out.println("Ignore numeric tokens          : " + ignoreNumerics);
    System.out.println("Ignore cardinal numeric tokens : " + ignoreNumerics);
    System.out.println("Ignore money values            : " + ignoreMonetary);

    System.out.print("Fetching terms list... ");

    IndexReader reader = IndexReader.open(FSDirectory.open(inputPath));
    TermEnum termEnum = reader.terms();
    Set<String> terms = new HashSet<String>();
    int ignoredTerms = 0;
    while (termEnum.next()) {
        String term = termEnum.term().text();
        if (((minTermLength > 0) && (term.length() < minTermLength)) || (ignoreCardinal && isCardinal(term))
                || (ignoreMonetary && isMonetary(term)) || (ignoreNumerics && isNumericSpace(term))
                || ((minDocFreq > 0) && (termEnum.docFreq() < minDocFreq))) {
            ignoredTerms++;
            continue;
        }

        terms.add(term);
    }
    reader.close();

    System.out.println(terms.size() + " terms found. " + ignoredTerms + " terms ignored.");

    System.out.println("Opening source ESA index " + inputPath);
    VectorReader source = new LuceneVectorReader(inputPath);
    System.out.println("Opening destination ESA index " + inputPath);
    VectorIndexWriter esaWriter = new VectorIndexWriter(outputPath, source.getConceptCount());

    ProgressMeter p = new ProgressMeter(terms.size());
    for (String term : terms) {
        Vector vector = source.getVector(term);
        esaWriter.put(term, vector);

        p.next();
        System.out.println("[" + term + "] " + p);
    }

    esaWriter.close();
}

From source file:com.github.ansell.shp.SHPDump.java

public static void main(String... args) throws Exception {
    final OptionParser parser = new OptionParser();

    final OptionSpec<Void> help = parser.accepts("help").forHelp();
    final OptionSpec<File> input = parser.accepts("input").withRequiredArg().ofType(File.class).required()
            .describedAs("The input SHP file");
    final OptionSpec<File> output = parser.accepts("output").withRequiredArg().ofType(File.class).required()
            .describedAs("The output directory to use for debugging files");
    final OptionSpec<String> outputPrefix = parser.accepts("prefix").withRequiredArg().ofType(String.class)
            .defaultsTo("shp-debug").describedAs("The output prefix to use for debugging files");
    final OptionSpec<File> outputMappingTemplate = parser.accepts("output-mapping").withRequiredArg()
            .ofType(File.class).describedAs("The output mapping template file if it needs to be generated.");
    final OptionSpec<Integer> resolution = parser.accepts("resolution").withRequiredArg().ofType(Integer.class)
            .defaultsTo(2048).describedAs("The output image file resolution");
    final OptionSpec<String> format = parser.accepts("format").withRequiredArg().ofType(String.class)
            .defaultsTo("png").describedAs("The output image format");
    final OptionSpec<String> removeIfEmpty = parser.accepts("remove-if-empty").withRequiredArg()
            .ofType(String.class).describedAs(
                    "The name of an attribute to remove if its value is empty before outputting the resulting shapefile. Use multiple times to specify multiple fields to check");

    OptionSet options = null;

    try {
        options = parser.parse(args);
    } catch (final OptionException e) {
        System.out.println(e.getMessage());
        parser.printHelpOn(System.out);
        throw e;
    }

    if (options.has(help)) {
        parser.printHelpOn(System.out);
        return;
    }

    final Path inputPath = input.value(options).toPath();
    if (!Files.exists(inputPath)) {
        throw new FileNotFoundException("Could not find input SHP file: " + inputPath.toString());
    }

    final Path outputPath = output.value(options).toPath();
    if (!Files.exists(outputPath)) {
        throw new FileNotFoundException("Output directory does not exist: " + outputPath.toString());
    }

    final Path outputMappingPath = options.has(outputMappingTemplate)
            ? outputMappingTemplate.value(options).toPath()
            : null;
    if (options.has(outputMappingTemplate) && Files.exists(outputMappingPath)) {
        throw new FileNotFoundException(
                "Output mapping template file already exists: " + outputMappingPath.toString());
    }

    final Set<String> filterFields = ConcurrentHashMap.newKeySet();
    if (options.has(removeIfEmpty)) {
        for (String nextFilterField : removeIfEmpty.values(options)) {
            System.out.println("Will filter field if empty value found: " + nextFilterField);
            filterFields.add(nextFilterField);
        }
    }

    if (!filterFields.isEmpty()) {
        System.out.println("Full set of filter fields: " + filterFields);
    }

    final String prefix = outputPrefix.value(options);

    FileDataStore store = FileDataStoreFinder.getDataStore(inputPath.toFile());

    if (store == null) {
        throw new RuntimeException("Could not read the given input as an ESRI Shapefile: "
                + inputPath.toAbsolutePath().toString());
    }

    for (String typeName : new LinkedHashSet<>(Arrays.asList(store.getTypeNames()))) {
        System.out.println("");
        System.out.println("Type: " + typeName);
        SimpleFeatureSource featureSource = store.getFeatureSource(typeName);
        SimpleFeatureType schema = featureSource.getSchema();

        Name outputSchemaName = new NameImpl(schema.getName().getNamespaceURI(),
                schema.getName().getLocalPart().replace(" ", "").replace("%20", ""));
        System.out.println("Replacing name on schema: " + schema.getName() + " with " + outputSchemaName);
        SimpleFeatureType outputSchema = SHPUtils.changeSchemaName(schema, outputSchemaName);

        List<String> attributeList = new ArrayList<>();
        for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
            System.out.println("Attribute: " + attribute.getName().toString());
            attributeList.add(attribute.getName().toString());
        }
        CsvSchema csvSchema = CSVUtil.buildSchema(attributeList);

        SimpleFeatureCollection collection = featureSource.getFeatures();
        int featureCount = 0;
        Path nextCSVFile = outputPath.resolve(prefix + ".csv");
        Path nextSummaryCSVFile = outputPath
                .resolve(prefix + "-" + outputSchema.getTypeName() + "-Summary.csv");
        List<SimpleFeature> outputFeatureList = new CopyOnWriteArrayList<>();

        try (SimpleFeatureIterator iterator = collection.features();
                Writer bufferedWriter = Files.newBufferedWriter(nextCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                SequenceWriter csv = CSVUtil.newCSVWriter(bufferedWriter, csvSchema);) {
            List<String> nextLine = new ArrayList<>();
            while (iterator.hasNext()) {
                SimpleFeature feature = iterator.next();
                featureCount++;
                if (featureCount <= 2) {
                    System.out.println("");
                    System.out.println(feature.getIdentifier());
                } else if (featureCount % 100 == 0) {
                    System.out.print(".");
                }
                boolean filterThisFeature = false;
                for (AttributeDescriptor attribute : schema.getAttributeDescriptors()) {
                    String featureString = Optional.ofNullable(feature.getAttribute(attribute.getName()))
                            .orElse("").toString();
                    nextLine.add(featureString);
                    if (filterFields.contains(attribute.getName().toString())
                            && featureString.trim().isEmpty()) {
                        filterThisFeature = true;
                    }
                    if (featureString.length() > 100) {
                        featureString = featureString.substring(0, 100) + "...";
                    }
                    if (featureCount <= 2) {
                        System.out.print(attribute.getName() + "=");
                        System.out.println(featureString);
                    }
                }
                if (!filterThisFeature) {
                    outputFeatureList.add(SHPUtils.changeSchemaName(feature, outputSchema));
                    csv.write(nextLine);
                }
                nextLine.clear();
            }
        }
        try (Reader csvReader = Files.newBufferedReader(nextCSVFile, StandardCharsets.UTF_8);
                Writer summaryOutput = Files.newBufferedWriter(nextSummaryCSVFile, StandardCharsets.UTF_8,
                        StandardOpenOption.CREATE_NEW);
                final Writer mappingWriter = options.has(outputMappingTemplate)
                        ? Files.newBufferedWriter(outputMappingPath)
                        : NullWriter.NULL_WRITER) {
            CSVSummariser.runSummarise(csvReader, summaryOutput, mappingWriter,
                    CSVSummariser.DEFAULT_SAMPLE_COUNT, false);
        }
        if (featureCount > 100) {
            System.out.println("");
        }
        System.out.println("");
        System.out.println("Feature count: " + featureCount);

        SimpleFeatureCollection outputCollection = new ListFeatureCollection(outputSchema, outputFeatureList);
        Path outputShapefilePath = outputPath.resolve(prefix + "-" + outputSchema.getTypeName() + "-dump");
        if (!Files.exists(outputShapefilePath)) {
            Files.createDirectory(outputShapefilePath);
        }
        SHPUtils.writeShapefile(outputCollection, outputShapefilePath);

        // Create ZIP file from the contents to keep the subfiles together
        Path outputShapefileZipPath = outputPath
                .resolve(prefix + "-" + outputSchema.getTypeName() + "-dump.zip");
        try (final OutputStream out = Files.newOutputStream(outputShapefileZipPath,
                StandardOpenOption.CREATE_NEW);
                final ZipOutputStream zip = new ZipOutputStream(out, StandardCharsets.UTF_8);) {
            Files.list(outputShapefilePath).forEachOrdered(Unchecked.consumer(e -> {
                zip.putNextEntry(new ZipEntry(e.getFileName().toString()));
                Files.copy(e, zip);
                zip.closeEntry();
            }));
        }

        try (final OutputStream outputStream = Files.newOutputStream(
                outputPath.resolve(prefix + "." + format.value(options)), StandardOpenOption.CREATE_NEW);) {
            MapContent map = new MapContent();
            map.setTitle(prefix + "-" + outputSchema.getTypeName());
            Style style = SLD.createSimpleStyle(featureSource.getSchema());
            Layer layer = new FeatureLayer(new CollectionFeatureSource(outputCollection), style);
            map.addLayer(layer);
            SHPUtils.renderImage(map, outputStream, resolution.value(options), format.value(options));
        }
    }
}

From source file:com.act.lcms.db.io.ExportStandardIonResultsFromDB.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(ExportStandardIonResultsFromDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                null, true);
        return;
    }

    try (DB db = DB.openDBFromCLI(cl)) {
        List<String> chemicalNames = new ArrayList<>();
        if (cl.hasOption(OPTION_CONSTRUCT)) {
            // Extract the chemicals in the pathway and their product masses, then look up info on those chemicals
            List<Pair<ChemicalAssociatedWithPathway, Double>> productMasses = Utils
                    .extractMassesForChemicalsAssociatedWithConstruct(db, cl.getOptionValue(OPTION_CONSTRUCT));

            for (Pair<ChemicalAssociatedWithPathway, Double> pair : productMasses) {
                chemicalNames.add(pair.getLeft().getChemical());
            }
        }

        if (cl.hasOption(OPTION_CHEMICALS)) {
            chemicalNames.addAll(Arrays.asList(cl.getOptionValues(OPTION_CHEMICALS)));
        }

        if (chemicalNames.size() == 0) {
            System.err.format("No chemicals can be found from the input query.\n");
            System.exit(-1);
        }

        List<String> standardIonHeaderFields = new ArrayList<String>() {
            {
                add(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name());
                add(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name());
                add(STANDARD_ION_HEADER_FIELDS.MANUAL_PICK.name());
                add(STANDARD_ION_HEADER_FIELDS.AUTHOR.name());
                add(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name());
                add(STANDARD_ION_HEADER_FIELDS.NOTE.name());
            }
        };

        String outAnalysis;
        if (cl.hasOption(OPTION_OUTPUT_PREFIX)) {
            outAnalysis = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + "." + TSV_FORMAT;
        } else {
            outAnalysis = String.join("-", chemicalNames) + "." + TSV_FORMAT;
        }

        File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY));
        if (!lcmsDir.isDirectory()) {
            System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath());
            HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts,
                    null, true);
            System.exit(1);
        }

        String plottingDirectory = cl.getOptionValue(OPTION_PLOTTING_DIR);

        TSVWriter<String, String> resultsWriter = new TSVWriter<>(standardIonHeaderFields);
        resultsWriter.open(new File(outAnalysis));

        // For each chemical, create a TSV row and a corresponding diagnostic plot
        for (String chemicalName : chemicalNames) {
            List<String> graphLabels = new ArrayList<>();
            List<Double> yMaxList = new ArrayList<>();

            String outData = plottingDirectory + "/" + chemicalName + ".data";
            String outImg = plottingDirectory + "/" + chemicalName + ".pdf";

            // For each diagnostic plot, open a new file stream.
            try (FileOutputStream fos = new FileOutputStream(outData)) {

                List<StandardIonResult> getResultByChemicalName = StandardIonResult.getByChemicalName(db,
                        chemicalName);

                if (getResultByChemicalName != null && getResultByChemicalName.size() > 0) {

                    // PART 1: Get the best metlin ion across all standard ion results for a given chemical
                    String bestGlobalMetlinIon = AnalysisHelper
                            .scoreAndReturnBestMetlinIonFromStandardIonResults(getResultByChemicalName,
                                    new HashMap<>(), true, true);

                    // PART 2: Plot all the graphs related to the chemical. The plots are structured as follows:
                    //
                    // Page 1: All graphs (water, MeOH, Yeast) for Global ion picked (best ion among ALL standard ion runs for
                    // the given chemical) by the algorithm
                    // Page 2: All graphs for M+H
                    // Page 3: All graphs for Local ions picked (best ion within a SINGLE standard ion run) + negative controls
                    // for Yeast.
                    //
                    // Each page is demarcated by a blank graph.

                    // Arrange results based on media
                    Map<String, List<StandardIonResult>> categories = StandardIonResult
                            .categorizeListOfStandardWellsByMedia(db, getResultByChemicalName);

                    // This set contains all the best metlin ions corresponding to all the standard ion runs.
                    Set<String> bestLocalIons = new HashSet<>();
                    bestLocalIons.add(bestGlobalMetlinIon);
                    bestLocalIons.add(DEFAULT_ION);

                    for (StandardIonResult result : getResultByChemicalName) {
                        bestLocalIons.add(result.getBestMetlinIon());
                    }

                    // We sort the best local ions as follows:
                    // 1) Global best ion spectra 2) M+H spectra 3) Local best ion spectra
                    List<String> bestLocalIonsArray = new ArrayList<>(bestLocalIons);
                    Collections.sort(bestLocalIonsArray, new Comparator<String>() {
                        // Rank ions so that the global best ion sorts first, then M+H
                        // (DEFAULT_ION), then all remaining local best ions.
                        private int rank(String ion) {
                            if (ion.equals(bestGlobalMetlinIon)) {
                                return 0;
                            } else if (ion.equals(DEFAULT_ION)) {
                                return 1;
                            }
                            return 2;
                        }

                        @Override
                        public int compare(String o1, String o2) {
                            return Integer.compare(rank(o1), rank(o2));
                        }
                    });

                    // This variable stores the index in the ion array from which all remaining
                    // spectra are combined onto a single page; this happens right after the M+H ion spectra.
                    Integer combineAllSpectraIntoPageThreeFromIndex = 0;
                    for (int i = 0; i < bestLocalIonsArray.size(); i++) {
                        if (bestLocalIonsArray.get(i).equals(DEFAULT_ION)) {
                            combineAllSpectraIntoPageThreeFromIndex = i + 1;
                        }
                    }

                    for (int i = 0; i < bestLocalIonsArray.size(); i++) {

                        String ion = bestLocalIonsArray.get(i);
                        for (Map.Entry<String, List<StandardIonResult>> mediaToListOfIonResults : categories
                                .entrySet()) {

                            for (StandardIonResult result : mediaToListOfIonResults.getValue()) {

                                // For every standard ion result, we plot the best global metlin ion and M+H;
                                // those plots go on pages 1 and 2. On page 3 (the miscellaneous spectra), we
                                // only plot each result's own best local ion, never another result's. The
                                // condition below detects the page 3 case where the ion is not this result's
                                // best ion, in which case we skip it and draw nothing on the page.
                                if (i >= combineAllSpectraIntoPageThreeFromIndex
                                        && !(result.getBestMetlinIon().equals(ion))) {
                                    continue;
                                }

                                StandardWell positiveWell = StandardWell.getInstance().getById(db,
                                        result.getStandardWellId());
                                String positiveControlChemical = positiveWell.getChemical();

                                ScanData<StandardWell> encapsulatedDataForPositiveControl = AnalysisHelper
                                        .getScanDataForWell(db, lcmsDir, positiveWell, positiveControlChemical,
                                                positiveControlChemical);

                                Set<String> singletonSet = Collections.singleton(ion);
                                String additionalInfo = generateAdditionalLabelInformation(positiveWell, result,
                                        ion);

                                List<String> labels = AnalysisHelper
                                        .writeScanData(fos, lcmsDir, MAX_INTENSITY,
                                                encapsulatedDataForPositiveControl, false, false, singletonSet)
                                        .stream().map(label -> label + additionalInfo)
                                        .collect(Collectors.toList());

                                yMaxList.add(encapsulatedDataForPositiveControl.getMs1ScanResults()
                                        .getMaxIntensityForIon(ion));

                                List<String> negativeLabels = null;
                                // Only plot the negative control on the miscellaneous page (page 3), and only if the well is in yeast media.
                                if (mediaToListOfIonResults.getKey()
                                        .equals(StandardWell.MEDIA_TYPE.YEAST.name())
                                        && (i >= combineAllSpectraIntoPageThreeFromIndex
                                                && (result.getBestMetlinIon().equals(ion)))) {
                                    // TODO: In the future, change the representative negative well to the one that displays the highest noise.
                                    // For now, we just use the first index among the negative wells.
                                    int representativeIndex = 0;
                                    StandardWell representativeNegativeControlWell = StandardWell.getInstance()
                                            .getById(db, result.getNegativeWellIds().get(representativeIndex));

                                    ScanData encapsulatedDataForNegativeControl = AnalysisHelper
                                            .getScanDataForWell(db, lcmsDir, representativeNegativeControlWell,
                                                    positiveWell.getChemical(),
                                                    representativeNegativeControlWell.getChemical());

                                    String negativePlateAdditionalInfo = generateAdditionalLabelInformation(
                                            representativeNegativeControlWell, null, null);

                                    negativeLabels = AnalysisHelper.writeScanData(fos, lcmsDir, MAX_INTENSITY,
                                            encapsulatedDataForNegativeControl, false, false, singletonSet)
                                            .stream().map(label -> label + negativePlateAdditionalInfo)
                                            .collect(Collectors.toList());

                                    yMaxList.add(encapsulatedDataForNegativeControl.getMs1ScanResults()
                                            .getMaxIntensityForIon(ion));
                                }

                                graphLabels.addAll(labels);

                                if (negativeLabels != null) {
                                    graphLabels.addAll(negativeLabels);
                                }
                            }
                        }

                        // Add a blank graph to demarcate pages.
                        if (i < combineAllSpectraIntoPageThreeFromIndex) {
                            graphLabels.addAll(AnalysisHelper.writeScanData(fos, lcmsDir, 0.0, BLANK_SCAN,
                                    false, false, new HashSet<>()));
                            yMaxList.add(0.0d);
                        }
                    }

                    // We need to pass the yMax values as an array to the Gnuplotter.
                    Double fontScale = null;
                    if (cl.hasOption(FONT_SCALE)) {
                        try {
                            fontScale = Double.parseDouble(cl.getOptionValue(FONT_SCALE));
                        } catch (IllegalArgumentException e) {
                            System.err.format("Argument for font-scale must be a floating point number.\n");
                            System.exit(1);
                        }
                    }

                    Double[] yMaxes = yMaxList.toArray(new Double[yMaxList.size()]);
                    Gnuplotter plotter = fontScale == null ? new Gnuplotter() : new Gnuplotter(fontScale);
                    plotter.plot2D(outData, outImg, graphLabels.toArray(new String[graphLabels.size()]), "time",
                            null, "intensity", "pdf", null, null, yMaxes, outImg + ".gnuplot");

                    Map<String, String> row = new HashMap<>();
                    row.put(STANDARD_ION_HEADER_FIELDS.CHEMICAL.name(), chemicalName);
                    row.put(STANDARD_ION_HEADER_FIELDS.BEST_ION_FROM_ALGO.name(), bestGlobalMetlinIon);
                    row.put(STANDARD_ION_HEADER_FIELDS.DIAGNOSTIC_PLOTS.name(), outImg);

                    resultsWriter.append(row);
                    resultsWriter.flush();
                }
            }
        }

        resultsWriter.flush();
        resultsWriter.close();
    }
}

From source file:com.jwm123.loggly.reporter.AppLauncher.java

public static void main(String args[]) throws Exception {
    try {
        CommandLine cl = parseCLI(args);
        try {
            config = new Configuration();
        } catch (Exception e) {
            e.printStackTrace();
            System.err.println("ERROR: Failed to read in persisted configuration.");
        }
        if (cl.hasOption("h")) {

            HelpFormatter help = new HelpFormatter();
            String jarName = AppLauncher.class.getProtectionDomain().getCodeSource().getLocation().getFile();
            if (jarName.contains("/")) {
                jarName = jarName.substring(jarName.lastIndexOf("/") + 1);
            }
            help.printHelp("java -jar " + jarName + " [options]", opts);
        }
        if (cl.hasOption("c")) {
            config.update();
        }
        if (cl.hasOption("q")) {
            Client client = new Client(config);
            client.setQuery(cl.getOptionValue("q"));
            if (cl.hasOption("from")) {
                client.setFrom(cl.getOptionValue("from"));
            }
            if (cl.hasOption("to")) {
                client.setTo(cl.getOptionValue("to"));
            }
            List<Map<String, Object>> report = client.getReport();

            if (report != null) {
                List<Map<String, String>> reportContent = new ArrayList<Map<String, String>>();
                ReportGenerator generator = null;
                if (cl.hasOption("file")) {
                    generator = new ReportGenerator(new File(cl.getOptionValue("file")));
                }
                byte reportFile[] = null;

                if (cl.hasOption("g")) {
                    System.out.println("Search results: " + report.size());
                    Set<Object> values = new TreeSet<Object>();
                    Map<Object, Integer> counts = new HashMap<Object, Integer>();
                    for (String groupBy : cl.getOptionValues("g")) {
                        for (Map<String, Object> result : report) {
                            if (mapContains(result, groupBy)) {
                                Object value = mapGet(result, groupBy);
                                values.add(value);
                                if (counts.containsKey(value)) {
                                    counts.put(value, counts.get(value) + 1);
                                } else {
                                    counts.put(value, 1);
                                }
                            }
                        }
                        System.out.println("For key: " + groupBy);
                        for (Object value : values) {
                            System.out.println("  " + value + ": " + counts.get(value));
                        }
                    }
                    if (cl.hasOption("file")) {
                        Map<String, String> reportAddition = new LinkedHashMap<String, String>();
                        reportAddition.put("Month", MONTH_FORMAT.format(new Date()));
                        reportContent.add(reportAddition);
                        for (Object value : values) {
                            reportAddition = new LinkedHashMap<String, String>();
                            reportAddition.put(value.toString(), "" + counts.get(value));
                            reportContent.add(reportAddition);
                        }
                        reportAddition = new LinkedHashMap<String, String>();
                        reportAddition.put("Total", "" + report.size());
                        reportContent.add(reportAddition);
                    }
                } else {
                    System.out.println("The Search [" + cl.getOptionValue("q") + "] yielded " + report.size()
                            + " results.");
                    if (cl.hasOption("file")) {
                        Map<String, String> reportAddition = new LinkedHashMap<String, String>();
                        reportAddition.put("Month", MONTH_FORMAT.format(new Date()));
                        reportContent.add(reportAddition);
                        reportAddition = new LinkedHashMap<String, String>();
                        reportAddition.put("Count", "" + report.size());
                        reportContent.add(reportAddition);
                    }
                }
                if (cl.hasOption("file")) {
                    reportFile = generator.build(reportContent);
                    File reportFileObj = new File(cl.getOptionValue("file"));
                    FileUtils.writeByteArrayToFile(reportFileObj, reportFile);
                    if (cl.hasOption("e")) {
                        ReportMailer mailer = new ReportMailer(config, cl.getOptionValues("e"),
                                cl.getOptionValue("s"), reportFileObj.getName(), reportFile);
                        mailer.send();
                    }
                }
            }
        }

    } catch (IllegalArgumentException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    }
}

From source file:edu.cuhk.hccl.evaluation.EvaluationApp.java

public static void main(String[] args) throws IOException, TasteException {
    File realFile = new File(args[0]);
    File estimateFile = new File(args[1]);

    // Build real-rating map
    Map<String, long[]> realMap = buildRatingMap(realFile);

    // Build estimate-rating map
    Map<String, long[]> estimateMap = buildRatingMap(estimateFile);

    // Compare realMap with estimateMap
    Map<Integer, List<Double>> realList = new HashMap<Integer, List<Double>>();
    Map<Integer, List<Double>> estimateList = new HashMap<Integer, List<Double>>();

    // Use set to store non-duplicate pairs only
    Set<String> noRatingList = new HashSet<String>();

    for (String pair : realMap.keySet()) {
        long[] realRatings = realMap.get(pair);
        long[] estimateRatings = estimateMap.get(pair);

        if (realRatings == null || estimateRatings == null)
            continue;

        for (int i = 0; i < realRatings.length; i++) {
            long real = realRatings[i];
            long estimate = estimateRatings[i];

            // continue if the aspect rating cannot be estimated due to incomplete reviews
            if (estimate <= 0) {
                noRatingList.add(pair.replace("@", "\t"));
                continue;
            }

            if (real > 0 && estimate > 0) {
                if (!realList.containsKey(i))
                    realList.put(i, new ArrayList<Double>());

                realList.get(i).add((double) real);

                if (!estimateList.containsKey(i))
                    estimateList.put(i, new ArrayList<Double>());

                estimateList.get(i).add((double) estimate);
            }
        }
    }

    System.out.println("[INFO] RMSE, MAE for estimate ratings: ");
    System.out.println("------------------------------");
    System.out.println("Index \t RMSE \t MAE");
    for (int i = 1; i < 6; i++) {
        double rmse = Metric.computeRMSE(realList.get(i), estimateList.get(i));
        double mae = Metric.computeMAE(realList.get(i), estimateList.get(i));

        System.out.printf("%d \t %.3f \t %.3f \n", i, rmse, mae);
    }

    System.out.println("------------------------------");

    if (noRatingList.size() > 0) {
        String noRatingFileName = "evaluation-no-ratings.txt";
        FileUtils.writeLines(new File(noRatingFileName), noRatingList, false);

        System.out.println("[INFO] User-item pairs with no ratings are saved in file: " + noRatingFileName);
    } else {
        System.out.println("[INFO] All user-item pairs have ratings.");
    }
}

From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java

public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000;
    final int nDist = 10000;
    //prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    //prepare distances
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    //iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            //build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            //build configuration and evolver
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperName.split("-")[1]);
            int mapperMainParam = Integer.parseInt(mapperName.split("-")[2]);
            if (mapperName.split("-")[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            //prepare things
            Random random = new Random(1);
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            //build genotypes
            if (mapperName.split("-")[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            //build and fill map
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperName.split("-")[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                if (progress % Math.round(n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            //compute distances
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            //compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            if (multimap.keySet().contains(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1;
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances)
                    / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            if (invalidIndex != -1) {
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex,
                        sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances,
                        invalidIndex, meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            //compute locality
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, locality);
            filePrintStream.printf("%s;%s;%d;%d;validLLocality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    if (filePrintStream != null) {
        filePrintStream.close();
    }
}

From source file:org.eclipse.userstorage.tests.util.USSServer.java

public static void main(String[] args) throws Exception {
    Log.setLog(new NOOPLogger());

    USSServer server = new USSServer(8080, new File(System.getProperty("java.io.tmpdir"), "uss-server"));
    server.addUser(FixedCredentialsProvider.DEFAULT_CREDENTIALS);

    Set<String> applicationTokens = server.getApplicationTokens();
    applicationTokens.add(StorageTests.APPLICATION_TOKEN);
    applicationTokens.add("cNhDr0INs8T109P8h6E1r_GvU3I"); // Oomph

    System.out.println(server.getFolder());
    System.out.println("Listening on port " + server.start());
    server.join();
}

From source file:ListAlgorithms.java

public static void main(String[] args) {
    Provider[] providers = Security.getProviders();
    Set<String> ciphers = new HashSet<String>();
    Set<String> keyAgreements = new HashSet<String>();
    Set<String> macs = new HashSet<String>();
    Set<String> messageDigests = new HashSet<String>();
    Set<String> signatures = new HashSet<String>();

    for (int i = 0; i != providers.length; i++) {
        Iterator it = providers[i].keySet().iterator();

        while (it.hasNext()) {
            String entry = (String) it.next();

            if (entry.startsWith("Alg.Alias.")) {
                entry = entry.substring("Alg.Alias.".length());
            }

            if (entry.startsWith("Cipher.")) {
                ciphers.add(entry.substring("Cipher.".length()));
            } else if (entry.startsWith("KeyAgreement.")) {
                keyAgreements.add(entry.substring("KeyAgreement.".length()));
            } else if (entry.startsWith("Mac.")) {
                macs.add(entry.substring("Mac.".length()));
            } else if (entry.startsWith("MessageDigest.")) {
                messageDigests.add(entry.substring("MessageDigest.".length()));
            } else if (entry.startsWith("Signature.")) {
                signatures.add(entry.substring("Signature.".length()));
            }
        }
    }

    printSet("Ciphers", ciphers);
    printSet("KeyAgreeents", keyAgreements);
    printSet("Macs", macs);
    printSet("MessageDigests", messageDigests);
    printSet("Signatures", signatures);
}

From source file:org.ptm.translater.App.java

public static void main(String... args) {
    GenericXmlApplicationContext ctx = new GenericXmlApplicationContext();
    ctx.load("file:src/main/resources/spring/datasource.xml");
    ctx.refresh();

    GenericXmlApplicationContext ctx2 = new GenericXmlApplicationContext();
    ctx2.load("file:src/main/resources/spring/datasource2.xml");
    ctx2.refresh();

    ArchiveDao archiveDao = ctx.getBean("archiveDao", ArchiveDao.class);
    List<Archive> archives = archiveDao.findAll();

    UserDao userDao = ctx2.getBean("userDao", UserDao.class);
    TagDao tagDao = ctx2.getBean("tagDao", TagDao.class);
    PhotoDao photoDao = ctx2.getBean("photoDao", PhotoDao.class);

    List<Tag> tagz = tagDao.findAll();
    Map<String, Long> hashTags = new HashMap<String, Long>();
    for (Tag tag : tagz)
        hashTags.put(tag.getName(), tag.getId());

    MongoCache cache = new MongoCache();
    Calendar calendar = Calendar.getInstance();

    Map<String, String> associates = new HashMap<String, String>();

    for (Archive archive : archives) {
        AppUser appUser = new AppUser();
        appUser.setName(archive.getName());
        appUser.setEmail(archive.getUid() + "@mail.th");
        appUser.setPassword("123456");

        Role role = new Role();
        role.setRoleId("ROLE_USER");
        appUser.setRole(role);

        userDao.save(appUser);
        System.out.println("\tCreate user " + appUser);

        for (Photo photo : archive.getPhotos()) {
            // Skip photos that have already been processed
            if (cache.contains(photo.getUid()))
                continue;

            System.out.println("\tNew photo");
            org.ptm.translater.ch2.domain.Photo photo2 = new org.ptm.translater.ch2.domain.Photo();
            photo2.setAppUser(appUser);
            photo2.setName(photo.getTitle());
            photo2.setLicense((byte) 7);
            photo2.setDescription(photo.getDescription());

            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            try {
                calendar.setTime(sdf.parse(photo.getTaken()));

                if (calendar.get(Calendar.YEAR) != 0 && calendar.get(Calendar.YEAR) > 1998)
                    continue;
                photo2.setYear(calendar.get(Calendar.YEAR));
                photo2.setMonth(calendar.get(Calendar.MONTH) + 1);
                photo2.setDay(calendar.get(Calendar.DAY_OF_MONTH));
            } catch (Exception ex) {
                ex.printStackTrace();
            }

            if (photo.getLongitude() != null && photo.getLongitude().length() > 0) {
                //                    String key = photo.getLongitude()+"#"+photo.getLatitude();
                photo2.setLatitude(photo.getLatitude());
                photo2.setLongitude(photo.getLongitude());
                //                    if (associates.containsKey(key)) {
                //                        photo2.setAddress(associates.get(key));
                //                    } else {
                //                        Geocoder geocoder = new Geocoder();
                //                        GeocoderRequestBuilder geocoderRequest = new GeocoderRequestBuilder();
                //                        GeocoderRequest request =
                //                            geocoderRequest.setLocation(new LatLng(photo.getLongitude(), photo.getLatitude())).getGeocoderRequest();
                //
                //                        GeocodeResponse response = geocoder.geocode(request);
                //                        if (response.getResults().size() > 0) {
                //                            photo2.setAddress(response.getResults().get(0).getFormattedAddress());
                //                        }
                //                        try { Thread.sleep(2000); } catch (InterruptedException ex) { ex.printStackTrace(); }
                //                    }
            }

            System.out.println("\tFind tags");
            Set<Tag> tags = new HashSet<Tag>();
            for (org.ptm.translater.ch1.domain.Tag tag : photo.getTags()) {
                Tag item = new Tag();
                item.setName(tag.getName());
                if (hashTags.containsKey(tag.getName())) {
                    item.setId(hashTags.get(tag.getName()));
                } else {
                    tagDao.save(item);
                    hashTags.put(item.getName(), item.getId());
                }
                System.out.println("\t\tinit tag " + tag.getName());
                tags.add(item);
            }
            photo2.setTags(tags);
            System.out.println("\tFind " + tags.size() + " tags");
            photoDao.save(photo2);
            System.out.println("\tSave photo");

            Imaginator img = new Imaginator();
            img.setFolder(photo2.getId().toString());
            img.setPath();

            for (PhotoSize ps : photo.getSizes()) {
                if (ps.getLabel().equals("Original")) {
                    img.setImage(ps.getSource());
                    break;
                }
            }
            img.generate();
            System.out.println("\tGenerate image of photo");
            img = null;
            cache.create(photo.getUid());
            cache.create(photo2);

            System.out.println("Generate: " + photo2);
        }
    }
}

From source file:net.sourceforge.doddle_owl.data.JpnWordNetDic.java

public static void main(String[] args) throws Exception {
    JpnWordNetDic.initJPNWNDic();
    String id1 = "08675967-n";
    // String id1 = "JPNWN_ROOT";

    Concept c = JpnWordNetDic.getConcept(id1);
    System.out.println(c);

    Set<String> idSet = new HashSet<String>();
    BufferedReader reader = new BufferedReader(
            new InputStreamReader(new FileInputStream(DODDLEConstants.JPWN_HOME + "tree.data"), "UTF-8"));
    while (reader.ready()) {
        String line = reader.readLine();
        if (line.indexOf(id1) != -1) {
            String id = line.split("\t\\|")[0];
            idSet.add(id);
        }
    }
    System.out.println(idSet);
    for (String id : idSet) {
        c = JpnWordNetDic.getConcept(id);
        System.out.println(c);
    }
}