Example usage for java.util.Map.put

List of usage examples for java.util.Map.put

Introduction

This page shows example usages of the java.util.Map.put method, collected from a number of open-source projects.

Prototype

V put(K key, V value);

Document

Associates the specified value with the specified key in this map (optional operation).
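
As the prototype above indicates, put returns the value previously associated with the key, or null if there was no mapping. A minimal sketch of that contract (the class name is illustrative):

import java.util.HashMap;
import java.util.Map;

public class PutReturnValueDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();

        // No previous mapping for "a", so put returns null.
        Integer previous = counts.put("a", 1);
        System.out.println(previous); // null

        // Replacing an existing mapping returns the old value.
        previous = counts.put("a", 2);
        System.out.println(previous); // 1
        System.out.println(counts.get("a")); // 2
    }
}

Note that a null return can also mean the key was explicitly mapped to null, in implementations that permit null values.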

Usage

From source file:edu.isi.karma.research.modeling.ModelLearner_LOD.java

public static void main(String[] args) throws Exception {

    ServletContextParameterMap contextParameters = ContextParametersRegistry.getInstance().getDefault();
    contextParameters.setParameterValue(ContextParameter.USER_CONFIG_DIRECTORY, "/Users/mohsen/karma/config");

    OntologyManager ontologyManager = new OntologyManager(contextParameters.getId());
    File ff = new File(Params.ONTOLOGY_DIR);
    File[] files = ff.listFiles();
    if (files == null) {
        logger.error("no ontology to import at " + ff.getAbsolutePath());
        return;
    }

    for (File f : files) {
        if (f.getName().endsWith(".owl") || f.getName().endsWith(".rdf") || f.getName().endsWith(".n3")
                || f.getName().endsWith(".ttl") || f.getName().endsWith(".xml")) {
            logger.info("Loading ontology file: " + f.getAbsolutePath());
            ontologyManager.doImport(f, "UTF-8");
        }
    }
    ontologyManager.updateCache();

    String outputPath = Params.OUTPUT_DIR;
    String graphPath = Params.GRAPHS_DIR;

    FileUtils.cleanDirectory(new File(graphPath));

    List<SemanticModel> semanticModels = ModelReader.importSemanticModelsFromJsonFiles(Params.MODEL_DIR,
            Params.MODEL_MAIN_FILE_EXT);

    ModelLearner_LOD modelLearner = null;

    boolean onlyGenerateSemanticTypeStatistics = false;
    boolean onlyUseOntology = false;
    boolean useCorrectType = false;
    int numberOfCandidates = 4;
    boolean onlyEvaluateInternalLinks = false;
    int maxPatternSize = 3;

    if (onlyGenerateSemanticTypeStatistics) {
        getStatistics(semanticModels);
        return;
    }

    String filePath = Params.RESULTS_DIR + "temp/";
    String filename = "";

    filename += "lod-results";
    filename += useCorrectType ? "-correct" : "-k=" + numberOfCandidates;
    filename += onlyUseOntology ? "-ontology" : "-p" + maxPatternSize;
    filename += onlyEvaluateInternalLinks ? "-internal" : "-all";
    filename += ".csv";

    PrintWriter resultFile = new PrintWriter(new File(filePath + filename));

    resultFile.println("source \t p \t r \t t \n");

    for (int i = 0; i < semanticModels.size(); i++) {
        //      for (int i = 0; i <= 10; i++) {
        //      int i = 1; {

        int newSourceIndex = i;
        SemanticModel newSource = semanticModels.get(newSourceIndex);

        logger.info("======================================================");
        logger.info(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        System.out.println(newSource.getName() + "(#attributes:" + newSource.getColumnNodes().size() + ")");
        logger.info("======================================================");

        SemanticModel correctModel = newSource;
        List<ColumnNode> columnNodes = correctModel.getColumnNodes();

        List<Node> steinerNodes = new LinkedList<Node>(columnNodes);

        String graphName = graphPath + "lod" + Params.GRAPH_FILE_EXT;

        if (onlyUseOntology) {
            modelLearner = new ModelLearner_LOD(new GraphBuilder(ontologyManager, false), steinerNodes);
        } else if (new File(graphName).exists()) {
            // read graph from file
            try {
                logger.info("loading the graph ...");
                DirectedWeightedMultigraph<Node, DefaultLink> graph = GraphUtil.importJson(graphName);
                modelLearner = new ModelLearner_LOD(new GraphBuilderTopK(ontologyManager, graph), steinerNodes);
            } catch (Exception e) {
                e.printStackTrace();
                resultFile.close();
                return;
            }
        } else {
            logger.info("building the graph ...");
            // create and save the graph to file
            //            GraphBuilder_Popularity b = new GraphBuilder_Popularity(ontologyManager, 
            //                  Params.LOD_OBJECT_PROPERIES_FILE, 
            //                  Params.LOD_DATA_PROPERIES_FILE);
            GraphBuilder_LOD_Pattern b = new GraphBuilder_LOD_Pattern(ontologyManager, Params.PATTERNS_DIR,
                    maxPatternSize);
            modelLearner = new ModelLearner_LOD(b.getGraphBuilder(), steinerNodes);
        }

        long start = System.currentTimeMillis();

        List<SortableSemanticModel> hypothesisList = modelLearner.hypothesize(useCorrectType,
                numberOfCandidates);

        long elapsedTimeMillis = System.currentTimeMillis() - start;
        float elapsedTimeSec = elapsedTimeMillis / 1000F;

        List<SortableSemanticModel> topHypotheses = null;
        if (hypothesisList != null) {

            //            for (SortableSemanticModel sss : hypothesisList) {
            //               ModelEvaluation mmm = sss.evaluate(correctModel);
            //               System.out.println(mmm.getPrecision() + ", " + mmm.getRecall());
            //            }
            topHypotheses = hypothesisList.size() > 10 ? hypothesisList.subList(0, 10) : hypothesisList;
        }

        Map<String, SemanticModel> models = new TreeMap<String, SemanticModel>();

        ModelEvaluation me;
        models.put("1-correct model", correctModel);
        if (topHypotheses != null)
            for (int k = 0; k < topHypotheses.size(); k++) {

                SortableSemanticModel m = topHypotheses.get(k);

                me = m.evaluate(correctModel, onlyEvaluateInternalLinks, false);

                String label = "candidate " + k + "\n" +
                //                     (m.getSteinerNodes() == null ? "" : m.getSteinerNodes().getScoreDetailsString()) +
                        "link coherence:"
                        + (m.getLinkCoherence() == null ? "" : m.getLinkCoherence().getCoherenceValue()) + "\n";
                label += (m.getSteinerNodes() == null || m.getSteinerNodes().getCoherence() == null) ? ""
                        : "node coherence:" + m.getSteinerNodes().getCoherence().getCoherenceValue() + "\n";
                label += "confidence:" + m.getConfidenceScore() + "\n";
                label += m.getSteinerNodes() == null ? ""
                        : "mapping score:" + m.getSteinerNodes().getScore() + "\n";
                label += "cost:" + roundDecimals(m.getCost(), 6) + "\n" +
                //                        "-distance:" + me.getDistance() + 
                        "-precision:" + me.getPrecision() + "-recall:" + me.getRecall();

                models.put(label, m);

                if (k == 0) { // first rank model
                    System.out.println("precision: " + me.getPrecision() + ", recall: " + me.getRecall()
                            + ", time: " + elapsedTimeSec);
                    logger.info("precision: " + me.getPrecision() + ", recall: " + me.getRecall() + ", time: "
                            + elapsedTimeSec);
                    String s = newSource.getName() + "\t" + me.getPrecision() + "\t" + me.getRecall() + "\t"
                            + elapsedTimeSec;
                    resultFile.println(s);

                }
            }

        String outName = outputPath + newSource.getName() + Params.GRAPHVIS_OUT_DETAILS_FILE_EXT;

        GraphVizUtil.exportSemanticModelsToGraphviz(models, newSource.getName(), outName,
                GraphVizLabelType.LocalId, GraphVizLabelType.LocalUri, true, true);

    }

    resultFile.close();

}
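
A detail worth noting in the example above: models is a TreeMap, so the entries added with put are iterated in sorted key order, which is why the "1-correct model" label comes out ahead of the "candidate ..." labels when the models are exported. A minimal sketch of that ordering (the labels are illustrative, not Karma's):

import java.util.Map;
import java.util.TreeMap;

public class TreeMapOrderDemo {
    public static void main(String[] args) {
        Map<String, String> models = new TreeMap<>();

        // Insertion order is irrelevant; TreeMap iterates keys lexicographically.
        models.put("candidate 1", "second hypothesis");
        models.put("1-correct model", "gold standard");
        models.put("candidate 0", "first hypothesis");

        // "1-correct model" prints first because '1' sorts before 'c'.
        models.forEach((label, model) -> System.out.println(label + " -> " + model));
    }
}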

From source file:com.joliciel.talismane.terminology.Main.java

public static void main(String[] args) throws Exception {
    String termFilePath = null;
    String outFilePath = null;
    Command command = Command.extract;
    int depth = -1;
    String databasePropertiesPath = null;
    String projectCode = null;

    Map<String, String> argMap = TalismaneConfig.convertArgs(args);

    String logConfigPath = argMap.get("logConfigFile");
    if (logConfigPath != null) {
        argMap.remove("logConfigFile");
        Properties props = new Properties();
        props.load(new FileInputStream(logConfigPath));
        PropertyConfigurator.configure(props);
    }

    Map<String, String> innerArgs = new HashMap<String, String>();
    for (Entry<String, String> argEntry : argMap.entrySet()) {
        String argName = argEntry.getKey();
        String argValue = argEntry.getValue();

        if (argName.equals("command"))
            command = Command.valueOf(argValue);
        else if (argName.equals("termFile"))
            termFilePath = argValue;
        else if (argName.equals("outFile"))
            outFilePath = argValue;
        else if (argName.equals("depth"))
            depth = Integer.parseInt(argValue);
        else if (argName.equals("databaseProperties"))
            databasePropertiesPath = argValue;
        else if (argName.equals("projectCode"))
            projectCode = argValue;
        else
            innerArgs.put(argName, argValue);
    }
    if (termFilePath == null && databasePropertiesPath == null)
        throw new TalismaneException("Required argument: termFile or databasePropertiesPath");

    if (termFilePath != null) {
        String currentDirPath = System.getProperty("user.dir");
        File termFileDir = new File(currentDirPath);
        if (termFilePath.lastIndexOf("/") >= 0) {
            String termFileDirPath = termFilePath.substring(0, termFilePath.lastIndexOf("/"));
            termFileDir = new File(termFileDirPath);
            termFileDir.mkdirs();
        }
    }

    long startTime = new Date().getTime();
    try {
        TerminologyServiceLocator terminologyServiceLocator = TerminologyServiceLocator.getInstance();
        TerminologyService terminologyService = terminologyServiceLocator.getTerminologyService();
        TerminologyBase terminologyBase = null;

        if (projectCode == null)
            throw new TalismaneException("Required argument: projectCode");

        File file = new File(databasePropertiesPath);
        FileInputStream fis = new FileInputStream(file);
        Properties dataSourceProperties = new Properties();
        dataSourceProperties.load(fis);
        terminologyBase = terminologyService.getPostGresTerminologyBase(projectCode, dataSourceProperties);

        if (command.equals(Command.analyse) || command.equals(Command.extract)) {
            if (depth < 0)
                throw new TalismaneException("Required argument: depth");

            if (command.equals(Command.analyse)) {
                innerArgs.put("command", "analyse");
            } else {
                innerArgs.put("command", "process");
            }

            TalismaneFrench talismaneFrench = new TalismaneFrench();
            TalismaneConfig config = new TalismaneConfig(innerArgs, talismaneFrench);

            PosTagSet tagSet = TalismaneSession.getPosTagSet();
            Charset outputCharset = config.getOutputCharset();

            TermExtractor termExtractor = terminologyService.getTermExtractor(terminologyBase);
            termExtractor.setMaxDepth(depth);
            termExtractor.setOutFilePath(termFilePath);
            termExtractor.getIncludeChildren().add(tagSet.getPosTag("P"));
            termExtractor.getIncludeChildren().add(tagSet.getPosTag("P+D"));
            termExtractor.getIncludeChildren().add(tagSet.getPosTag("CC"));

            termExtractor.getIncludeWithParent().add(tagSet.getPosTag("DET"));

            if (outFilePath != null) {
                if (outFilePath.lastIndexOf("/") >= 0) {
                    String outFileDirPath = outFilePath.substring(0, outFilePath.lastIndexOf("/"));
                    File outFileDir = new File(outFileDirPath);
                    outFileDir.mkdirs();
                }
                File outFile = new File(outFilePath);
                outFile.delete();
                outFile.createNewFile();

                Writer writer = new BufferedWriter(
                        new OutputStreamWriter(new FileOutputStream(outFilePath), outputCharset));
                TermAnalysisWriter termAnalysisWriter = new TermAnalysisWriter(writer);
                termExtractor.addTermObserver(termAnalysisWriter);
            }

            Talismane talismane = config.getTalismane();
            talismane.setParseConfigurationProcessor(termExtractor);
            talismane.process();
        } else if (command.equals(Command.list)) {

            List<Term> terms = terminologyBase.getTermsByFrequency(2);
            for (Term term : terms) {
                LOG.debug("Term: " + term.getText());
                LOG.debug("Frequency: " + term.getFrequency());
                LOG.debug("Heads: " + term.getHeads());
                LOG.debug("Expansions: " + term.getExpansions());
                LOG.debug("Contexts: " + term.getContexts());
            }
        }
    } finally {
        long endTime = new Date().getTime();
        long totalTime = endTime - startTime;
        LOG.info("Total time: " + totalTime);
    }
}
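
The argument loop above shows a common put idiom: options this entry point understands are consumed into local variables, and everything else is put into innerArgs and forwarded to the wrapped component. A stripped-down sketch of the pattern, assuming nothing about the Talismane API:

import java.util.HashMap;
import java.util.Map;

public class ForwardUnknownArgsDemo {
    public static void main(String[] args) {
        Map<String, String> argMap = new HashMap<>();
        argMap.put("command", "extract");
        argMap.put("depth", "3");
        argMap.put("encoding", "UTF-8"); // not handled here, forwarded below

        Map<String, String> innerArgs = new HashMap<>();
        String command = null;
        int depth = -1;

        for (Map.Entry<String, String> e : argMap.entrySet()) {
            if (e.getKey().equals("command")) {
                command = e.getValue();
            } else if (e.getKey().equals("depth")) {
                depth = Integer.parseInt(e.getValue());
            } else {
                innerArgs.put(e.getKey(), e.getValue()); // pass through untouched
            }
        }
        System.out.println(command + " " + depth + " " + innerArgs);
    }
}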

From source file:ViewImageTest.java

/**
 * Test image(s) (default : JPEG_example_JPG_RIP_100.jpg) are parsed and
 * rendered to an output folder. The result can then be checked with a program of
 * your choice.
 * 
 * @param args
 *            may be empty or contain parameters to override defaults :
 *            <ul>
 *            <li>args[0] : input image file URL or folder containing image
 *            files URL. Default :
 *            viewImageTest/test/JPEG_example_JPG_RIP_100.jpg</li>
 *            <li>args[1] : output format name (for example : "jpg") for
 *            rendered image</li>
 *            <li>args[2] : output folder URL</li>
 *            <li>args[3] : max width (in pixels) for rendered image.
 *            Default : no value.</li>
 *            <li>args[4] : max height (in pixels) for rendered image.
 *            Default : no value.</li>
 *            </ul>
 * @throws IOException
 *             when a read/write error occurred
 */
public static void main(String args[]) throws IOException {
    File inURL = getInputURL(args);
    String ext = getEncodingExt(args);
    File outDir = getOuputDir(args);
    serverObjects post = makePostParams(args);
    outDir.mkdirs();

    File[] inFiles;
    if (inURL.isFile()) {
        inFiles = new File[1];
        inFiles[0] = inURL;
        System.out.println("Testing ViewImage rendering with input file : " + inURL.getAbsolutePath()
                + " encoded To : " + ext);
    } else if (inURL.isDirectory()) {
        FileFilter filter = FileFileFilter.FILE;
        inFiles = inURL.listFiles(filter);
        System.out.println("Testing ViewImage rendering with input files in folder : " + inURL.getAbsolutePath()
                + " encoded To : " + ext);
    } else {
        inFiles = new File[0];
    }
    if (inFiles.length == 0) {
        throw new IllegalArgumentException(inURL.getAbsolutePath() + " is not a valid file or folder url.");
    }
    System.out.println("Rendered images will be written in dir : " + outDir.getAbsolutePath());

    Map<String, Exception> failures = new HashMap<String, Exception>();
    try {
        for (File inFile : inFiles) {
            /* Delete any previous result file */
            File outFile = new File(outDir, inFile.getName() + "." + ext);
            if (outFile.exists()) {
                outFile.delete();
            }

            byte[] resourceb = getBytes(inFile);
            String urlString = inFile.getAbsolutePath();
            EncodedImage img = null;
            Exception error = null;
            try {
                img = ViewImage.parseAndScale(post, true, urlString, ext, false, resourceb);
            } catch (Exception e) {
                error = e;
            }

            if (img == null) {
                failures.put(urlString, error);
            } else {
                FileOutputStream outFileStream = null;
                try {
                    outFileStream = new FileOutputStream(outFile);
                    img.getImage().writeTo(outFileStream);
                } finally {
                    if (outFileStream != null) {
                        outFileStream.close();
                    }
                    img.getImage().close();
                }
            }
        }
        displayResults(inFiles, failures);
    } finally {
        ConcurrentLog.shutdown();
    }

}
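
In the test above, failures.put(urlString, error) may store a null value (parseAndScale can return null without throwing). HashMap permits null values, but get() then cannot distinguish "no entry" from "entry mapped to null"; containsKey makes that distinction. A small sketch, independent of the ViewImage API:

import java.util.HashMap;
import java.util.Map;

public class NullValueDemo {
    public static void main(String[] args) {
        Map<String, Exception> failures = new HashMap<>();
        failures.put("file-a.jpg", new IllegalStateException("decode error"));
        failures.put("file-b.jpg", null); // failed, but no exception captured

        // get() returns null in both of these cases...
        System.out.println(failures.get("file-b.jpg")); // null (mapped to null)
        System.out.println(failures.get("file-c.jpg")); // null (no mapping)

        // ...so use containsKey to tell them apart.
        System.out.println(failures.containsKey("file-b.jpg")); // true
        System.out.println(failures.containsKey("file-c.jpg")); // false
    }
}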

From source file:de.tudarmstadt.ukp.experiments.argumentation.convincingness.sampling.Step7aLearningDataProducer.java

@SuppressWarnings("unchecked")
public static void main(String[] args) throws IOException {
    String inputDir = args[0];
    File outputDir = new File(args[1]);

    if (!outputDir.exists()) {
        outputDir.mkdirs();
    }

    Collection<File> files = IOHelper.listXmlFiles(new File(inputDir));

    // for generating ConvArgStrict use this
    String prefix = "no-eq_DescendingScoreArgumentPairListSorter";

    // for oversampling using graph transitivity properties use this
    //        String prefix = "generated_no-eq_AscendingScoreArgumentPairListSorter";

    Iterator<File> iterator = files.iterator();
    while (iterator.hasNext()) {
        File file = iterator.next();

        if (!file.getName().startsWith(prefix)) {
            iterator.remove();
        }
    }

    int totalGoldPairsCounter = 0;

    Map<String, Integer> goldDataDistribution = new HashMap<>();

    DescriptiveStatistics statsPerTopic = new DescriptiveStatistics();

    int totalPairsWithReasonSameAsGold = 0;

    DescriptiveStatistics ds = new DescriptiveStatistics();

    for (File file : files) {
        List<AnnotatedArgumentPair> argumentPairs = (List<AnnotatedArgumentPair>) XStreamTools.getXStream()
                .fromXML(file);

        int pairsPerTopicCounter = 0;

        String name = file.getName().replaceAll(prefix, "").replaceAll("\\.xml", "");

        PrintWriter pw = new PrintWriter(new File(outputDir, name + ".csv"), "utf-8");

        pw.println("#id\tlabel\ta1\ta2");

        for (AnnotatedArgumentPair argumentPair : argumentPairs) {
            String goldLabel = argumentPair.getGoldLabel();

            if (!goldDataDistribution.containsKey(goldLabel)) {
                goldDataDistribution.put(goldLabel, 0);
            }

            goldDataDistribution.put(goldLabel, goldDataDistribution.get(goldLabel) + 1);

            pw.printf(Locale.ENGLISH, "%s\t%s\t%s\t%s%n", argumentPair.getId(), goldLabel,
                    multipleParagraphsToSingleLine(argumentPair.getArg1().getText()),
                    multipleParagraphsToSingleLine(argumentPair.getArg2().getText()));

            pairsPerTopicCounter++;

            int sameInOnePair = 0;

            // get gold reason statistics
            for (AnnotatedArgumentPair.MTurkAssignment assignment : argumentPair.mTurkAssignments) {
                String label = assignment.getValue();

                if (goldLabel.equals(label)) {
                    sameInOnePair++;
                }
            }

            ds.addValue(sameInOnePair);
            totalPairsWithReasonSameAsGold += sameInOnePair;
        }

        totalGoldPairsCounter += pairsPerTopicCounter;
        statsPerTopic.addValue(pairsPerTopicCounter);

        pw.close();
    }

    System.out.println("Total reasons matching gold " + totalPairsWithReasonSameAsGold);
    System.out.println(ds);

    System.out.println("Total gold pairs: " + totalGoldPairsCounter);
    System.out.println(statsPerTopic);

    int totalPairs = 0;
    for (Integer pairs : goldDataDistribution.values()) {
        totalPairs += pairs;
    }
    System.out.println("Total pairs: " + totalPairs);
    System.out.println(goldDataDistribution);

}
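
The containsKey/put/get sequence that maintains goldDataDistribution above is the classic pre-Java-8 counting idiom. Since Java 8, Map.merge collapses it into a single call; a minimal sketch:

import java.util.HashMap;
import java.util.Map;

public class CountingIdiomDemo {
    public static void main(String[] args) {
        String[] goldLabels = { "a1", "a2", "a1", "equal", "a1" };
        Map<String, Integer> distribution = new HashMap<>();

        for (String label : goldLabels) {
            // Puts 1 if the key is absent, otherwise replaces the value with old + 1.
            distribution.merge(label, 1, Integer::sum);
        }
        // e.g. {a1=3, a2=1, equal=1} (HashMap iteration order is unspecified)
        System.out.println(distribution);
    }
}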

From source file:org.kuali.student.git.importer.ConvertBuildTagBranchesToGitTags.java

/**
 * @param args
 */
public static void main(String[] args) {

    if (args.length < 3 || args.length > 6) {
        System.err.println("USAGE: <git repository> <bare> <ref mode> [<ref prefix> <username> <password>]");
        System.err.println("\t<bare> : 0 (false) or 1 (true)");
        System.err.println("\t<ref mode> : local or name of remote");
        System.err.println("\t<ref prefix> : refs/heads (default) or say refs/remotes/origin (test clone)");
        System.exit(-1);
    }

    boolean bare = false;

    if (args[1].trim().equals("1")) {
        bare = true;
    }

    String remoteName = args[2].trim();

    String refPrefix = Constants.R_HEADS;

    if (args.length == 4)
        refPrefix = args[3].trim();

    String userName = null;
    String password = null;

    if (args.length == 5)
        userName = args[4].trim();

    if (args.length == 6)
        password = args[5].trim();

    try {

        Repository repo = GitRepositoryUtils.buildFileRepository(new File(args[0]).getAbsoluteFile(), false,
                bare);

        Git git = new Git(repo);

        ObjectInserter objectInserter = repo.newObjectInserter();

        Collection<Ref> repositoryHeads = repo.getRefDatabase().getRefs(refPrefix).values();

        RevWalk rw = new RevWalk(repo);

        Map<String, ObjectId> tagNameToTagId = new HashMap<>();

        Map<String, Ref> tagNameToRef = new HashMap<>();

        for (Ref ref : repositoryHeads) {

            String branchName = ref.getName().substring(refPrefix.length() + 1);

            if (branchName.contains("tag") && branchName.contains("builds")) {

                String branchParts[] = branchName.split("_");

                int buildsIndex = ArrayUtils.indexOf(branchParts, "builds");

                String moduleName = StringUtils.join(branchParts, "_", buildsIndex + 1, branchParts.length);

                RevCommit commit = rw.parseCommit(ref.getObjectId());

                ObjectId tag = GitRefUtils.insertTag(moduleName, commit, objectInserter);

                tagNameToTagId.put(moduleName, tag);

                tagNameToRef.put(moduleName, ref);

            }

        }

        BatchRefUpdate batch = repo.getRefDatabase().newBatchUpdate();

        List<RefSpec> branchesToDelete = new ArrayList<>();

        for (Entry<String, ObjectId> entry : tagNameToTagId.entrySet()) {

            String tagName = entry.getKey();

            // create the reference to the tag object
            batch.addCommand(
                    new ReceiveCommand(null, entry.getValue(), Constants.R_TAGS + tagName, Type.CREATE));

            // delete the original branch object

            Ref branch = tagNameToRef.get(entry.getKey());

            if (remoteName.equals("local")) {

                batch.addCommand(new ReceiveCommand(branch.getObjectId(), null, branch.getName(), Type.DELETE));

            } else {
                String adjustedBranchName = branch.getName().substring(refPrefix.length() + 1);

                branchesToDelete.add(new RefSpec(":" + Constants.R_HEADS + adjustedBranchName));
            }

        }

        // create the tags
        batch.execute(rw, new TextProgressMonitor());

        if (!remoteName.equals("local")) {
            // push the tag to the remote right now
            PushCommand pushCommand = git.push().setRemote(remoteName).setPushTags()
                    .setProgressMonitor(new TextProgressMonitor());

            if (userName != null)
                pushCommand.setCredentialsProvider(new UsernamePasswordCredentialsProvider(userName, password));

            Iterable<PushResult> results = pushCommand.call();

            for (PushResult pushResult : results) {

                if (!pushResult.equals(Result.NEW)) {
                    log.warn("failed to push tag " + pushResult.getMessages());
                }
            }

            // delete the branches from the remote
            results = git.push().setRemote(remoteName).setRefSpecs(branchesToDelete)
                    .setProgressMonitor(new TextProgressMonitor()).call();

            log.info("");

        }

        //         Result result = GitRefUtils.createTagReference(repo, moduleName, tag);
        //         
        //         if (!result.equals(Result.NEW)) {
        //            log.warn("failed to create tag {} for branch {}", moduleName, branchName);
        //            continue;
        //         }
        //         
        //         if (deleteMode) {
        //         result = GitRefUtils.deleteRef(repo, ref);
        //   
        //         if (!result.equals(Result.NEW)) {
        //            log.warn("failed to delete branch {}", branchName);
        //            continue;
        //         }

        objectInserter.release();

        rw.release();

    } catch (Exception e) {

        log.error("unexpected Exception ", e);
    }
}
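
The importer above maintains two parallel maps, tagNameToTagId and tagNameToRef, keyed by the same module name. When the two values are always put together, a single map to a small value object keeps the pair consistent by construction; a hedged sketch (TagInfo is invented here, not part of the Kuali codebase):

import java.util.HashMap;
import java.util.Map;

public class ParallelMapsDemo {

    // Hypothetical value object pairing what the two maps held separately.
    static final class TagInfo {
        final String tagId; // stands in for ObjectId
        final String ref;   // stands in for Ref

        TagInfo(String tagId, String ref) {
            this.tagId = tagId;
            this.ref = ref;
        }
    }

    public static void main(String[] args) {
        Map<String, TagInfo> tags = new HashMap<>();

        // One put keeps both values in step; no second map can drift out of sync.
        tags.put("module-a", new TagInfo("0a1b2c", "refs/heads/tag_builds_module-a"));

        TagInfo info = tags.get("module-a");
        System.out.println(info.tagId + " / " + info.ref);
    }
}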

From source file:org.test.LookupSVNUsers.java

/**
 * @param args
 * @throws IOException 
 */
public static void main(String[] args) throws IOException {

    if (args.length != 2) {
        log.error("USAGE: <svn users file(input)> <git authors file (output)>");

        System.exit(-1);
    }

    Set<String> unmatchedNameSet = new LinkedHashSet<String>();

    Map<String, GitUser> gitUserMap = new LinkedHashMap<String, GitUser>();

    String svnAuthorsFile = args[0];

    List<String> lines = FileUtils.readLines(new File(svnAuthorsFile));

    for (String line : lines) {

        // intentionally handle both upper and lower case variants of the same name.
        String svnUserName = line.trim();

        if (svnUserName.contains("("))
            continue; // skip over this line as we can't use it on the url

        if (gitUserMap.keySet().contains(svnUserName))
            continue; // skip this duplicate.

        log.info("starting on user = " + svnUserName);

        String gitName = extractFullName(svnUserName);

        if (gitName == null) {

            gitName = extractFullName(svnUserName.toLowerCase());
        }

        if (gitName == null) {
            unmatchedNameSet.add(svnUserName);
        } else {

            gitUserMap.put(svnUserName, new GitUser(svnUserName, gitName));
            log.info("mapped user (" + svnUserName + ") to: " + gitName);

        }

    }

    List<String> mergedList = new ArrayList<String>();

    mergedList.add("# GENERATED ");

    List<String> userNameList = new ArrayList<String>();

    userNameList.addAll(gitUserMap.keySet());

    Collections.sort(userNameList);

    for (String userName : userNameList) {

        GitUser gUser = gitUserMap.get(userName);

        mergedList.add(gUser.getSvnAuthor() + " = " + gUser.getGitUser() + " <" + gUser.getSvnAuthor()
                + "@users.sourceforge.net>");

    }

    for (String username : unmatchedNameSet) {

        log.warn("failed to match SVN User = " + username);

        // add in the unmatched entries as is.
        mergedList.add(username + " = " + username + " <" + username + "@users.sourceforge.net>");
    }

    FileUtils.writeLines(new File(args[1]), "UTF-8", mergedList);

}
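
A side note on the duplicate check above: gitUserMap.keySet().contains(name) is equivalent to gitUserMap.containsKey(name), which states the intent more directly, and since Java 8 the check-then-put pair can be a single putIfAbsent call. A minimal sketch:

import java.util.LinkedHashMap;
import java.util.Map;

public class PutIfAbsentDemo {
    public static void main(String[] args) {
        Map<String, String> gitUserMap = new LinkedHashMap<>();
        gitUserMap.put("jsmith", "John Smith");

        // containsKey is the direct form of keySet().contains(...).
        System.out.println(gitUserMap.containsKey("jsmith")); // true

        // putIfAbsent inserts only when no mapping exists; it returns the
        // current value for an existing key, or null if it inserted.
        gitUserMap.putIfAbsent("jsmith", "J. Smith (duplicate)"); // ignored
        gitUserMap.putIfAbsent("adoe", "Alice Doe");              // inserted

        System.out.println(gitUserMap); // {jsmith=John Smith, adoe=Alice Doe}
    }
}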

From source file:edu.cuhk.hccl.evaluation.EvaluationApp.java

public static void main(String[] args) throws IOException, TasteException {
    File realFile = new File(args[0]);
    File estimateFile = new File(args[1]);

    // Build real-rating map
    Map<String, long[]> realMap = buildRatingMap(realFile);

    // Build estimate-rating map
    Map<String, long[]> estimateMap = buildRatingMap(estimateFile);

    // Compare realMap with estimateMap
    Map<Integer, List<Double>> realList = new HashMap<Integer, List<Double>>();
    Map<Integer, List<Double>> estimateList = new HashMap<Integer, List<Double>>();

    // Use set to store non-duplicate pairs only
    Set<String> noRatingList = new HashSet<String>();

    for (String pair : realMap.keySet()) {
        long[] realRatings = realMap.get(pair);
        long[] estimateRatings = estimateMap.get(pair);

        if (realRatings == null || estimateRatings == null)
            continue;

        for (int i = 0; i < realRatings.length; i++) {
            long real = realRatings[i];
            long estimate = estimateRatings[i];

            // continue if the aspect rating can not be estimated due to incomplete reviews
            if (estimate <= 0) {
                noRatingList.add(pair.replace("@", "\t"));
                continue;
            }

            if (real > 0 && estimate > 0) {
                if (!realList.containsKey(i))
                    realList.put(i, new ArrayList<Double>());

                realList.get(i).add((double) real);

                if (!estimateList.containsKey(i))
                    estimateList.put(i, new ArrayList<Double>());

                estimateList.get(i).add((double) estimate);
            }
        }
    }

    System.out.println("[INFO] RMSE, MAE for estimate ratings: ");
    System.out.println("------------------------------");
    System.out.println("Index \t RMSE \t MAE");
    for (int i = 1; i < 6; i++) {
        double rmse = Metric.computeRMSE(realList.get(i), estimateList.get(i));
        double mae = Metric.computeMAE(realList.get(i), estimateList.get(i));

        System.out.printf("%d \t %.3f \t %.3f \n", i, rmse, mae);
    }

    System.out.println("------------------------------");

    if (noRatingList.size() > 0) {
        String noRatingFileName = "evaluation-no-ratings.txt";
        FileUtils.writeLines(new File(noRatingFileName), noRatingList, false);

        System.out.println("[INFO] User-item pairs with no ratings are saved in file: " + noRatingFileName);
    } else {
        System.out.println("[INFO] All user-item pairs have ratings.");
    }
}
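
The containsKey/put/get dance used to grow realList and estimateList above is the standard pre-Java-8 multimap idiom. Map.computeIfAbsent does the same in one call; a minimal sketch:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MultimapIdiomDemo {
    public static void main(String[] args) {
        Map<Integer, List<Double>> realList = new HashMap<>();

        // Creates the list on first use, then appends; no containsKey check needed.
        realList.computeIfAbsent(1, k -> new ArrayList<>()).add(4.0);
        realList.computeIfAbsent(1, k -> new ArrayList<>()).add(5.0);
        realList.computeIfAbsent(2, k -> new ArrayList<>()).add(3.0);

        System.out.println(realList); // {1=[4.0, 5.0], 2=[3.0]}
    }
}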

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.util.TempClinicalDataLoader.java

public static void main(String[] args) {
    // first get the db connection properties
    String url = urlSet.get(args[1]);
    String user = args[2];
    String word = args[3];

    // make sure we have the Oracle driver somewhere
    try {
        Class.forName("oracle.jdbc.OracleDriver");
        Class.forName("org.postgresql.Driver");
    } catch (Exception x) {
        System.out.println("Unable to load the driver class!");
        System.exit(0);
    }
    // connect to the database
    try {
        dbConnection = DriverManager.getConnection(url, user, word);
        ClinicalBean.setDBConnection(dbConnection);
    } catch (SQLException x) {
        x.printStackTrace();
        System.exit(1);
    }

    final String xmlList = args[0];
    BufferedReader br = null;
    try {
        final Map<String, String> clinicalFiles = new HashMap<String, String>();
        final Map<String, String> biospecimenFiles = new HashMap<String, String>();
        final Map<String, String> fullFiles = new HashMap<String, String>();

        //noinspection IOResourceOpenedButNotSafelyClosed
        br = new BufferedReader(new FileReader(xmlList));

        // read the file list to get all the files to load
        while (br.ready()) {
            final String[] in = br.readLine().split("\\t");
            String xmlfile = in[0];
            String archive = in[1];

            if (xmlfile.contains("_clinical")) {
                clinicalFiles.put(xmlfile, archive);
            } else if (xmlfile.contains("_biospecimen")) {
                biospecimenFiles.put(xmlfile, archive);
            } else {
                fullFiles.put(xmlfile, archive);
            }
        }

        Date dateAdded = Calendar.getInstance().getTime();

        // NOTE!!! This deletes all data before the load starts, assuming we are re-loading everything.
        // a better way would be to figure out what has changed and load that, or to be able to load multiple versions of the data in the schema
        emptyClinicalTables(user);

        // load any "full" files first -- in case some archives aren't split yet
        for (final String file : fullFiles.keySet()) {
            String archive = fullFiles.get(file);
            System.out.println("Full file " + file + " in " + archive);
            // need to re-instantiate the disease-specific beans for each file
            createDiseaseSpecificBeans(xmlList);
            String disease = getDiseaseName(archive);
            processFullXmlFile(file, archive, disease, dateAdded);

            // memory leak or something... have to commit and close all connections and re-get connection
            // after each file to keep from using too much heap space.  this troubles me, but I have never had
            // the time to figure out why it happens
            resetConnections(url, user, word);
        }

        // now process all clinical files, and insert patients and clinical data
        for (final String clinicalFile : clinicalFiles.keySet()) {
            createDiseaseSpecificBeans(xmlList);
            String archive = clinicalFiles.get(clinicalFile);
            System.out.println("Clinical file " + clinicalFile + " in " + archive);
            String disease = getDiseaseName(archive);
            processClinicalXmlFile(clinicalFile, archive, disease, dateAdded);
            resetConnections(url, user, word);
        }

        // now process biospecimen files
        for (final String biospecimenFile : biospecimenFiles.keySet()) {
            createDiseaseSpecificBeans(xmlList);
            String archive = biospecimenFiles.get(biospecimenFile);
            String disease = getDiseaseName(archive);
            System.out.println("Biospecimen file " + biospecimenFile);
            processBiospecimenXmlFile(biospecimenFile, archive, disease, dateAdded);
            resetConnections(url, user, word);
        }

        // this sets relationships between these clinical tables and data browser tables, since we delete
        // and reload every time
        setForeignKeys();
        dbConnection.commit();
        dbConnection.close();
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    } finally {
        IOUtils.closeQuietly(br);
    }
}
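
One small point about the loops above: they iterate keySet() and call get() inside the body for each of the three file maps. Iterating entrySet() yields the key and value in one pass and avoids the second lookup; a sketch with made-up file names:

import java.util.HashMap;
import java.util.Map;

public class EntrySetIterationDemo {
    public static void main(String[] args) {
        Map<String, String> fullFiles = new HashMap<>();
        fullFiles.put("gbm_full.xml", "archive_1");
        fullFiles.put("ov_full.xml", "archive_2");

        // One traversal, no per-key get() lookup.
        for (Map.Entry<String, String> e : fullFiles.entrySet()) {
            System.out.println("Full file " + e.getKey() + " in " + e.getValue());
        }
    }
}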

From source file:ml.shifu.shifu.ShifuCLI.java

public static void main(String[] args) {
    String[] cleanedArgs = cleanArgs(args);
    // invalid input and help options
    if (cleanedArgs.length < 1 || (isHelpOption(cleanedArgs[0]))) {
        printUsage();
        System.exit(cleanedArgs.length < 1 ? -1 : 0);
    }

    // process -v and -version conditions manually
    if (isVersionOption(cleanedArgs[0])) {
        printLogoAndVersion();
        System.exit(0);
    }

    CommandLineParser parser = new GnuParser();
    Options opts = buildModelSetOptions();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(opts, cleanedArgs);
    } catch (ParseException e) {
        log.error("Invalid command options. Please check help message.");
        printUsage();
        System.exit(1);
    }

    int status = 0;

    try {
        if (cleanedArgs[0].equals(NEW) && cleanedArgs.length >= 2 && StringUtils.isNotEmpty(cleanedArgs[1])) {
            // modelset step
            String modelName = cleanedArgs[1];
            status = createNewModel(modelName, cmd.getOptionValue(MODELSET_CMD_TYPE),
                    cmd.getOptionValue(MODELSET_CMD_M));
            if (status == 0) {
                printModelSetCreatedSuccessfulLog(modelName);
            } else {
                log.warn("Error in create new model set, please check your shifu config or report issue");
            }
            System.exit(status);
            // copyModel(manager, cmd.getOptionValues(MODELSET_CMD_CP));
        } else {
            if (cleanedArgs[0].equals(MODELSET_CMD_CP) && cleanedArgs.length >= 3
                    && StringUtils.isNotEmpty(cleanedArgs[1]) && StringUtils.isNotEmpty(cleanedArgs[2])) {
                String newModelSetName = cleanedArgs[2];
                // modelset step
                copyModel(new String[] { cleanedArgs[1], newModelSetName });
                printModelSetCopiedSuccessfulLog(newModelSetName);
            } else if (cleanedArgs[0].equals(INIT_CMD)) {
                // init step
                if (cmd.getOptions() == null || cmd.getOptions().length == 0) {
                    status = initializeModel();
                    if (status == 0) {
                        log.info(
                                "ModelSet initialization is successful. Please continue next step by using 'shifu stats'.");
                    } else {
                        log.warn(
                                "Error in ModelSet initialization, please check your shifu config or report issue");
                    }
                } else if (cmd.hasOption(INIT_CMD_MODEL)) {
                    initializeModelParam();
                } else {
                    log.error("Invalid command, please check help message.");
                    printUsage();
                }
            } else if (cleanedArgs[0].equals(STATS_CMD)) {
                Map<String, Object> params = new HashMap<String, Object>();
                params.put(Constants.IS_COMPUTE_CORR,
                        cmd.hasOption(CORRELATION) || cmd.hasOption(SHORT_CORRELATION));
                params.put(Constants.IS_REBIN, cmd.hasOption(REBIN));
                params.put(Constants.REQUEST_VARS, cmd.getOptionValue(VARS));
                params.put(Constants.EXPECTED_BIN_NUM, cmd.getOptionValue(N));
                params.put(Constants.IV_KEEP_RATIO, cmd.getOptionValue(IVR));
                params.put(Constants.MINIMUM_BIN_INST_CNT, cmd.getOptionValue(BIC));
                params.put(Constants.IS_COMPUTE_PSI, cmd.hasOption(PSI) || cmd.hasOption(SHORT_PSI));

                // stats step
                status = calModelStats(params);
                if (status == 0) {
                    if (cmd.hasOption(CORRELATION) || cmd.hasOption(SHORT_CORRELATION)) {
                        log.info(
                                "Do model set correlation computing successfully. Please continue next step by using 'shifu normalize or shifu norm'. For tree ensemble model, no need do norm, please continue next step by using 'shifu varsel'");
                    }
                    if (cmd.hasOption(PSI) || cmd.hasOption(SHORT_PSI)) {
                        log.info(
                                "Do model set psi computing successfully. Please continue next step by using 'shifu normalize or shifu norm'. For tree ensemble model, no need do norm, please continue next step by using 'shifu varsel'");
                    } else {
                        log.info(
                                "Do model set statistic successfully. Please continue next step by using 'shifu normalize or shifu norm or shifu transform'. For tree ensemble model, no need do norm, please continue next step by using 'shifu varsel'");
                    }
                } else {
                    log.warn(
                            "Error in model set stats computation, please report issue on http:/github.com/shifuml/shifu/issues.");
                }
            } else if (cleanedArgs[0].equals(NORMALIZE_CMD) || cleanedArgs[0].equals(NORM_CMD)
                    || cleanedArgs[0].equals(TRANSFORM_CMD)) {
                // normalize step
                Map<String, Object> params = new HashMap<String, Object>();
                params.put(Constants.IS_TO_SHUFFLE_DATA, cmd.hasOption(SHUFFLE));
                status = normalizeTrainData(params);
                if (status == 0) {
                    log.info(
                            "Do model set normalization successfully. Please continue next step by using 'shifu varselect or shifu varsel'.");
                } else {
                    log.warn(
                            "Error in model set stats computation, please report issue on http:/github.com/shifuml/shifu/issues.");
                }
            } else if (cleanedArgs[0].equals(VARSELECT_CMD) || cleanedArgs[0].equals(VARSEL_CMD)) {
                Map<String, Object> params = new HashMap<String, Object>();
                params.put(Constants.IS_TO_RESET, cmd.hasOption(RESET));
                params.put(Constants.IS_TO_LIST, cmd.hasOption(LIST));
                params.put(Constants.IS_TO_FILTER_AUTO, cmd.hasOption(FILTER_AUTO));
                params.put(Constants.IS_TO_RECOVER_AUTO, cmd.hasOption(RECOVER_AUTO));
                params.put(Constants.RECURSIVE_CNT, cmd.getOptionValue(RECURSIVE));

                // variable selected step
                status = selectModelVar(params);
                if (status == 0) {
                    log.info(
                            "Do model set variables selection successfully. Please continue next step by using 'shifu train'.");
                } else {
                    log.info("Do variable selection with error, please check error message or report issue.");
                }
            } else if (cleanedArgs[0].equals(TRAIN_CMD)) {
                // train step
                status = trainModel(cmd.hasOption(TRAIN_CMD_DRY), cmd.hasOption(TRAIN_CMD_DEBUG),
                        cmd.hasOption(SHUFFLE));
                if (status == 0) {
                    log.info(
                            "Do model set training successfully. Please continue next step by using 'shifu posttrain' or if no need posttrain you can go through with 'shifu eval'.");
                } else {
                    log.info("Do model training with error, please check error message or report issue.");
                }
            } else if (cleanedArgs[0].equals(CMD_ENCODE)) {
                Map<String, Object> params = new HashMap<String, Object>();
                params.put(ModelDataEncodeProcessor.ENCODE_DATA_SET, cmd.getOptionValue(EVAL_CMD_RUN));
                params.put(ModelDataEncodeProcessor.ENCODE_REF_MODEL, cmd.getOptionValue(REF));
                status = runEncode(params);
            } else if (cleanedArgs[0].equals(CMD_COMBO)) {
                if (cmd.hasOption(MODELSET_CMD_NEW)) {
                    log.info("Create new commbo models");
                    status = createNewCombo(cmd.getOptionValue(MODELSET_CMD_NEW));
                } else if (cmd.hasOption(INIT_CMD)) {
                    log.info("Init commbo models");
                    status = initComboModels();
                } else if (cmd.hasOption(EVAL_CMD_RUN)) {
                    log.info("Run combo model - with toShuffle: {}, with toResume: {}", opts.hasOption(SHUFFLE),
                            opts.hasOption(RESUME));
                    status = runComboModels(cmd.hasOption(SHUFFLE), cmd.hasOption(RESUME));
                    // train combo models
                } else if (cmd.hasOption(EVAL_CMD)) {
                    log.info("Eval combo model.");
                    // eval combo model performance
                    status = evalComboModels(cmd.hasOption(RESUME));
                } else {
                    log.error("Invalid command usage.");
                    printUsage();
                }
            } else if (cleanedArgs[0].equals(POSTTRAIN_CMD)) {
                // post train step
                status = postTrainModel();
                if (status == 0) {
                    log.info(
                            "Do model set post-training successfully. Please configure your eval set in ModelConfig.json and continue next step by using 'shifu eval' or 'shifu eval -new <eval set>' to create a new eval set.");
                } else {
                    log.info("Do model post training with error, please check error message or report issue.");
                }
            } else if (cleanedArgs[0].equals(SAVE)) {
                String newModelSetName = cleanedArgs.length >= 2 ? cleanedArgs[1] : null;
                saveCurrentModel(newModelSetName);
            } else if (cleanedArgs[0].equals(SWITCH)) {
                String newModelSetName = cleanedArgs[1];
                switchCurrentModel(newModelSetName);
            } else if (cleanedArgs[0].equals(SHOW)) {
                ManageModelProcessor p = new ManageModelProcessor(ModelAction.SHOW, null);
                p.run();
            } else if (cleanedArgs[0].equals(EVAL_CMD)) {
                Map<String, Object> params = new HashMap<String, Object>();

                // eval step
                if (cleanedArgs.length == 1) {
                    // run everything
                    status = runEvalSet(cmd.hasOption(TRAIN_CMD_DRY));
                    if (status == 0) {
                        log.info("Run eval performance with all eval sets successfully.");
                    } else {
                        log.info("Do evaluation with error, please check error message or report issue.");
                    }
                } else if (cmd.getOptionValue(MODELSET_CMD_NEW) != null) {
                    // create new eval
                    createNewEvalSet(cmd.getOptionValue(MODELSET_CMD_NEW));
                    log.info(
                            "Create eval set successfully. You can configure EvalConfig.json or directly run 'shifu eval -run <evalSetName>' to get performance info.");
                } else if (cmd.hasOption(EVAL_CMD_RUN)) {
                    runEvalSet(cmd.getOptionValue(EVAL_CMD_RUN), cmd.hasOption(TRAIN_CMD_DRY));
                    log.info("Finish run eval performance with eval set {}.", cmd.getOptionValue(EVAL_CMD_RUN));
                } else if (cmd.hasOption(SCORE)) {
                    params.put(EvalModelProcessor.NOSORT, cmd.hasOption(NOSORT));
                    // run score
                    runEvalScore(cmd.getOptionValue(SCORE), params);
                    log.info("Finish run score with eval set {}.", cmd.getOptionValue(SCORE));
                } else if (cmd.hasOption(CONFMAT)) {
                    // run confusion matrix
                    runEvalConfMat(cmd.getOptionValue(CONFMAT));
                    log.info("Finish run confusion matrix with eval set {}.", cmd.getOptionValue(CONFMAT));
                } else if (cmd.hasOption(PERF)) {
                    // run perfermance
                    runEvalPerf(cmd.getOptionValue(PERF));
                    log.info("Finish run performance maxtrix with eval set {}.", cmd.getOptionValue(PERF));
                } else if (cmd.hasOption(LIST)) {
                    // list all evaluation sets
                    listEvalSet();
                } else if (cmd.hasOption(DELETE)) {
                    // delete some evaluation set
                    deleteEvalSet(cmd.getOptionValue(DELETE));
                } else if (cmd.hasOption(NORM)) {
                    runEvalNorm(cmd.getOptionValue(NORM), cmd.hasOption(STRICT));
                } else {
                    log.error("Invalid command, please check help message.");
                    printUsage();
                }
            } else if (cleanedArgs[0].equals(CMD_EXPORT)) {
                Map<String, Object> params = new HashMap<String, Object>();
                params.put(ExportModelProcessor.IS_CONCISE, cmd.hasOption(EXPORT_CONCISE));
                params.put(ExportModelProcessor.REQUEST_VARS, cmd.getOptionValue(VARS));
                params.put(ExportModelProcessor.EXPECTED_BIN_NUM, cmd.getOptionValue(N));
                params.put(ExportModelProcessor.IV_KEEP_RATIO, cmd.getOptionValue(IVR));
                params.put(ExportModelProcessor.MINIMUM_BIN_INST_CNT, cmd.getOptionValue(BIC));
                status = exportModel(cmd.getOptionValue(MODELSET_CMD_TYPE), params);
                if (status == 0) {
                    log.info("Export models/columnstats/corr successfully.");
                } else {
                    log.warn("Fail to export models/columnstats/corr, please check or report issue.");
                }
            } else if (cleanedArgs[0].equals(CMD_TEST)) {
                Map<String, Object> params = new HashMap<String, Object>();
                params.put(ShifuTestProcessor.IS_TO_TEST_FILTER, cmd.hasOption(FILTER));
                params.put(ShifuTestProcessor.TEST_TARGET, cmd.getOptionValue(FILTER));
                params.put(ShifuTestProcessor.TEST_RECORD_CNT, cmd.getOptionValue(N));
                status = runShifuTest(params);
                if (status == 0) {
                    log.info("Run test for Shifu Successfully.");
                } else {
                    log.warn("Fail to run Shifu test.");
                }
            } else if (cleanedArgs[0].equals(CMD_CONVERT)) {
                int optType = -1;
                if (cmd.hasOption(TO_ZIPB)) {
                    optType = 1;
                } else if (cmd.hasOption(TO_TREEB)) {
                    optType = 2;
                }

                String[] convertArgs = new String[2];
                int j = 0;
                for (int i = 1; i < cleanedArgs.length; i++) {
                    if (!cleanedArgs[i].startsWith("-")) {
                        convertArgs[j++] = cleanedArgs[i];
                    }
                }

                if (optType < 0 || StringUtils.isBlank(convertArgs[0]) || StringUtils.isBlank(convertArgs[1])) {
                    printUsage();
                } else {
                    status = runShifuConvert(optType, convertArgs[0], convertArgs[1]);
                }
            } else if (cleanedArgs[0].equals(CMD_ANALYSIS)) {
                if (cmd.hasOption(FI)) {
                    String modelPath = cmd.getOptionValue(FI);
                    analysisModelFi(modelPath);
                }
            } else {
                log.error("Invalid command, please check help message.");
                printUsage();
            }
        }
        // in some cases the JVM cannot stop on its own
        System.exit(status);
    } catch (ShifuException e) {
        // need to define an error code in each step.
        log.error(e.getError().toString() + "; msg: " + e.getMessage(), e.getCause());
        exceptionExit(e);
    } catch (Exception e) {
        exceptionExit(e);
    }
}
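
Throughout ShifuCLI, a HashMap<String, Object> serves as a loose parameter bag: put accepts heterogeneous values (Booleans from hasOption, possibly-null Strings from getOptionValue) that the receiving processor reads back by key. A sketch of the read side, assuming nothing about Shifu's processors (getOrDefault is plain java.util.Map API):

import java.util.HashMap;
import java.util.Map;

public class ParamBagDemo {
    public static void main(String[] args) {
        Map<String, Object> params = new HashMap<>();
        params.put("isComputeCorr", true); // boolean flag
        params.put("requestVars", null);   // option not given; null is stored

        // getOrDefault supplies the fallback only when the key is absent,
        // not when it is mapped to null; check explicitly where that matters.
        boolean computeCorr = Boolean.TRUE.equals(params.get("isComputeCorr"));
        Object vars = params.getOrDefault("requestVars", "ALL"); // null: key exists

        System.out.println(computeCorr + " / " + vars); // true / null
    }
}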

From source file:com.moss.schematrax.SchemaUpdater.java

public static void main(String[] args) throws Exception {

    boolean configureLogging = new Boolean(System.getProperty("configureLogging", "true")).booleanValue();
    if (configureLogging) {
        BasicConfigurator.configure();
        Logger.getRootLogger().setLevel(Level.INFO);
    }

    Map<String, String> argsMap = new HashMap<String, String>();
    for (int x = 0; x < args.length; x++) {
        String arg = args[x];
        String[] pair = arg.split("=");
        if (pair.length != 2) {
            System.out.println("Error parsing command line arguments in value pair:" + arg);
            System.out.println(usage);
            System.exit(1);
        }
        String name = pair[0];
        String value = pair[1];
        argsMap.put(name, value);
    }

    String mode = (String) argsMap.get("mode");
    String databaseTypeName = (String) argsMap.get("dbtype");
    String databaseName = (String) argsMap.get("catalog");
    String logon = (String) argsMap.get("logon");
    String password = (String) argsMap.get("password");
    String host = (String) argsMap.get("host");
    if (args.length < 5 || databaseTypeName == null || databaseName == null || logon == null || password == null
            || host == null || mode == null) {
        System.out.println(usage);
    }

    String schematraxFileLocation = (String) argsMap.get("schematraxFile");

    if (schematraxFileLocation == null)
        schematraxFileLocation = "";

    String schemaName = (String) argsMap.get("schema");
    if (schemaName == null)
        schemaName = logon;

    DatabaseType dbType = DatabaseType.getDatabaseType(databaseTypeName);

    String schemaVersion = (String) argsMap.get("version");

    JdbcConnectionConfig connectionConfig = new JdbcConnectionConfig();
    connectionConfig.setJdbcDriverClassName(dbType.getJdbcDriver().getClassName());
    connectionConfig
            .setJdbcUrl(dbType.getJdbcDriver().createJdbcUrl(host, null, databaseName, logon, password));
    connectionConfig.setLogon(logon);
    connectionConfig.setPassword(password);

    SchemaUpdater updater = new SchemaUpdater(new File(schematraxFileLocation).toURL());

    if (mode.equals("update")) {
        if (schemaVersion == null) {
            System.err.println("You must specify a schema version");
            System.err.println(usage);
        }
        updater.updateSchema(dbType, connectionConfig, schemaName, schemaVersion);
    } else if (mode.equals("create")) {
        updater.createSchema(dbType, connectionConfig, schemaName, schemaVersion);
    } else {
        System.err.println("Invalid mode:" + mode);
        System.err.println(usage);
    }
}
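
A final note on the argument parsing above: because put replaces any existing mapping for a key, a name=value pair given twice on the command line silently wins with its last value. A minimal sketch of that overwrite behavior:

import java.util.HashMap;
import java.util.Map;

public class LastValueWinsDemo {
    public static void main(String[] args) {
        String[] cli = { "mode=update", "host=db1", "host=db2" };

        Map<String, String> argsMap = new HashMap<>();
        for (String arg : cli) {
            String[] pair = arg.split("=");
            argsMap.put(pair[0], pair[1]); // replaces any earlier value for this key
        }
        System.out.println(argsMap.get("host")); // db2 (the last occurrence wins)
    }
}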