Example usage for java.util TreeMap TreeMap

List of usage examples for java.util TreeMap TreeMap

Introduction

On this page you can find example usage for the java.util.TreeMap TreeMap() constructor.

Prototype

public TreeMap() 

Document

Constructs a new, empty tree map, using the natural ordering of its keys.
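For instance, a minimal, self-contained sketch (class name and sample keys are illustrative) showing that a map built with this constructor keeps its entries sorted by the natural ordering of its keys:

import java.util.TreeMap;

public class TreeMapNaturalOrderExample {
    public static void main(String[] args) {
        // Entries are kept sorted by the natural (here alphabetical) ordering
        // of the keys, regardless of insertion order.
        TreeMap<String, Integer> map = new TreeMap<>();
        map.put("banana", 2);
        map.put("apple", 1);
        map.put("cherry", 3);

        System.out.println(map);            // {apple=1, banana=2, cherry=3}
        System.out.println(map.firstKey()); // apple
        System.out.println(map.lastKey());  // cherry
    }
}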

Usage

From source file:net.ontopia.topicmaps.cmdlineutils.rdbms.RDBMSIndexTool.java

public static void main(String[] argv) throws Exception {

    // Initialize logging
    CmdlineUtils.initializeLogging();

    // Register logging options
    CmdlineOptions options = new CmdlineOptions("RDBMSIndexTool", argv);
    CmdlineUtils.registerLoggingOptions(options);

    // Parse command line options
    try {
        options.parse();
    } catch (CmdlineOptions.OptionsException e) {
        System.err.println("Error: " + e.getMessage());
        System.exit(1);
    }

    // Get command line arguments
    String[] args = options.getArguments();

    if (args.length != 1) {
        usage();
        System.exit(3);
    }

    // load database schema project
    ClassLoader cloader = RDBMSIndexTool.class.getClassLoader();
    InputStream istream = cloader.getResourceAsStream("net/ontopia/topicmaps/impl/rdbms/config/schema.xml");
    Project dbp = DatabaseProjectReader.loadProject(istream);

    // open database connection
    String propfile = args[0];
    ConnectionFactoryIF cf = new DefaultConnectionFactory(PropertyUtils.loadProperties(new File(propfile)),
            true);

    Connection conn = cf.requestConnection();
    try {
        DatabaseMetaData dbm = conn.getMetaData();
        boolean downcase = dbm.storesLowerCaseIdentifiers();

        Map extra_indexes = new TreeMap();
        Map missing_indexes = new TreeMap();

        Iterator tables = dbp.getTables().iterator();
        while (tables.hasNext()) {
            Table table = (Table) tables.next();
            String table_name = (downcase ? table.getName().toLowerCase() : table.getName());
            //! System.out.println("T :"  + table_name);

            // get primary keys from database
            Map pkeys = getPrimaryKeys(table_name, dbm);

            // get indexes from database
            Map indexes = getIndexes(table_name, dbm);

            Map dindexes = new HashMap();
            if (table.getPrimaryKeys() != null) {
                String pkey = table_name + '(' + StringUtils.join(table.getPrimaryKeys(), ',') + ')';
                if (!pkeys.containsKey(pkey))
                    System.out.println("PKM: " + pkey);
            }

            Iterator iter = table.getIndexes().iterator();
            while (iter.hasNext()) {
                Index index = (Index) iter.next();
                String i = table_name + '(' + StringUtils.join(index.getColumns(), ',') + ')';
                String index_name = (downcase ? index.getName().toLowerCase() : index.getName());
                dindexes.put(i, index_name);
            }

            Set extra = new HashSet(indexes.keySet());
            extra.removeAll(dindexes.keySet());
            extra.removeAll(pkeys.keySet());
            if (!extra.isEmpty()) {
                Iterator i = extra.iterator();
                while (i.hasNext()) {
                    Object k = i.next();
                    extra_indexes.put(k, indexes.get(k));
                }
            }

            Set missing = new HashSet(dindexes.keySet());
            missing.addAll(pkeys.keySet());
            missing.removeAll(indexes.keySet());
            if (!missing.isEmpty()) {
                Iterator i = missing.iterator();
                while (i.hasNext()) {
                    Object k = i.next();
                    missing_indexes.put(k, dindexes.get(k));
                }
            }

        }
        if (!extra_indexes.isEmpty())
            System.out.println("/* --- Extra indexes ----------------------------------------- */");
        Iterator eiter = extra_indexes.keySet().iterator();
        while (eiter.hasNext()) {
            Object k = eiter.next();
            System.out.println("drop index " + extra_indexes.get(k) + "; /* " + k + " */");
        }

        if (!missing_indexes.isEmpty())
            System.out.println("/* --- Missing indexes---------------------------------------- */");
        Iterator miter = missing_indexes.keySet().iterator();
        while (miter.hasNext()) {
            Object k = miter.next();
            System.out.println("create index " + missing_indexes.get(k) + " on " + k + ";");
        }

    } finally {
        conn.rollback();
        conn.close();
    }

}

From source file:com.acapulcoapp.alloggiatiweb.FileReader.java

public static void main(String[] args) throws UnknownHostException, IOException {
    // TODO code application logic here

    SpringApplication app = new SpringApplication(AcapulcoappApp.class);
    SimpleCommandLinePropertySource source = new SimpleCommandLinePropertySource(args);
    addDefaultProfile(app, source);

    ConfigurableApplicationContext context = app.run(args);

    initBeans(context);

    Map<LocalDate, List<List<String>>> map = new TreeMap<>();

    List<File> files = new ArrayList<>(FileUtils.listFiles(new File("/Users/chiccomask/Downloads/ALLOGGIATI"),
            new String[] { "txt" }, true));

    Collections.reverse(files);

    int count = 0;

    for (File file : files) {

        //            List<String> allLines = FileUtils.readLines(file, "windows-1252");
        List<String> allLines = FileUtils.readLines(file, "UTF-8");

        for (int i = 0; i < allLines.size();) {

            count++;

            List<String> record = new ArrayList<>();

            String line = allLines.get(i);
            String type = TIPO_ALLOGGIO.parse(line);

            switch (type) {
            case "16":
                record.add(line);
                i++;
                break;
            case "17": {
                record.add(line);
                boolean out = false;
                while (!out) {
                    i++;
                    if (i < allLines.size()) {
                        String subline = allLines.get(i);
                        String subtype = TIPO_ALLOGGIO.parse(subline);
                        if (!subtype.equals("19")) {
                            out = true;
                        } else {
                            record.add(subline);
                        }
                    } else {
                        out = true;
                    }
                }
                break;
            }
            case "18": {
                record.add(line);
                boolean out = false;
                while (!out) {
                    i++;
                    if (i < allLines.size()) {
                        String subline = allLines.get(i);
                        String subtype = TIPO_ALLOGGIO.parse(subline);
                        if (!subtype.equals("20")) {
                            out = true;
                        } else {
                            record.add(subline);
                        }
                    } else {
                        out = true;
                    }
                }
                break;
            }
            default:
                break;
            }

            LocalDate arrived = LocalDate.parse(DATA_ARRIVO.parse(line),
                    DateTimeFormatter.ofPattern(DATE_PATTERN));
            if (!map.containsKey(arrived)) {
                map.put(arrived, new ArrayList<>());
            }
            map.get(arrived).add(record);
        }
    }

    for (LocalDate date : map.keySet()) {

        System.out.println();
        System.out.println("process day " + date);

        for (List<String> record : map.get(date)) {

            System.out.println();
            System.out.println("process record ");
            for (String line : record) {
                System.out.println(line);
            }

            CheckinRecord checkinRecord = new CheckinRecord();

            //non lo setto per adesso
            String firstLine = record.get(0);

            String typeStr = TIPO_ALLOGGIO.parse(firstLine);
            CheckinType cht = checkinTypeRepository.find(typeStr);
            checkinRecord.setCheckinType(cht);

            int days = Integer.parseInt(PERMANENZA.parse(firstLine));
            checkinRecord.setDays(days);
            checkinRecord.setArrived(date);

            boolean isMain = true;

            List<Person> others = new ArrayList<>();

            for (String line : record) {
                Person p = extractPerson(line);

                if (p.getDistrictOfBirth() == null) {
                    System.out.println("district of birth not found " + p);
                }

                List<Person> duplicates = personRepository.findDuplicates(p.getSurname(), p.getName(),
                        p.getDateOfBirth());

                if (duplicates.isEmpty()) {
                    System.out.println("add new person " + p.getId() + " " + p);
                    personRepository.saveAndFlush(p);
                } else if (duplicates.size() == 1) {

                    Person found = duplicates.get(0);

                    if (p.getIdentityDocument() != null) {
                        //we sorted by date so we suppose 
                        //the file version is newer so we update the entity
                        p.setId(found.getId());
                        System.out.println("update person " + p.getId() + " " + p);
                        personRepository.saveAndFlush(p);

                    } else if (found.getIdentityDocument() != null) {
                        //on db there are more data so I use them.
                        p = found;
                        System.out.println("use already saved person " + p.getId() + " " + p);
                    } else {
                        p.setId(found.getId());
                        System.out.println("update person " + p.getId() + " " + p);
                        personRepository.saveAndFlush(p);
                    }

                } else {
                    throw new RuntimeException("More duplicated for " + p.getName());
                }

                if (isMain) {
                    checkinRecord.setMainPerson(p);
                    isMain = false;
                } else {
                    others.add(p);
                }
            }

            checkinRecord.setOtherPeople(new HashSet<>(others));

            if (checkinRecordRepository.alreadyExists(checkinRecord.getMainPerson(), date) != null) {
                System.out.println("already exists " + date + " p " + checkinRecord.getMainPerson());
            } else {
                System.out.println("save record ");
                checkinRecordRepository.saveAndFlush(checkinRecord);
            }
        }
    }

    //
    //            if (type.equals("16")) {
    //                List<String> record = new ArrayList<>();
    //                record.add(line);
    //                keepOpen = false;
    //            }
    //
    //            map.get(arrived).add(record);
    //        map.values().forEach((list) -> {
    //
    //            for (String line : list) {
    //
    //                Person p = null;
    //
    //                try {
    //
    //                    p = extractPerson(line);
    //
    //                    List<Person> duplicates = personRepository.findDuplicates(p.getSurname(), p.getName(), p.getDateOfBirth());
    //
    //                    if (duplicates.isEmpty()) {
    //                        personRepository.saveAndFlush(p);
    //
    //                    } else if (duplicates.size() > 1) {
    //                        System.out.println();
    //                        System.out.println("MULIPLE DUPLICATED");
    //
    //                        for (Person dd : duplicates) {
    //                            System.out.println(dd);
    //                        }
    //                        System.out.println("* " + p);
    //                        throw new RuntimeException();
    //                    } else {
    //
    ////                        if (!duplicates.get(0).getDistrictOfBirth().equals(p.getDistrictOfBirth())) {
    ////                        int index = 0;
    ////
    ////                        System.out.println();
    ////                        System.out.println("DUPLICATED");
    ////
    ////                        for (Person dd : duplicates) {
    ////                            System.out.println(dd);
    ////                            index++;
    ////                        }
    ////                        System.out.println("* " + p);
    ////                        System.out.println(file.getAbsolutePath() + " " + p);
    ////
    ////                        System.out.println();
    ////                        System.out.println();
    ////                        }
    ////                        duplicates.remove(0);
    ////                        personRepository.deleteInBatch(duplicates);
    ////                System.out.println();
    ////                System.out.println("Seleziona scelta");
    ////                Scanner s = new Scanner(System.in);
    ////                int selected;
    ////                try {
    ////                    selected = s.nextInt();
    ////                } catch (InputMismatchException e) {
    ////                    selected = 0;
    ////                }
    ////
    ////                if (duplicates.size() <= selected) {
    ////                    personRepository.deleteInBatch(duplicates);
    ////                    personRepository.saveAndFlush(p);
    ////                } else {
    ////                    duplicates.remove(selected);
    ////                    personRepository.deleteInBatch(duplicates);
    ////                }
    //                    }
    //
    //                } catch (Exception e) {
    //
    //                    System.out.println();
    ////                    System.out.println("ERROR READING lineCount=" + allLines.indexOf(line) + " line=" + line);
    ////                    System.out.println(file.getAbsolutePath());
    //                    System.out.println(p);
    //                    e.printStackTrace();
    //                    System.out.println();
    //                }
    //            }
    //        });
    context.registerShutdownHook();

    System.exit(0);
}

From source file:io.apiman.tools.i18n.TemplateScanner.java

public static void main(String[] args) throws IOException {
    if (args == null || args.length != 1) {
        System.out.println("Template directory not provided (no path provided).");
        System.exit(1);
    }
    File templateDir = new File(args[0]);
    if (!templateDir.isDirectory()) {
        System.out.println("Template directory not provided (provided path is not a directory).");
        System.exit(1);
    }

    if (!new File(templateDir, "dash.html").isFile()) {
        System.out.println("Template directory not provided (dash.html not found).");
        System.exit(1);
    }

    File outputDir = new File(templateDir, "../../../../../../tools/i18n/target");
    if (!outputDir.isDirectory()) {
        System.out.println("Output directory not found: " + outputDir);
        System.exit(1);
    }
    File outputFile = new File(outputDir, "scanner-messages.properties");
    if (outputFile.isFile() && !outputFile.delete()) {
        System.out.println("Couldn't delete the old messages.properties: " + outputFile);
        System.exit(1);
    }

    System.out.println("Starting scan.");
    System.out.println("Scanning template directory: " + templateDir.getAbsolutePath());

    String[] extensions = { "html", "include" };
    Collection<File> files = FileUtils.listFiles(templateDir, extensions, true);

    TreeMap<String, String> strings = new TreeMap<>();

    for (File file : files) {
        System.out.println("\tScanning file: " + file);
        scanFile(file, strings);
    }

    outputMessages(strings, outputFile);

    System.out.println("Scan complete.  Scanned " + files.size() + " files and discovered " + strings.size()
            + " translation strings.");
}

From source file:com.github.fritaly.svngraph.SvnGraph.java

public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        System.out.println(String.format("%s <input-file> <output-file>", SvnGraph.class.getSimpleName()));
        System.exit(1);
    }

    final File input = new File(args[0]);

    if (!input.exists()) {
        throw new IllegalArgumentException(
                String.format("The given file '%s' doesn't exist", input.getAbsolutePath()));
    }

    final File output = new File(args[1]);

    final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(input);

    final History history = new History(document);

    final Set<String> rootPaths = history.getRootPaths();

    System.out.println(rootPaths);

    for (String path : rootPaths) {
        System.out.println(path);
        System.out.println(history.getHistory(path).getRevisions());
        System.out.println();
    }

    int count = 0;

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;

    try {
        fileWriter = new FileWriter(output);

        graphWriter = new GraphMLWriter(fileWriter);

        final NodeStyle tagStyle = graphWriter.getNodeStyle();
        tagStyle.setFillColor(Color.WHITE);

        graphWriter.graph();

        // map associating node labels to their corresponding node id in the graph
        final Map<String, String> nodeIdsPerLabel = new TreeMap<>();

        // the node style associated to each branch
        final Map<String, NodeStyle> nodeStyles = new TreeMap<>();

        for (Revision revision : history.getSignificantRevisions()) {
            System.out.println(revision.getNumber() + " - " + revision.getMessage());

            // TODO Render also the deletion of branches
            // there should be only 1 significant update per revision (the one with action ADD)
            for (Update update : revision.getSignificantUpdates()) {
                if (update.isCopy()) {
                    // a merge is also considered a copy
                    final RevisionPath source = update.getCopySource();

                    System.out.println(String.format("  > %s %s from %s@%d", update.getAction(),
                            update.getPath(), source.getPath(), source.getRevision()));

                    final String sourceRoot = Utils.getRootName(source.getPath());

                    if (sourceRoot == null) {
                        // skip the revisions whose associated root is
                        // null (happens whether a branch was created
                        // outside the 'branches' directory for
                        // instance)
                        System.err.println(String.format("Skipped revision %d because of a null root",
                                source.getRevision()));
                        continue;
                    }

                    final String sourceLabel = computeNodeLabel(sourceRoot, source.getRevision());

                    // create a node for the source (path, revision)
                    final String sourceId;

                    if (nodeIdsPerLabel.containsKey(sourceLabel)) {
                        // retrieve the id of the existing node
                        sourceId = nodeIdsPerLabel.get(sourceLabel);
                    } else {
                        // create the new node
                        if (Utils.isTagPath(source.getPath())) {
                            graphWriter.setNodeStyle(tagStyle);
                        } else {
                            if (!nodeStyles.containsKey(sourceRoot)) {
                                final NodeStyle style = new NodeStyle();
                                style.setFillColor(randomColor());

                                nodeStyles.put(sourceRoot, style);
                            }

                            graphWriter.setNodeStyle(nodeStyles.get(sourceRoot));
                        }

                        sourceId = graphWriter.node(sourceLabel);

                        nodeIdsPerLabel.put(sourceLabel, sourceId);
                    }

                    // and another for the newly created directory
                    final String targetRoot = Utils.getRootName(update.getPath());

                    if (targetRoot == null) {
                        System.err.println(String.format("Skipped revision %d because of a null root",
                                revision.getNumber()));
                        continue;
                    }

                    final String targetLabel = computeNodeLabel(targetRoot, revision.getNumber());

                    if (Utils.isTagPath(update.getPath())) {
                        graphWriter.setNodeStyle(tagStyle);
                    } else {
                        if (!nodeStyles.containsKey(targetRoot)) {
                            final NodeStyle style = new NodeStyle();
                            style.setFillColor(randomColor());

                            nodeStyles.put(targetRoot, style);
                        }

                        graphWriter.setNodeStyle(nodeStyles.get(targetRoot));
                    }

                    final String targetId;

                    if (nodeIdsPerLabel.containsKey(targetLabel)) {
                        // retrieve the id of the existing node
                        targetId = nodeIdsPerLabel.get(targetLabel);
                    } else {
                        // create the new node
                        if (Utils.isTagPath(update.getPath())) {
                            graphWriter.setNodeStyle(tagStyle);
                        } else {
                            if (!nodeStyles.containsKey(targetRoot)) {
                                final NodeStyle style = new NodeStyle();
                                style.setFillColor(randomColor());

                                nodeStyles.put(targetRoot, style);
                            }

                            graphWriter.setNodeStyle(nodeStyles.get(targetRoot));
                        }

                        targetId = graphWriter.node(targetLabel);

                        nodeIdsPerLabel.put(targetLabel, targetId);
                    }

                    // create an edge between the 2 nodes
                    graphWriter.edge(sourceId, targetId);
                } else {
                    System.out.println(String.format("  > %s %s", update.getAction(), update.getPath()));
                }
            }

            System.out.println();

            count++;
        }

        // Dispatch the revisions per corresponding branch
        final Map<String, Set<Long>> revisionsPerBranch = new TreeMap<>();

        for (String nodeLabel : nodeIdsPerLabel.keySet()) {
            if (nodeLabel.contains("@")) {
                final String branchName = StringUtils.substringBefore(nodeLabel, "@");
                final long revision = Long.parseLong(StringUtils.substringAfter(nodeLabel, "@"));

                if (!revisionsPerBranch.containsKey(branchName)) {
                    revisionsPerBranch.put(branchName, new TreeSet<Long>());
                }

                revisionsPerBranch.get(branchName).add(revision);
            } else {
                throw new IllegalStateException(nodeLabel);
            }
        }

        // Recreate the missing edges between revisions from a same branch
        for (String branchName : revisionsPerBranch.keySet()) {
            final List<Long> branchRevisions = new ArrayList<>(revisionsPerBranch.get(branchName));

            for (int i = 0; i < branchRevisions.size() - 1; i++) {
                final String nodeLabel1 = String.format("%s@%d", branchName, branchRevisions.get(i));
                final String nodeLabel2 = String.format("%s@%d", branchName, branchRevisions.get(i + 1));

                graphWriter.edge(nodeIdsPerLabel.get(nodeLabel1), nodeIdsPerLabel.get(nodeLabel2));
            }
        }

        graphWriter.closeGraph();

        System.out.println(String.format("Found %d significant revisions", count));
    } finally {
        if (graphWriter != null) {
            graphWriter.close();
        }
        if (fileWriter != null) {
            fileWriter.close();
        }
    }

    System.out.println("Done");
}

From source file:de.tudarmstadt.ukp.experiments.dip.wp1.documents.Step8GoldDataAggregator.java

public static void main(String[] args) throws Exception {
    String inputDir = args[0] + "/";
    // output dir
    File outputDir = new File(args[1]);
    File turkersConfidence = new File(args[2]);
    if (outputDir.exists()) {
        outputDir.delete();
    }
    outputDir.mkdir();

    List<String> annotatorsIDs = new ArrayList<>();
    //        for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) {
    //            QueryResultContainer queryResultContainer = QueryResultContainer
    //                    .fromXML(FileUtils.readFileToString(f, "utf-8"));
    //            for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) {
    //                for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) {
    //                    if (!annotatorsIDs.contains(relevanceVote.turkID))
    //                        annotatorsIDs.add(relevanceVote.turkID);
    //                }
    //            }
    //        }
    HashMap<String, Integer> countVotesForATurker = new HashMap<>();
    // creates temporary file with format for mace
    // Hashmap annotations: key is the id of a document and a sentence
    // Value is an array votes[] of turkers decisions: true or false (relevant or not)
    // the length of this array equals the number of annotators in List<String> annotatorsIDs.
    // If an annotator worked on the task his decision is written in the array otherwise the value is NULL

    // key: queryID + clueWebID + sentenceID
    // value: true and false annotations
    TreeMap<String, Annotations> annotations = new TreeMap<>();

    for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) {
        QueryResultContainer queryResultContainer = QueryResultContainer
                .fromXML(FileUtils.readFileToString(f, "utf-8"));
        System.out.println("Reading " + f.getName());
        for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) {
            String documentID = rankedResults.clueWebID;
            for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) {
                Integer turkerID;
                if (!annotatorsIDs.contains(relevanceVote.turkID)) {
                    annotatorsIDs.add(relevanceVote.turkID);
                    turkerID = annotatorsIDs.size() - 1;
                } else {
                    turkerID = annotatorsIDs.indexOf(relevanceVote.turkID);
                }
                Integer count = countVotesForATurker.get(relevanceVote.turkID);
                if (count == null) {
                    count = 0;
                }
                count++;
                countVotesForATurker.put(relevanceVote.turkID, count);

                String id;
                List<Integer> trueVotes;
                List<Integer> falseVotes;
                for (QueryResultContainer.SingleSentenceRelevanceVote singleSentenceRelevanceVote : relevanceVote.singleSentenceRelevanceVotes)
                    if (!"".equals(singleSentenceRelevanceVote.sentenceID)) {

                        id = f.getName() + "_" + documentID + "_" + singleSentenceRelevanceVote.sentenceID;
                        Annotations turkerVotes = annotations.get(id);
                        if (turkerVotes == null) {
                            trueVotes = new ArrayList<>();
                            falseVotes = new ArrayList<>();
                            turkerVotes = new Annotations(trueVotes, falseVotes);
                        }
                        trueVotes = turkerVotes.trueAnnotations;
                        falseVotes = turkerVotes.falseAnnotations;
                        if ("true".equals(singleSentenceRelevanceVote.relevant)) {
                            // votes[turkerID] = true;
                            trueVotes.add(turkerID);
                        } else if ("false".equals(singleSentenceRelevanceVote.relevant)) {
                            //   votes[turkerID] = false;
                            falseVotes.add(turkerID);
                        } else {
                            throw new IllegalStateException("Annotation value of sentence "
                                    + singleSentenceRelevanceVote.sentenceID + " in " + rankedResults.clueWebID
                                    + " equals " + singleSentenceRelevanceVote.relevant);
                        }
                        try {
                            int allVotesCount = trueVotes.size() + falseVotes.size();
                            if (allVotesCount > 5) {
                                System.err.println(id + " doesn't have 5 annotators: true: " + trueVotes.size()
                                        + " false: " + falseVotes.size());

                                // nasty hack, we're gonna strip some data; true votes first
                                /* we can't do that, it breaks something down the line
                                int toRemove = allVotesCount - 5;
                                if (trueVotes.size() >= toRemove) {
                                trueVotes = trueVotes
                                        .subList(0, trueVotes.size() - toRemove);
                                }
                                else if (
                                    falseVotes.size() >= toRemove) {
                                falseVotes = falseVotes
                                        .subList(0, trueVotes.size() - toRemove);
                                }
                                */
                                System.err.println("Adjusted: " + id + " doesn't have 5 annotators: true: "
                                        + trueVotes.size() + " false: " + falseVotes.size());
                            }
                        } catch (IllegalStateException e) {
                            e.printStackTrace();
                        }
                        turkerVotes.trueAnnotations = trueVotes;
                        turkerVotes.falseAnnotations = falseVotes;
                        annotations.put(id, turkerVotes);
                    } else {
                        throw new IllegalStateException(
                                "Empty Sentence ID in " + f.getName() + " for turker " + turkerID);
                    }

            }
        }

    }
    File tmp = printHashMap(annotations, annotatorsIDs.size());

    String file = TEMP_DIR + "/" + tmp.getName();
    MACE.main(new String[] { "--prefix", file });

    //gets the keys of the documents and sentences
    ArrayList<String> lines = (ArrayList<String>) FileUtils.readLines(new File(file + ".prediction"));
    int i = 0;
    TreeMap<String, TreeMap<String, ArrayList<HashMap<String, String>>>> ids = new TreeMap<>();
    ArrayList<HashMap<String, String>> sentences;
    if (lines.size() != annotations.size()) {
        throw new IllegalStateException(
                "The size of prediction file is " + lines.size() + "but expected " + annotations.size());
    }
    for (Map.Entry entry : annotations.entrySet()) { //1001.xml_clueweb12-1905wb-13-07360_8783
        String key = (String) entry.getKey();
        String[] IDs = key.split("_");
        if (IDs.length > 2) {
            String queryID = IDs[0];
            String clueWebID = IDs[1];
            String sentenceID = IDs[2];
            TreeMap<String, ArrayList<HashMap<String, String>>> clueWebIDs = ids.get(queryID);
            if (clueWebIDs == null) {
                clueWebIDs = new TreeMap<>();
            }
            sentences = clueWebIDs.get(clueWebID);
            if (sentences == null) {
                sentences = new ArrayList<>();
            }
            HashMap<String, String> sentence = new HashMap<>();
            sentence.put(sentenceID, lines.get(i));
            sentences.add(sentence);
            clueWebIDs.put(clueWebID, sentences);
            ids.put(queryID, clueWebIDs);
        } else {
            throw new IllegalStateException("Wrong ID " + key);
        }

        i++;
    }

    for (Map.Entry entry : ids.entrySet()) {
        TreeMap<Integer, String> value = (TreeMap<Integer, String>) entry.getValue();
        String queryID = (String) entry.getKey();
        QueryResultContainer queryResultContainer = QueryResultContainer
                .fromXML(FileUtils.readFileToString(new File(inputDir, queryID), "utf-8"));
        for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) {
            for (Map.Entry val : value.entrySet()) {
                String clueWebID = (String) val.getKey();
                if (clueWebID.equals(rankedResults.clueWebID)) {
                    List<QueryResultContainer.SingleSentenceRelevanceVote> goldEstimatedLabels = new ArrayList<>();
                    List<QueryResultContainer.SingleSentenceRelevanceVote> turkersVotes = new ArrayList<>();
                    int size = 0;
                    int hitSize = 0;
                    String hitID = "";
                    for (QueryResultContainer.MTurkRelevanceVote vote : rankedResults.mTurkRelevanceVotes) {
                        if (!hitID.equals(vote.hitID)) {
                            hitID = vote.hitID;
                            hitSize = vote.singleSentenceRelevanceVotes.size();
                            size = size + hitSize;
                            turkersVotes.addAll(vote.singleSentenceRelevanceVotes);
                        } else {
                            if (vote.singleSentenceRelevanceVotes.size() != hitSize) {
                                hitSize = vote.singleSentenceRelevanceVotes.size();
                                size = size + hitSize;
                                turkersVotes.addAll(vote.singleSentenceRelevanceVotes);
                            }
                        }
                    }
                    ArrayList<HashMap<String, String>> sentenceList = (ArrayList<HashMap<String, String>>) val
                            .getValue();
                    if (sentenceList.size() != turkersVotes.size()) {
                        try {
                            throw new IllegalStateException("Expected size of annotations is "
                                    + turkersVotes.size() + "but found " + sentenceList.size()
                                    + " for document " + rankedResults.clueWebID + " in " + queryID);
                        } catch (IllegalStateException ex) {
                            ex.printStackTrace();
                        }
                    }
                    for (QueryResultContainer.SingleSentenceRelevanceVote s : turkersVotes) {
                        String valSentence = null;
                        for (HashMap<String, String> anno : sentenceList) {
                            if (anno.keySet().contains(s.sentenceID)) {
                                valSentence = anno.get(s.sentenceID);
                            }
                        }
                        QueryResultContainer.SingleSentenceRelevanceVote singleSentenceVote = new QueryResultContainer.SingleSentenceRelevanceVote();
                        singleSentenceVote.sentenceID = s.sentenceID;
                        if (("false").equals(valSentence)) {
                            singleSentenceVote.relevant = "false";
                        } else if (("true").equals(valSentence)) {
                            singleSentenceVote.relevant = "true";
                        } else {
                            throw new IllegalStateException("Annotation value of sentence "
                                    + singleSentenceVote.sentenceID + " equals " + val.getValue());
                        }
                        goldEstimatedLabels.add(singleSentenceVote);
                    }
                    rankedResults.goldEstimatedLabels = goldEstimatedLabels;
                }
            }
        }
        File outputFile = new File(outputDir, queryID);
        FileUtils.writeStringToFile(outputFile, queryResultContainer.toXML(), "utf-8");
        System.out.println("Finished " + outputFile);
    }

    ArrayList<String> annotators = (ArrayList<String>) FileUtils.readLines(new File(file + ".competence"));
    FileWriter fileWriter;
    StringBuilder sb = new StringBuilder();
    for (int j = 0; j < annotatorsIDs.size(); j++) {
        String[] s = annotators.get(0).split("\t");
        Float score = Float.parseFloat(s[j]);
        String turkerID = annotatorsIDs.get(j);
        System.out.println(turkerID + " " + score + " " + countVotesForATurker.get(turkerID));
        sb.append(turkerID).append(" ").append(score).append(" ").append(countVotesForATurker.get(turkerID))
                .append("\n");
    }
    fileWriter = new FileWriter(turkersConfidence);
    fileWriter.append(sb.toString());
    fileWriter.close();

}

From source file:com.cloud.test.utils.SubmitCert.java

public static void main(String[] args) {
    // Parameters
    List<String> argsList = Arrays.asList(args);
    Iterator<String> iter = argsList.iterator();
    while (iter.hasNext()) {
        String arg = iter.next();

        if (arg.equals("-c")) {
            certFileName = iter.next();
        }

        if (arg.equals("-s")) {
            secretKey = iter.next();
        }

        if (arg.equals("-a")) {
            apiKey = iter.next();
        }

        if (arg.equals("-action")) {
            url = "Action=" + iter.next();
        }
    }

    Properties prop = new Properties();
    try {
        prop.load(new FileInputStream("conf/tool.properties"));
    } catch (IOException ex) {
        s_logger.error("Error reading from conf/tool.properties", ex);
        System.exit(2);
    }

    host = prop.getProperty("host");
    port = prop.getProperty("port");

    if (url.equals("Action=SetCertificate") && certFileName == null) {
        s_logger.error("Please set path to certificate (including file name) with -c option");
        System.exit(1);
    }

    if (secretKey == null) {
        s_logger.error("Please set secretkey  with -s option");
        System.exit(1);
    }

    if (apiKey == null) {
        s_logger.error("Please set apikey with -a option");
        System.exit(1);
    }

    if (host == null) {
        s_logger.error("Please set host in tool.properties file");
        System.exit(1);
    }

    if (port == null) {
        s_logger.error("Please set port in tool.properties file");
        System.exit(1);
    }

    TreeMap<String, String> param = new TreeMap<String, String>();

    String req = "GET\n" + host + ":" + prop.getProperty("port") + "\n/" + prop.getProperty("accesspoint")
            + "\n";
    String temp = "";

    if (certFileName != null) {
        cert = readCert(certFileName);
        param.put("cert", cert);
    }

    param.put("AWSAccessKeyId", apiKey);
    param.put("Expires", prop.getProperty("expires"));
    param.put("SignatureMethod", prop.getProperty("signaturemethod"));
    param.put("SignatureVersion", "2");
    param.put("Version", prop.getProperty("version"));

    StringTokenizer str1 = new StringTokenizer(url, "&");
    while (str1.hasMoreTokens()) {
        String newEl = str1.nextToken();
        StringTokenizer str2 = new StringTokenizer(newEl, "=");
        String name = str2.nextToken();
        String value = str2.nextToken();
        param.put(name, value);
    }

    //sort url hash map by key
    Set c = param.entrySet();
    Iterator it = c.iterator();
    while (it.hasNext()) {
        Map.Entry me = (Map.Entry) it.next();
        String key = (String) me.getKey();
        String value = (String) me.getValue();
        try {
            temp = temp + key + "=" + URLEncoder.encode(value, "UTF-8") + "&";
        } catch (Exception ex) {
            s_logger.error("Unable to set parameter " + value + " for the command " + param.get("command"), ex);
        }

    }
    temp = temp.substring(0, temp.length() - 1);
    String requestToSign = req + temp;
    String signature = UtilsForTest.signRequest(requestToSign, secretKey);
    String encodedSignature = "";
    try {
        encodedSignature = URLEncoder.encode(signature, "UTF-8");
    } catch (Exception ex) {
        ex.printStackTrace();
    }

    String url = "http://" + host + ":" + prop.getProperty("port") + "/" + prop.getProperty("accesspoint") + "?"
            + temp + "&Signature=" + encodedSignature;
    s_logger.info("Sending request with url:  " + url + "\n");
    sendRequest(url);
}

From source file:cht.Parser.java

public static void main(String[] args) throws IOException {

    // TODO get from google drive
    boolean isUnicode = false;
    boolean isRemoveInputFileOnComplete = false;
    int rowNum;
    int colNum;
    Gson gson = new GsonBuilder().setPrettyPrinting().create();

    Properties prop = new Properties();

    try {
        prop.load(new FileInputStream("config.txt"));
    } catch (IOException ex) {
        ex.printStackTrace();
    }

    String inputFilePath = prop.getProperty("inputFile");
    String outputDirectory = prop.getProperty("outputDirectory");
    System.out.println(outputDirectory);
    // optional
    String unicode = prop.getProperty("unicode");
    String removeInputFileOnComplete = prop.getProperty("removeInputFileOnComplete");

    inputFilePath = inputFilePath.trim();
    outputDirectory = outputDirectory.trim();

    if (unicode != null) {
        isUnicode = Boolean.parseBoolean(unicode.trim());
    }
    if (removeInputFileOnComplete != null) {
        isRemoveInputFileOnComplete = Boolean.parseBoolean(removeInputFileOnComplete.trim());
    }

    Writer out = null;
    FileInputStream in = null;
    final String newLine = System.getProperty("line.separator").toString();
    final String separator = File.separator;
    try {
        in = new FileInputStream(inputFilePath);

        Workbook workbook = new XSSFWorkbook(in);

        Sheet sheet = workbook.getSheetAt(0);

        rowNum = sheet.getLastRowNum() + 1;
        colNum = sheet.getRow(0).getPhysicalNumberOfCells();

        for (int j = 1; j < colNum; ++j) {
            String outputFilename = sheet.getRow(0).getCell(j).getStringCellValue();
            // guess directory
            int slash = outputFilename.indexOf('/');
            if (slash != -1) { // has directory
                outputFilename = outputFilename.substring(0, slash) + separator
                        + outputFilename.substring(slash + 1);
            }

            String outputPath = FilenameUtils.concat(outputDirectory, outputFilename);
            System.out.println("--Writing " + outputPath);
            out = new OutputStreamWriter(new FileOutputStream(outputPath), "UTF-8");
            TreeMap<String, Object> map = new TreeMap<String, Object>();
            for (int i = 1; i < rowNum; i++) {
                try {
                    String key = sheet.getRow(i).getCell(0).getStringCellValue();
                    String value = "";
                    Cell tmp = sheet.getRow(i).getCell(j);
                    if (tmp != null) {
                        // the cell is not empty, so read its string value
                        value = tmp.getStringCellValue();
                    }
                    }
                    if (!key.equals("") && !key.startsWith("#") && !key.startsWith(".")) {
                        value = isUnicode ? StringEscapeUtils.escapeJava(value) : value;

                        int firstdot = key.indexOf(".");
                        String keyName, keyAttribute;
                        if (firstdot > 0) {// a.b.c.d 
                            keyName = key.substring(0, firstdot); // a
                            keyAttribute = key.substring(firstdot + 1); // b.c.d
                            TreeMap oldhash = null;
                            Object old = null;
                            if (map.get(keyName) != null) {
                                old = map.get(keyName);
                                if (old instanceof TreeMap == false) {
                                    System.out.println("different type of key:" + key);
                                    continue;
                                }
                                oldhash = (TreeMap) old;
                            } else {
                                oldhash = new TreeMap();
                            }

                            int firstdot2 = keyAttribute.indexOf(".");
                            String rootName, childName;
                            if (firstdot2 > 0) {// c, d.f --> d, f
                                rootName = keyAttribute.substring(0, firstdot2);
                                childName = keyAttribute.substring(firstdot2 + 1);
                            } else {// c, d  -> d, null
                                rootName = keyAttribute;
                                childName = null;
                            }

                            TreeMap<String, Object> object = myPut(oldhash, rootName, childName);
                            map.put(keyName, object);

                        } else {// c, d  -> d, null
                            keyName = key;
                            keyAttribute = null;
                            // simple string mode
                            map.put(key, value);
                        }

                    }

                } catch (Exception e) {
                    // just ingore empty rows
                }

            }
            String json = gson.toJson(map);
            // output json
            out.write(json + newLine);
            out.close();
        }
        in.close();

        System.out.println("\n---Complete!---");
        System.out.println("Read input file from " + inputFilePath);
        System.out.println(colNum - 1 + " output files are generated at " + outputDirectory);
        System.out.println(rowNum + " records are generated for each output file.");
        System.out.println("output file is encoded as unicode? " + (isUnicode ? "yes" : "no"));
        if (isRemoveInputFileOnComplete) {
            File input = new File(inputFilePath);
            input.deleteOnExit();
            System.out.println("Deleted " + inputFilePath);
        }

    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (in != null) {
            in.close();
        }
    }

}

From source file:de.tudarmstadt.ukp.experiments.argumentation.convincingness.sampling.Step5GoldLabelEstimator.java

@SuppressWarnings("unchecked")
public static void main(String[] args) throws Exception {
    String inputDir = args[0];
    File outputDir = new File(args[1]);

    if (!outputDir.exists()) {
        outputDir.mkdirs();
    }

    // we will process only a subset first
    List<AnnotatedArgumentPair> allArgumentPairs = new ArrayList<>();

    Collection<File> files = IOHelper.listXmlFiles(new File(inputDir));

    for (File file : files) {
        allArgumentPairs.addAll((List<AnnotatedArgumentPair>) XStreamTools.getXStream().fromXML(file));
    }

    // collect turkers and csv
    List<String> turkerIDs = extractAndSortTurkerIDs(allArgumentPairs);
    String preparedCSV = prepareCSV(allArgumentPairs, turkerIDs);

    // save CSV and run MACE
    Path tmpDir = Files.createTempDirectory("mace");
    File maceInputFile = new File(tmpDir.toFile(), "input.csv");
    FileUtils.writeStringToFile(maceInputFile, preparedCSV, "utf-8");

    File outputPredictions = new File(tmpDir.toFile(), "predictions.txt");
    File outputCompetence = new File(tmpDir.toFile(), "competence.txt");

    // run MACE
    MACE.main(new String[] { "--iterations", "500", "--threshold", String.valueOf(MACE_THRESHOLD), "--restarts",
            "50", "--outputPredictions", outputPredictions.getAbsolutePath(), "--outputCompetence",
            outputCompetence.getAbsolutePath(), maceInputFile.getAbsolutePath() });

    // read back the predictions and competence
    List<String> predictions = FileUtils.readLines(outputPredictions, "utf-8");

    // check the output
    if (predictions.size() != allArgumentPairs.size()) {
        throw new IllegalStateException("Wrong size of the predicted file; expected " + allArgumentPairs.size()
                + " lines but was " + predictions.size());
    }

    String competenceRaw = FileUtils.readFileToString(outputCompetence, "utf-8");
    String[] competence = competenceRaw.split("\t");
    if (competence.length != turkerIDs.size()) {
        throw new IllegalStateException(
                "Expected " + turkerIDs.size() + " competence number, got " + competence.length);
    }

    // rank turkers by competence
    Map<String, Double> turkerIDCompetenceMap = new TreeMap<>();
    for (int i = 0; i < turkerIDs.size(); i++) {
        turkerIDCompetenceMap.put(turkerIDs.get(i), Double.valueOf(competence[i]));
    }

    // sort by value descending
    Map<String, Double> sortedCompetences = IOHelper.sortByValue(turkerIDCompetenceMap, false);
    System.out.println("Sorted turker competences: " + sortedCompetences);

    // assign the gold label and competence

    for (int i = 0; i < allArgumentPairs.size(); i++) {
        AnnotatedArgumentPair annotatedArgumentPair = allArgumentPairs.get(i);
        String goldLabel = predictions.get(i).trim();

        // might be empty
        if (!goldLabel.isEmpty()) {
            // so far the gold label has format aXXX_aYYY_a1, aXXX_aYYY_a2, or aXXX_aYYY_equal
            // strip now only the gold label
            annotatedArgumentPair.setGoldLabel(goldLabel);
        }

        // update turker competence
        for (AnnotatedArgumentPair.MTurkAssignment assignment : annotatedArgumentPair.mTurkAssignments) {
            String turkID = assignment.getTurkID();

            int turkRank = getTurkerRank(turkID, sortedCompetences);
            assignment.setTurkRank(turkRank);

            double turkCompetence = turkerIDCompetenceMap.get(turkID);
            assignment.setTurkCompetence(turkCompetence);
        }
    }

    // now sort the data back according to their original file name
    Map<String, List<AnnotatedArgumentPair>> fileNameAnnotatedPairsMap = new HashMap<>();
    for (AnnotatedArgumentPair argumentPair : allArgumentPairs) {
        String fileName = IOHelper.createFileName(argumentPair.getDebateMetaData(),
                argumentPair.getArg1().getStance());

        if (!fileNameAnnotatedPairsMap.containsKey(fileName)) {
            fileNameAnnotatedPairsMap.put(fileName, new ArrayList<AnnotatedArgumentPair>());
        }

        fileNameAnnotatedPairsMap.get(fileName).add(argumentPair);
    }

    // and save them to the output file
    for (Map.Entry<String, List<AnnotatedArgumentPair>> entry : fileNameAnnotatedPairsMap.entrySet()) {
        String fileName = entry.getKey();
        List<AnnotatedArgumentPair> argumentPairs = entry.getValue();

        File outputFile = new File(outputDir, fileName);

        // and save all sampled pairs into a XML file
        XStreamTools.toXML(argumentPairs, outputFile);

        System.out.println("Saved " + argumentPairs.size() + " pairs to " + outputFile);
    }

}

From source file:tpt.dbweb.cat.evaluation.ComparisonResult.java

public static void main(String[] args) throws JsonGenerationException, JsonMappingException, IOException {
    ComparisonResult result = new ComparisonResult();
    result.docidToMetricToResult.computeIfAbsent("doc", k -> new TreeMap<>()).put("part",
            new ValueEvaluationStatistics(Fraction.ONE, Fraction.ONE));

    System.out.println(mapper.writeValueAsString(new Fraction(1.0, 2.0)));
    String ves = mapper.writeValueAsString(new ValueEvaluationStatistics(Fraction.ONE, new Fraction(1.0, 2.0)));
    System.out.println(ves);

    System.out.println(mapper.readValue(ves, ValueEvaluationStatistics.class));

    result.write(Paths.get("/tmp/test.json"));
    read(Paths.get("/tmp/test.json"));
}

From source file:com.github.fritaly.graphml4j.samples.GradleDependencies.java

public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>", GradleDependencies.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(GradleDependencies.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the node identifiers per depth inside the
        // dependency graph (the topmost dependency is the first one in the
        // stack)
        final Stack<String> parentIds = new Stack<String>();

        // Open the graph
        graphWriter.graph();

        // Map storing the node identifiers per label
        final Map<String, String> nodeIdsByLabel = new TreeMap<String, String>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary node ids
            while (depth <= parentIds.size()) {
                parentIds.pop();
            }

            // Compute a nice label from the dependency (group, artifact,
            // version) tuple
            final String label = computeLabel(line);

            // Has this dependency already been added to the graph ?
            if (!nodeIdsByLabel.containsKey(label)) {
                // No, add the node
                nodeIdsByLabel.put(label, graphWriter.node(label));
            }

            final String nodeId = nodeIdsByLabel.get(label);

            parentIds.push(nodeId);

            if (parentIds.size() > 1) {
                // Generate an edge between the current node and its parent
                graphWriter.edge(parentIds.get(parentIds.size() - 2), nodeId);
            }
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources
        graphWriter.close();
        fileWriter.close();
        lineReader.close();
        reader.close();
    }
}