Example usage for java.util Map get

List of usage examples for java.util Map get

Introduction

On this page you can find example usage for java.util Map.get.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
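
For quick reference, here is a minimal, self-contained sketch of get in action (the map contents and keys below are hypothetical, chosen only for illustration):

Map<String, Integer> ages = new HashMap<>();
ages.put("alice", 30);

Integer known = ages.get("alice"); // 30
Integer missing = ages.get("bob"); // null: no mapping for "bob"

// The parameter type is Object rather than K, so any object may be passed;
// with HashMap, an absent key simply yields null.
// Because get may return null, guard before unboxing, or supply a fallback:
int safe = ages.getOrDefault("bob", 0); // Java 8+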

Usage

From source file:com.thed.zapi.cloud.sample.CycleExecutionReportByVersion.java

public static void main(String[] args) throws JSONException, URISyntaxException, ParseException, IOException {

    String API_GET_EXECUTIONS = "{SERVER}/public/rest/api/1.0/executions/search/cycle/";
    String API_GET_CYCLES = "{SERVER}/public/rest/api/1.0/cycles/search?";
    // Delimiter used in CSV file
    final String NEW_LINE_SEPARATOR = "\n";
    final String fileName = "F:\\cycleExecutionReport.csv";

    /** Declare the JIRA/Zephyr URLs and the access and secret keys */
    // JIRA Cloud URL of the instance
    String jiraBaseURL = "https://demo.atlassian.net";
    // Replace <ZAPI_Cloud_URL> with the Zephyr base URL shared with the user for the ZAPI Cloud installation
    String zephyrBaseUrl = "<ZAPI_Cloud_URL>";
    // Zephyr accessKey, available from the Add-ons >> ZAPI section
    String accessKey = "YjE2MjdjMGEtNzExNy0zYjY1LWFkMzQtNjcwMDM3OTljFkbWluIGFkbWlu";
    // Zephyr secretKey, available from the Add-ons >> ZAPI section
    String secretKey = "qufnbimi96Ob2hq3ISF08yZ8Qw4c1eHGeGlk";

    /** Declare parameter values here */
    String userName = "admin";
    String versionId = "-1";
    String projectId = "10100";
    String projectName = "Support";
    String versionName = "Unscheduled";

    ZFJCloudRestClient client = ZFJCloudRestClient.restBuilder(zephyrBaseUrl, accessKey, secretKey, userName)
            .build();
    /**
     * Get List of Cycles by Project and Version
     */

    final String getCyclesUri = API_GET_CYCLES.replace("{SERVER}", zephyrBaseUrl) + "projectId=" + projectId
            + "&versionId=" + versionId;

    Map<String, String> cycles = getCyclesByProjectVersion(getCyclesUri, client, accessKey);
    // System.out.println("cycles :"+ cycles.toString());

    /**
     * Iterating over the Cycles and writing the report to CSV
     * 
     */

    FileWriter fileWriter = null;
    System.out.println("Writing CSV file.....");
    try {
        fileWriter = new FileWriter(fileName);

        // Write the CSV file header

        fileWriter.append("Cycle Execution Report By Version and Project");
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("PROJECT:" + "," + projectName);
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("VERSION:" + "," + versionName);
        fileWriter.append(NEW_LINE_SEPARATOR);

        JSONArray executions;
        int totalUnexecutedCount = 0;
        int totalExecutionCount = 0;

        for (String key : cycles.keySet()) {
            int executionCount = 0;
            int unexecutedCount = 0;
            final String getExecutionsUri = API_GET_EXECUTIONS.replace("{SERVER}", zephyrBaseUrl) + key
                    + "?projectId=" + projectId + "&versionId=" + versionId;
            fileWriter.append("Cycle:" + "," + cycles.get(key));
            fileWriter.append(NEW_LINE_SEPARATOR);
            executions = getExecutionsByCycleId(getExecutionsUri, client, accessKey);
            // System.out.println("executions :" + executions.toString());

            HashMap<String, Integer> counter = new HashMap<String, Integer>();

            String[] statusName = new String[executions.length()];
            for (int i = 0; i < executions.length(); i++) {
                JSONObject executionObj = executions.getJSONObject(i).getJSONObject("execution");
                // System.out.println("executionObj
                // "+executionObj.toString());
                JSONObject statusObj = executionObj.getJSONObject("status");
                // System.out.println("statusObj :"+statusObj.toString());
                statusName[i] = statusObj.getString("name");
            }

            if (statusName.length != 0) {
                // System.out.println(statusName.toString());
                for (String a : statusName) {
                    if (counter.containsKey(a)) {
                        int oldValue = counter.get(a);
                        counter.put(a, oldValue + 1);
                    } else {
                        counter.put(a, 1);
                    }
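                    // Note: on Java 8+ this if/else can collapse to counter.merge(a, 1, Integer::sum).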
                }
                for (String status : counter.keySet()) {
                    fileWriter.append(" " + "," + " " + "," + status + "," + counter.get(status));
                    fileWriter.append(NEW_LINE_SEPARATOR);
                    if (status.equalsIgnoreCase("UNEXECUTED")) {
                        unexecutedCount += counter.get(status);
                    } else {
                        executionCount += counter.get(status);
                    }

                }
            }
            totalExecutionCount += executionCount;
            totalUnexecutedCount += unexecutedCount;

            fileWriter.append(NEW_LINE_SEPARATOR);
        }

        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL CYCLES:" + "," + cycles.size());
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL EXECUTIONS:" + "," + totalExecutionCount);
        fileWriter.append(NEW_LINE_SEPARATOR);
        fileWriter.append("TOTAL ASSIGNED:" + "," + (totalUnexecutedCount + totalExecutionCount));

        System.out.println("CSV file was created successfully !!!");
    } catch (Exception e) {
        System.out.println("Error in CsvFileWriter !!!");
        e.printStackTrace();
    } finally {
        try {
            // fileWriter may still be null if the FileWriter constructor failed.
            if (fileWriter != null) {
                fileWriter.flush();
                fileWriter.close();
            }
        } catch (IOException e) {
            System.out.println("Error while flushing/closing fileWriter !!!");
            e.printStackTrace();
        }
    }

}

From source file:edu.msu.cme.rdp.taxatree.TreeBuilder.java

public static void main(String[] args) throws IOException {
    if (args.length != 3) {
        System.err.println("USAGE: TreeBuilder <idmapping> <merges.bin> <newick_out>");
        return;
    }

    IdMapping<Integer> idMapping = IdMapping.fromFile(new File(args[0]));
    DataInputStream mergeStream = new DataInputStream(new BufferedInputStream(new FileInputStream(args[1])));
    TaxonHolder lastMerged = null;
    int taxid = 0;
    final Map<Integer, Double> distMap = new HashMap<>();
    Map<Integer, TaxonHolder> taxonMap = new HashMap<>();

    try {
        while (true) {
            if (mergeStream.readBoolean()) { // Singleton
                int cid = mergeStream.readInt();
                int intId = mergeStream.readInt();
                TaxonHolder<Taxon> holder;

                List<String> seqids = idMapping.getIds(intId);
                if (seqids.size() == 1) {
                    holder = new TaxonHolder(new Taxon(taxid++, seqids.get(0), ""));
                } else {
                    holder = new TaxonHolder(new Taxon(taxid++, "", ""));
                    for (String seqid : seqids) {
                        int id = taxid++;
                        distMap.put(id, 0.0);
                        TaxonHolder th = new TaxonHolder(new Taxon(id, seqid, ""));
                        th.setParent(holder);
                        holder.addChild(th);
                    }
                }

                lastMerged = holder;
                taxonMap.put(cid, holder);
            } else {
                int ci = mergeStream.readInt();
                int cj = mergeStream.readInt();
                int ck = mergeStream.readInt();
                double dist = (double) mergeStream.readInt() / DistanceCalculator.MULTIPLIER;

                TaxonHolder holder = new TaxonHolder(new Taxon(taxid++, "", ""));

                taxonMap.put(ck, holder);
                holder.addChild(taxonMap.get(ci));
                taxonMap.get(ci).setParent(holder);
                distMap.put(ci, dist);
                holder.addChild(taxonMap.get(cj));
                taxonMap.get(cj).setParent(holder);
                distMap.put(cj, dist);

                lastMerged = holder;
            }
        }
    } catch (EOFException e) {
        // Reaching EOF means the merge stream is exhausted; fall through.
    }

    if (lastMerged == null) {
        throw new IOException("No merges in file");
    }

    PrintStream newickTreeOut = new PrintStream(new File(args[2]));
    NewickPrintVisitor visitor = new NewickPrintVisitor(newickTreeOut, false, new NewickDistanceFactory() {

        public float getDistance(int i) {
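            // distMap.get returns null for ids without a recorded distance, which
            // would NPE here; every id visited by the Newick printer is expected to be present.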
            return distMap.get(i).floatValue();
        }

    });

    lastMerged.biDirectionDepthFirst(visitor);
    newickTreeOut.close();
}

From source file:edu.upenn.cis.orchestra.workloadgenerator.Generator.java

/**
 * Here's what happens in each iteration:
 * <ol>
 * <li>Add in new peers.</li>
 * <li>Delete peers, but not any added in step 1.</li>
 * <li>Add bypasses to the graph that steps 1 and 2 generated.</li>
 * <li>Delete bypasses from the graph that steps 1 and 2, but not 3,
 * generated.</li>
 * </ol>
 * It may have been equivalent, and simpler, to do it like this:
 * <ol>
 * <li>Delete peers.</li>
 * <li>Add peers.</li>
 * <li>Delete bypasses.</li>
 * <li>Add bypasses.</li>
 * </ol>
 * 
 * @param args command line arguments. See <code>buildOptions(...)</code>.
 */
public static void main(String[] args) {
    try {
        Map<String, Object> params = parseCommandLine(args);
        Generator generator = new Generator(params, (Boolean) params.get("inout"));
        List<List<Integer>> cycles = new ArrayList<List<Integer>>();
        generator.generate();
        MetadataXml metadataXml = new MetadataXml(params, (Boolean) params.get("inout"));
        metadataXml.metadataXml(generator.getLogicalSchemas(), generator.getPeers(), generator.getMappings());

        Generator curGenerator = generator;
        Generator prevGenerator = null;
        for (int i = 0; i < (Integer) params.get("iterations"); i++) {
            prevGenerator = curGenerator;
            curGenerator = new Generator(params, (Boolean) params.get("inout"), prevGenerator.getPeers().size(),
                    prevGenerator.getPeers().size() + (Integer) params.get("addPeers"), prevGenerator);

            curGenerator.generate(i + 1);
            curGenerator.deletePeers((Integer) params.get("deletePeers"));
            curGenerator.addAndDeleteBypasses((Integer) params.get("addBypasses"),
                    (Integer) params.get("deleteBypasses"));

            metadataXml.metadataXml(curGenerator.getLogicalSchemas(), curGenerator.getPeers(),
                    curGenerator.getMappings(), String.valueOf(i + 1));

            // System.out.println("logical schemas: "
            // + curGenerator.getLogicalSchemas() + "\n" + "peers: "
            // + curGenerator.getPeers() + "\n");
            // for (int j = 0; j < curGenerator._mappings.size(); j++) {
            // System.out.println("mappings[" + j + "]: "
            // + curGenerator._mappings.get(j));
            // }
        }
        curGenerator.create(curGenerator);
        curGenerator.fill();
        curGenerator.destroy();
        curGenerator.cycles(cycles);

        curGenerator._journal.write(new FileWriter(params.get("filename").toString() + ".schemaDeltas"),
                params);
    } catch (Throwable t) {
        t.printStackTrace(System.out);
        System.exit(-1);
    }
}

From source file:eu.smartfp7.foursquare.AttendanceCrawler.java

/**
 * The main method takes any number of cities as arguments, then initializes
 * the specific crawling of all the trending venues of these cities.
 * The trending venues must have been previously identified using the `DownloadPages`
 * program.
 * 
 * Current valid cities are: london, amsterdam, goldcoast, sanfrancisco.
 * 
 */
public static void main(String[] args) throws Exception {
    Settings settings = Settings.getInstance();
    String folder = settings.getFolder();

    // We keep info and error logs, so that we know what happened in case
    // of incoherence in the time series.
    Map<String, FileWriter> info_logs = new HashMap<String, FileWriter>();
    Map<String, FileWriter> error_logs = new HashMap<String, FileWriter>();

    // For each city we monitor, we store the venue IDs that we got from
    // a previous crawl.
    Map<String, Collection<String>> city_venues = new HashMap<String, Collection<String>>();

    // Contains the epoch time when the last API call was made for each
    // venue. Ensures that we get data only once each hour.
    Map<String, Long> venue_last_call = new HashMap<String, Long>();

    // Contains the epoch time when we last checked if time series were broken
    // for each city.
    // We do these checks once every day before the batch forecasting begins.
    Map<String, Long> sanity_checks = new HashMap<String, Long>();

    // We also keep in memory the number of checkins for the last hour for
    // each venue.
    Map<String, Integer> venue_last_checkin = new HashMap<String, Integer>();

    Map<Long, Integer> APICallsCount = new HashMap<Long, Integer>();

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    int total_venues = 0;
    long total_calls = 0;
    long time_spent_on_API = 0;

    for (String c : args) {
        settings.checkFileHierarchy(c);

        city_venues.put(c, loadVenues(c));
        total_venues += city_venues.get(c).size();

        info_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "info.log", true));
        error_logs.put(c,
                new FileWriter(folder + c + File.separator + "log" + File.separator + "error.log", true));

        Calendar cal = Calendar.getInstance();

        info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Crawler initialization for " + c + ". "
                + city_venues.get(c).size() + " venues loaded.\n");
        info_logs.get(c).flush();

        // If we interrupted the program for some reason, we can get back
        // the in-memory data.
        // Important: the program must not be interrupted for more than one
        // hour, or we will lose time series data.
        for (String venue_id : city_venues.get(c)) {
            String ts_file = folder + c + File.separator + "attendances_crawl" + File.separator + venue_id
                    + ".ts";

            if (new File(ts_file).exists()) {
                BufferedReader buffer = new BufferedReader(new FileReader(ts_file));
                String mem = null, line = null;
                for (; (line = buffer.readLine()) != null; mem = line)
                    ;
                buffer.close();

                if (mem == null)
                    continue;

                String[] tmp = mem.split(",");
                venue_last_call.put(venue_id, df.parse(tmp[0]).getTime());
                venue_last_checkin.put(venue_id, Integer.parseInt(tmp[3]));

                VenueUtil.fixBrokenTimeSeriesVenue(new File(ts_file));
            } // if
        } // for

        sanity_checks.put(c, cal.getTimeInMillis());
    } // for

    if (total_venues > 5000) {
        System.out.println(
                "Too many venues for a single API account (max 5000).\nPlease create a new Foursquare API account and use its credentials.\nExiting now.");
        return;
    }

    while (true) {

        for (String c : args) {
            // We create a FIFO queue and pop venue IDs one at a time.
            LinkedList<String> city_venues_buffer = new LinkedList<String>(city_venues.get(c));
            String venue_id = null;

            // Artificial wait to avoid processors looping at 100% of their capacity
            // when there are no more venues to crawl for the current hour.
            Thread.sleep(3000);

            while ((venue_id = city_venues_buffer.pollFirst()) != null) {
                // We get the current time according to the city's time zone
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.MILLISECOND,
                        TimeZone.getTimeZone(settings.getCityTimezone(c)).getOffset(cal.getTime().getTime())
                                - Calendar.getInstance().getTimeZone().getOffset(cal.getTime().getTime()));
                //TimeZone.getTimeZone("Europe/London").getOffset(cal.getTime().getTime()));

                long current_time = DateUtils.truncate(cal.getTime(), Calendar.HOUR).getTime();

                // We query Foursquare only once per hour per venue.
                if (venue_last_call.get(venue_id) != null
                        && current_time < venue_last_call.get(venue_id) + 3600000)
                    continue;

                intelligentWait(total_venues, cal.getTime().getTime(),
                        (total_calls == 0 ? 0 : Math.round(time_spent_on_API / total_calls)));

                Venue venue = null;

                try {
                    long beforeCall = System.currentTimeMillis();
                    venue = new Venue(getFoursquareVenueById(venue_id, c));

                    // If there is no last call, this is the beginning of the time series
                    // for this venue. We get the number of people "here now" to initialize
                    // the series.
                    if (venue_last_call.get(venue_id) == null) {
                        /** TODO: by doing this, we keep a representation of the venue dating from the
                         *  beginning of the specific crawl. We might want to change this and update
                         *  this file once in a while.
                         */
                        FileWriter info = new FileWriter(folder + c + File.separator + "foursquare_venues"
                                + File.separator + venue_id + ".info");
                        info.write(venue.getFoursquareJson());
                        info.close();

                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts");
                        out.write("Date,here_now,hour_checkins,total_checkins\n");
                        out.write(df.format(current_time) + "," + venue.getHereNow() + "," + venue.getHereNow()
                                + "," + venue.getCheckincount() + "\n");
                        out.close();
                    } else {
                        FileWriter out = new FileWriter(folder + c + File.separator + "attendances_crawl"
                                + File.separator + venue_id + ".ts", true);
                        int checks = venue.getCheckincount() - venue_last_checkin.get(venue_id);
                        out.write(df.format(current_time) + "," + venue.getHereNow() + ","
                                + Integer.toString(checks) + "," + venue.getCheckincount() + "\n");
                        out.close();
                    }

                    if (APICallsCount.get(current_time) == null)
                        APICallsCount.put(current_time, 1);
                    else
                        APICallsCount.put(current_time, APICallsCount.get(current_time) + 1);

                    total_calls++;

                    venue_last_call.put(venue_id, current_time);
                    venue_last_checkin.put(venue_id, venue.getCheckincount());

                    time_spent_on_API += System.currentTimeMillis() - beforeCall;
                } catch (Exception e) {
                    // If something bad happens (crawler not available, IO error, ...), we put the
                    // venue_id in the FIFO queue so that it gets reevaluated later.
                    //e.printStackTrace();
                    error_logs.get(c)
                            .write("[" + df.format(cal.getTime().getTime()) + "] Error with venue " + venue_id
                                    + " (" + e.getMessage() + "). " + APICallsCount.get(current_time)
                                    + " API calls so far this hour, " + city_venues_buffer.size()
                                    + " venues remaining in the buffer.\n");
                    error_logs.get(c).flush();

                    System.out.println("[" + df.format(cal.getTime().getTime()) + "] " + c + " -- "
                            + APICallsCount.get(current_time) + " API calls // " + city_venues_buffer.size()
                            + " venues remaining " + " (" + e.getMessage() + ")");

                    if (e instanceof FoursquareAPIException)
                        if (((FoursquareAPIException) e).getHttp_code().equals("400")
                                && ((FoursquareAPIException) e).getError_detail()
                                        .equals("Venue " + venue_id + " has been deleted")) {
                            city_venues.get(c).remove(venue_id);
                            removeVenue(venue_id, c);
                        } else
                            city_venues_buffer.add(venue_id);

                    continue;
                }
            } // while

            // Every day between 0am and 2am, we repair all the broken time series (if there
            // is something to repair).
            Calendar cal = Calendar.getInstance();
            if (city_venues_buffer.peekFirst() == null
                    && (cal.getTimeInMillis() - sanity_checks.get(c)) >= 86400000
                    && cal.get(Calendar.HOUR_OF_DAY) < 2) {
                VenueUtil.fixBrokenTimeSeriesCity(c, folder);
                sanity_checks.put(c, cal.getTimeInMillis());
                info_logs.get(c).write("[" + df.format(cal.getTime()) + "] Sanity check OK.\n");
                info_logs.get(c).flush();
            }
        } // for
    } // while
}

From source file:org.kuali.student.git.importer.ConvertBuildTagBranchesToGitTags.java

/**
 * @param args
 */
public static void main(String[] args) {

    if (args.length < 3 || args.length > 6) {
        System.err.println("USAGE: <git repository> <bare> <ref mode> [<ref prefix> <username> <password>]");
        System.err.println("\t<bare> : 0 (false) or 1 (true)");
        System.err.println("\t<ref mode> : local or name of remote");
        System.err.println("\t<ref prefix> : refs/heads (default) or say refs/remotes/origin (test clone)");
        System.exit(-1);
    }

    boolean bare = false;

    if (args[1].trim().equals("1")) {
        bare = true;
    }

    String remoteName = args[2].trim();

    String refPrefix = Constants.R_HEADS;

    // The optional arguments are positional: ref prefix, then username, then password.
    if (args.length >= 4)
        refPrefix = args[3].trim();

    String userName = null;
    String password = null;

    if (args.length >= 5)
        userName = args[4].trim();

    if (args.length == 6)
        password = args[5].trim();

    try {

        Repository repo = GitRepositoryUtils.buildFileRepository(new File(args[0]).getAbsoluteFile(), false,
                bare);

        Git git = new Git(repo);

        ObjectInserter objectInserter = repo.newObjectInserter();

        Collection<Ref> repositoryHeads = repo.getRefDatabase().getRefs(refPrefix).values();

        RevWalk rw = new RevWalk(repo);

        Map<String, ObjectId> tagNameToTagId = new HashMap<>();

        Map<String, Ref> tagNameToRef = new HashMap<>();

        for (Ref ref : repositoryHeads) {

            String branchName = ref.getName().substring(refPrefix.length() + 1);

            if (branchName.contains("tag") && branchName.contains("builds")) {

                String[] branchParts = branchName.split("_");

                int buildsIndex = ArrayUtils.indexOf(branchParts, "builds");

                String moduleName = StringUtils.join(branchParts, "_", buildsIndex + 1, branchParts.length);

                RevCommit commit = rw.parseCommit(ref.getObjectId());

                ObjectId tag = GitRefUtils.insertTag(moduleName, commit, objectInserter);

                tagNameToTagId.put(moduleName, tag);

                tagNameToRef.put(moduleName, ref);

            }

        }

        BatchRefUpdate batch = repo.getRefDatabase().newBatchUpdate();

        List<RefSpec> branchesToDelete = new ArrayList<>();

        for (Entry<String, ObjectId> entry : tagNameToTagId.entrySet()) {

            String tagName = entry.getKey();

            // create the reference to the tag object
            batch.addCommand(
                    new ReceiveCommand(null, entry.getValue(), Constants.R_TAGS + tagName, Type.CREATE));

            // delete the original branch object

            Ref branch = tagNameToRef.get(entry.getKey());

            if (remoteName.equals("local")) {

                batch.addCommand(new ReceiveCommand(branch.getObjectId(), null, branch.getName(), Type.DELETE));

            } else {
                String adjustedBranchName = branch.getName().substring(refPrefix.length() + 1);

                branchesToDelete.add(new RefSpec(":" + Constants.R_HEADS + adjustedBranchName));
            }

        }

        // create the tags
        batch.execute(rw, new TextProgressMonitor());

        if (!remoteName.equals("local")) {
            // push the tag to the remote right now
            PushCommand pushCommand = git.push().setRemote(remoteName).setPushTags()
                    .setProgressMonitor(new TextProgressMonitor());

            if (userName != null)
                pushCommand.setCredentialsProvider(new UsernamePasswordCredentialsProvider(userName, password));

            Iterable<PushResult> results = pushCommand.call();

            for (PushResult pushResult : results) {

                // Check the status of each remote ref update to detect push failures;
                // a PushResult itself never equals a RefUpdate.Result.
                for (RemoteRefUpdate update : pushResult.getRemoteUpdates()) {
                    if (update.getStatus() != RemoteRefUpdate.Status.OK
                            && update.getStatus() != RemoteRefUpdate.Status.UP_TO_DATE) {
                        log.warn("failed to push tag " + pushResult.getMessages());
                    }
                }
            }

            // delete the branches from the remote
            results = git.push().setRemote(remoteName).setRefSpecs(branchesToDelete)
                    .setProgressMonitor(new TextProgressMonitor()).call();

            log.info("");

        }

        //         Result result = GitRefUtils.createTagReference(repo, moduleName, tag);
        //         
        //         if (!result.equals(Result.NEW)) {
        //            log.warn("failed to create tag {} for branch {}", moduleName, branchName);
        //            continue;
        //         }
        //         
        //         if (deleteMode) {
        //         result = GitRefUtils.deleteRef(repo, ref);
        //   
        //         if (!result.equals(Result.NEW)) {
        //            log.warn("failed to delete branch {}", branchName);
        //            continue;
        //         }

        objectInserter.release();

        rw.release();

    } catch (Exception e) {

        log.error("unexpected Exception ", e);
    }
}

From source file:com.github.fritaly.graphml4j.samples.GradleDependencies.java

public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.out.println(String.format("%s <output-file>", GradleDependencies.class.getSimpleName()));
        System.exit(1);
    }

    final File file = new File(args[0]);

    System.out.println("Writing GraphML file to " + file.getAbsolutePath() + " ...");

    FileWriter fileWriter = null;
    GraphMLWriter graphWriter = null;
    Reader reader = null;
    LineNumberReader lineReader = null;

    try {
        fileWriter = new FileWriter(file);
        graphWriter = new GraphMLWriter(fileWriter);

        // Customize the rendering of nodes
        final NodeStyle nodeStyle = graphWriter.getNodeStyle();
        nodeStyle.setWidth(250.0f);

        graphWriter.setNodeStyle(nodeStyle);

        // The dependency graph has been generated by Gradle with the
        // command "gradle dependencies". The output of this command has
        // been saved to a text file which will be parsed to rebuild the
        // dependency graph
        reader = new InputStreamReader(GradleDependencies.class.getResourceAsStream("gradle-dependencies.txt"));
        lineReader = new LineNumberReader(reader);

        String line = null;

        // Stack containing the node identifiers per depth inside the
        // dependency graph (the topmost dependency is the first one in the
        // stack)
        final Stack<String> parentIds = new Stack<String>();

        // Open the graph
        graphWriter.graph();

        // Map storing the node identifiers per label
        final Map<String, String> nodeIdsByLabel = new TreeMap<String, String>();

        while ((line = lineReader.readLine()) != null) {
            // Determine the depth of the current dependency inside the
            // graph. The depth can be inferred from the indentation used by
            // Gradle. Each level of depth adds 5 more characters of
            // indentation
            final int initialLength = line.length();

            // Remove the strings used by Gradle to indent dependencies
            line = StringUtils.replace(line, "+--- ", "");
            line = StringUtils.replace(line, "|    ", "");
            line = StringUtils.replace(line, "\\--- ", "");
            line = StringUtils.replace(line, "     ", "");

            // The depth can easily be inferred now
            final int depth = (initialLength - line.length()) / 5;

            // Remove unnecessary node ids
            while (depth <= parentIds.size()) {
                parentIds.pop();
            }

            // Compute a nice label from the dependency (group, artifact,
            // version) tuple
            final String label = computeLabel(line);

            // Has this dependency already been added to the graph?
            if (!nodeIdsByLabel.containsKey(label)) {
                // No, add the node
                nodeIdsByLabel.put(label, graphWriter.node(label));
            }

            final String nodeId = nodeIdsByLabel.get(label);

            parentIds.push(nodeId);

            if (parentIds.size() > 1) {
                // Generate an edge between the current node and its parent
                graphWriter.edge(parentIds.get(parentIds.size() - 2), nodeId);
            }
        }

        // Close the graph
        graphWriter.closeGraph();

        System.out.println("Done");
    } finally {
        // Calling GraphMLWriter.close() is necessary to dispose the underlying resources.
        // The null checks guard against a failure before the writers were assigned.
        if (graphWriter != null) graphWriter.close();
        if (fileWriter != null) fileWriter.close();
        if (lineReader != null) lineReader.close();
        if (reader != null) reader.close();
    }
}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.util.TempClinicalDataLoader.java

public static void main(String[] args) {
    // first get the db connection properties
    String url = urlSet.get(args[1]);
    String user = args[2];
    String word = args[3];

    // make sure we have the Oracle driver somewhere
    try {
        Class.forName("oracle.jdbc.OracleDriver");
        Class.forName("org.postgresql.Driver");
    } catch (Exception x) {
        System.out.println("Unable to load the driver class!");
        System.exit(0);
    }
    // connect to the database
    try {
        dbConnection = DriverManager.getConnection(url, user, word);
        ClinicalBean.setDBConnection(dbConnection);
    } catch (SQLException x) {
        x.printStackTrace();
        System.exit(1);
    }

    final String xmlList = args[0];
    BufferedReader br = null;
    try {
        final Map<String, String> clinicalFiles = new HashMap<String, String>();
        final Map<String, String> biospecimenFiles = new HashMap<String, String>();
        final Map<String, String> fullFiles = new HashMap<String, String>();

        //noinspection IOResourceOpenedButNotSafelyClosed
        br = new BufferedReader(new FileReader(xmlList));

        // read the file list to get all the files to load
        while (br.ready()) {
            final String[] in = br.readLine().split("\\t");
            String xmlfile = in[0];
            String archive = in[1];

            if (xmlfile.contains("_clinical")) {
                clinicalFiles.put(xmlfile, archive);
            } else if (xmlfile.contains("_biospecimen")) {
                biospecimenFiles.put(xmlfile, archive);
            } else {
                fullFiles.put(xmlfile, archive);
            }
        }

        Date dateAdded = Calendar.getInstance().getTime();

        // NOTE!!! This deletes all data before the load starts, assuming we are re-loading everything.
        // a better way would be to figure out what has changed and load that, or to be able to load multiple versions of the data in the schema
        emptyClinicalTables(user);

        // load any "full" files first -- in case some archives aren't split yet
        for (final String file : fullFiles.keySet()) {
            String archive = fullFiles.get(file);
            System.out.println("Full file " + file + " in " + archive);
            // need to re-instantiate the disease-specific beans for each file
            createDiseaseSpecificBeans(xmlList);
            String disease = getDiseaseName(archive);
            processFullXmlFile(file, archive, disease, dateAdded);

            // memory leak or something... have to commit and close all connections and re-get connection
            // after each file to keep from using too much heap space.  this troubles me, but I have never had
            // the time to figure out why it happens
            resetConnections(url, user, word);
        }

        // now process all clinical files, and insert patients and clinical data
        for (final String clinicalFile : clinicalFiles.keySet()) {
            createDiseaseSpecificBeans(xmlList);
            String archive = clinicalFiles.get(clinicalFile);
            System.out.println("Clinical file " + clinicalFile + " in " + archive);
            String disease = getDiseaseName(archive);
            processClinicalXmlFile(clinicalFile, archive, disease, dateAdded);
            resetConnections(url, user, word);
        }

        // now process biospecimen files
        for (final String biospecimenFile : biospecimenFiles.keySet()) {
            createDiseaseSpecificBeans(xmlList);
            String archive = biospecimenFiles.get(biospecimenFile);
            String disease = getDiseaseName(archive);
            System.out.println("Biospecimen file " + biospecimenFile);
            processBiospecimenXmlFile(biospecimenFile, archive, disease, dateAdded);
            resetConnections(url, user, word);
        }

        // this sets relationships between these clinical tables and data browser tables, since we delete
        // and reload every time
        setForeignKeys();
        dbConnection.commit();
        dbConnection.close();
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(-1);
    } finally {
        IOUtils.closeQuietly(br);
    }
}

From source file:com.techmahindra.vehicletelemetry.chart.CarEventServlet.java

public static void main(String[] args) throws TypeMismatchException {

    List<CarEvent> carData = new LinkedList<>();
    /*carData.add(new CarEvent("city1","model1",41));
    carData.add(new CarEvent("city1","model2",17));
    carData.add(new CarEvent("city2","model1",31));
    carData.add(new CarEvent("city2","model2",39));
            
    carData.add(new CarEvent("Bellevue","model1",47));*/

    /*carData.add(new CarEvent("Seattle","MediumSUV",1038)); 
    carData.add(new CarEvent("Seattle","LargeSUV",2415));
    carData.add(new CarEvent("Seattle","FamilySaloon",2388));
    carData.add(new CarEvent("Seattle","SportsCar",1626)); 
    //aggDrivers.add(new CarEvent("Seattle","sports car",276)); 
    carData.add(new CarEvent("Seattle","Compactcar",204)); 
    carData.add(new CarEvent("Seattle","SmallSUV",1133)); 
    carData.add(new CarEvent("Seattle","StationWagon",1769)); 
    carData.add(new CarEvent("Seattle","CompactCar",839)); 
    carData.add(new CarEvent("Seattle","Hybrid",2603)); 
    carData.add(new CarEvent("Seattle","Coupe",1081)); 
    carData.add(new CarEvent("Seattle","Sedan",2603));
    carData.add(new CarEvent("Seattle","Convertible",1608));
            
    carData.add(new CarEvent("Redmond","MediumSUV",590));
    carData.add(new CarEvent("Redmond","LargeSUV",1407)); 
    carData.add(new CarEvent("Redmond","FamilySaloon",1535)); 
    carData.add(new CarEvent("Redmond","SportsCar",1115)); 
            
    //aggDrivers.add(new CarEvent("Redmond","sports car",102)); 
    carData.add(new CarEvent("Redmond","Compactcar",102)); 
    carData.add(new CarEvent("Redmond","SmallSUV",637)); 
    carData.add(new CarEvent("Redmond","StationWagon",1079));
    carData.add(new CarEvent("Redmond","CompactCar",606));
    carData.add(new CarEvent("Redmond","Hybrid",1635));
    carData.add(new CarEvent("Redmond","Coupe",605)); 
    carData.add(new CarEvent("Redmond","Sedan",1568)); 
    carData.add(new CarEvent("Redmond","Convertible",955)); 
            
    carData.add(new CarEvent("ammamish","SportsCar",1)); 
    carData.add(new CarEvent("ammamish","Sedan",21)); 
            
    carData.add(new CarEvent("ellevue","MediumSUV",778));
    carData.add(new CarEvent("ellevue","LargeSUV",2035));
    carData.add(new CarEvent("ellevue","FamilySaloon",1952)); 
    carData.add(new CarEvent("ellevue","SportsCar",1226));
    carData.add(new CarEvent("ellevue","Compactcar",162));
    //aggDrivers.add(new CarEvent("ellevue","sports car",192)); 
    carData.add(new CarEvent("ellevue","SmallSUV",895));
    carData.add(new CarEvent("ellevue","StationWagon",1469));
    carData.add(new CarEvent("ellevue","CompactCar",629));
    carData.add(new CarEvent("ellevue","Hybrid",1989)); 
    carData.add(new CarEvent("ellevue","Coupe",811)); 
    carData.add(new CarEvent("ellevue","Sedan",2004));
    carData.add(new CarEvent("ellevue","Convertible",1122));*/

    Map<String, List<Map<String, String>>> cityModelsMap = new LinkedHashMap<>();
    for (CarEvent carEvent1 : carData) {
        Map<String, String> modelMap = new HashMap<>();
        List<Map<String, String>> modelCountsList = new ArrayList<>();
        if (!cityModelsMap.containsKey(carEvent1.getCity())) {
            modelMap.put(carEvent1.getModel(), carEvent1.getCount());
            modelCountsList.add(modelMap);
            cityModelsMap.put(carEvent1.getCity(), modelCountsList);
        } else {
            List<Map<String, String>> existingModelCountsList = cityModelsMap.get(carEvent1.getCity());

            modelMap.put(carEvent1.getModel(), carEvent1.getCount());
            modelCountsList.add(modelMap);
            existingModelCountsList.addAll(modelCountsList);
            cityModelsMap.put(carEvent1.getCity(), existingModelCountsList);
        }
    }
    System.out.println("CityModelMap:" + cityModelsMap);
    //CityModelMap:{city1=[{model1=41}, {model11=17}, {model12=36}], city2=[{model2=31}, {model22=37}], city3=[{model3=47}, {model33=31}]}
    DataTable data = new DataTable();
    data.addColumn(new ColumnDescription(CITY_COLUMN, ValueType.TEXT, "city"));
    for (String cityKey : cityModelsMap.keySet()) {
        List<Map<String, String>> existingModelCountsList = cityModelsMap.get(cityKey);
        for (Map existingModelCountMap : existingModelCountsList) {
            Set set = existingModelCountMap.keySet();
            for (Object objModel : set) {
                String model = objModel.toString();
                if (!(data.containsColumn(model))) {
                    data.addColumn(new ColumnDescription(model, ValueType.NUMBER, model));
                    System.out.println("Column added:" + model);
                }
            }
        }
    }

    for (String cityKey : cityModelsMap.keySet()) {
        TableRow row = new TableRow();
        for (ColumnDescription selectionColumn : data.getColumnDescriptions()) {
            String columnName = selectionColumn.getId();
            if (columnName.equals(CITY_COLUMN)) {
                row.addCell(cityKey);
                continue;
            }
            List<Map<String, String>> existingModelCountsList = cityModelsMap.get(cityKey);
            for (Map existingModelCountMap : existingModelCountsList) {
                for (Object objModel : existingModelCountMap.keySet()) {
                    String model = objModel.toString();
                    Integer count = Integer.parseInt(existingModelCountMap.get(objModel).toString());
                    System.out.println("Model :" + model + " Count:" + count);
                    if (columnName.equals(model)) {
                        row.addCell(count);
                        continue;
                    }
                }
            }
        }
        data.addRow(row);
        System.out.println("Adding row");
    }
    System.out.println("Data is now:" + data.toString());

}

From source file:csv.sorting.PrepareWeatherData.java

public static void main(String[] args) throws Exception {

    // Path to read the CSV data from:
    final Path csvStationDataFilePath = FileSystems.getDefault()
            .getPath("C:\\Users\\philipp\\Downloads\\csv\\201503station.txt");
    final Path csvLocalWeatherDataUnsortedFilePath = FileSystems.getDefault()
            .getPath("C:\\Users\\philipp\\Downloads\\csv\\201503hourly.txt");
    final Path csvLocalWeatherDataSortedFilePath = FileSystems.getDefault()
            .getPath("C:\\Users\\philipp\\Downloads\\csv\\201503hourly_sorted.txt");

    // A map between the WBAN and Station for faster Lookups:
    final Map<String, Station> stationMap = getStationMap(csvStationDataFilePath);

    // Holds the List of Sorted DateTimes (including ZoneOffset):
    List<Integer> indices = new ArrayList<>();

    // Comparator for sorting the File:
    Comparator<OffsetDateTime> byMeasurementTime = (e1, e2) -> e1.compareTo(e2);

    // Get the sorted indices from the stream of LocalWeatherData Elements:
    try (Stream<CsvMappingResult<csv.model.LocalWeatherData>> stream = getLocalWeatherData(
            csvLocalWeatherDataUnsortedFilePath)) {

        // Holds the current line index, when processing the input Stream:
        AtomicInteger currentIndex = new AtomicInteger(1);

        // We want to get a list of indices, which sorts the CSV file by measurement time:
        indices = stream
                // Skip the CSV Header:
                .skip(1)
                // Start by enumerating ALL mapping results:
                .map(x -> new ImmutablePair<>(currentIndex.getAndAdd(1), x))
                // Then only take those lines, that are actually valid:
                .filter(x -> x.getRight().isValid())
                // Now take the parsed entity from the CsvMappingResult:
                .map(x -> new ImmutablePair<>(x.getLeft(), x.getRight().getResult()))
                // Take only those measurements, that are also available in the list of stations:
                .filter(x -> stationMap.containsKey(x.getRight().getWban()))
                // Get the OffsetDateTime from the LocalWeatherData, which includes the ZoneOffset of the Station:
                .map(x -> {
                    // Get the matching station:
                    csv.model.Station station = stationMap.get(x.getRight().getWban());
                    // Calculate the OffsetDateTime from the given measurement:
                    OffsetDateTime measurementTime = OffsetDateTime.of(x.getRight().getDate(),
                            x.getRight().getTime(), ZoneOffset.ofHours(0));
                    // Build the Immutable pair with the Index again:
                    return new ImmutablePair<>(x.getLeft(), measurementTime);
                })
                // Now sort the Measurements by their Timestamp:
                .sorted((x, y) -> byMeasurementTime.compare(x.getRight(), y.getRight()))
                // Take only the Index:
                .map(x -> x.getLeft())
                // And turn it into a List:
                .collect(Collectors.toList());
    }

    // Now write the file sorted by the computed line indices:
    writeSortedFileByIndices(csvLocalWeatherDataUnsortedFilePath, indices, csvLocalWeatherDataSortedFilePath);
}

From source file:edu.oregonstate.eecs.mcplan.rl.QLearner.java

public static void main(final String[] argv) {
    final RandomGenerator rng = new MersenneTwister(43);

    final int Nother_taxis = 0;
    final double slip = 0.0;
    final TaxiState state_prototype = TaxiWorlds.dietterich2000(rng, Nother_taxis, slip);

    final int T = 100000;

    final double gamma = 0.9;
    final double Vmax = 20.0;
    final double epsilon = 0.1;
    final double alpha = 0.1;
    final QLearner<TaxiState, PrimitiveTaxiRepresentation, TaxiAction> learner = new QLearner<TaxiState, PrimitiveTaxiRepresentation, TaxiAction>(
            new int[] { 0 }, rng, new PrimitiveTaxiRepresenter(state_prototype), new TaxiActionGenerator(),
            gamma, Vmax, epsilon, alpha);

    //      final int scale = 20;
    //      final TaxiVisualization vis = new TaxiVisualization( null, state_prototype.topology, state_prototype.locations, scale );
    //      final EpisodeListener<TaxiState, TaxiAction> updater = vis.updater( 0 );

    final AverageRewardAccumulator<TaxiState, TaxiAction> avg = new AverageRewardAccumulator<TaxiState, TaxiAction>(
            1);
    final double lag = -Double.MAX_VALUE;

    final Map<PrimitiveTaxiRepresentation, TObjectDoubleMap<TaxiAction>> old_values = new HashMap<PrimitiveTaxiRepresentation, TObjectDoubleMap<TaxiAction>>();

    int ns = 500;
    for (int i = 0; i < Nother_taxis; ++i) {
        ns *= 25 - i - 1;
    }
    final int Nstates = ns;

    int count = 0;
    while (true) {
        final TaxiState state = TaxiWorlds.dietterich2000(rng, Nother_taxis, slip);
        final TaxiSimulator sim = new TaxiSimulator(rng, state, slip, T);
        final Episode<TaxiState, TaxiAction> episode = new Episode<TaxiState, TaxiAction>(sim,
                JointPolicy.create(learner), T);
        episode.addListener(avg);
        //         episode.addListener( updater );
        //      episode.addListener( new LoggingEpisodeListener<TaxiState, TaxiAction>() );
        episode.run();

        //         final double diff = Math.abs( avg.reward[0].mean() - lag );

        //         System.out.println( "Episode " + count + ": avg reward = " + avg.reward[0].mean() );

        count += 1;

        if ((count % 10000 == 0) && learner.values.size() == Nstates) {
            //            System.out.println( "learner.values.size() == " + Nstates );
            boolean complete = true;
            double norm = 0.0;
            for (final Map.Entry<PrimitiveTaxiRepresentation, TObjectDoubleMap<TaxiAction>> e : learner.values
                    .entrySet()) {
                final TObjectDoubleMap<TaxiAction> new_q = e.getValue();
                TObjectDoubleMap<TaxiAction> old_q = old_values.get(e.getKey());
                if (old_q == null) {
                    old_q = new TObjectDoubleHashMap<TaxiAction>();
                    old_values.put(e.getKey(), old_q);
                    complete = false;
                }
                final TObjectDoubleIterator<TaxiAction> itr = new_q.iterator();
                while (itr.hasNext()) {
                    itr.advance();
                    final TaxiAction a = itr.key();
                    final double new_qa = itr.value();
                    final double old_qa = old_q.get(a);
                    final double diff = new_qa - old_qa;
                    norm += diff * diff;
                    old_q.put(a, new_qa);
                }
            }
            System.out.println("Qnorm = " + norm);
            if (complete && norm < 1e-6) {
                break;
            }
        }
    }

}