List of usage examples for java.util.HashMap toString()
public String toString()
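HashMap does not override toString() itself; it inherits AbstractMap.toString(), which renders the entries as key=value pairs separated by ", " and wrapped in braces. A minimal, self-contained illustration (the class name and values below are just for demonstration):

import java.util.HashMap;

public class HashMapToStringDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> scores = new HashMap<String, Integer>();
        scores.put("alice", 3);
        scores.put("bob", 5);

        // HashMap inherits toString() from AbstractMap: entries are rendered as {key=value, key=value}
        System.out.println(scores.toString()); // e.g. {bob=5, alice=3} -- iteration order is not guaranteed
    }
}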
From source file:Main.java
public static void main(String[] args) {
    HashMap<Character, Integer> map = new HashMap<Character, Integer>();
    String test = "BUNANA";
    char[] chars = test.toCharArray();
    for (int i = 0; i < chars.length; i++) {
        // initialize the count to 0 on first sight so the increment below counts correctly
        if (!map.containsKey(chars[i])) {
            map.put(chars[i], 0);
        }
        map.put(chars[i], map.get(chars[i]) + 1);
    }
    // prints the character counts, e.g. {A=2, B=1, U=1, N=2} (HashMap iteration order is not guaranteed)
    System.out.println(map.toString());
}
From source file:nl.systemsgenetics.cellTypeSpecificAlleleSpecificExpression.ReadGenoAndAsFromIndividual.java
public static void readGenoAndAsFromIndividual(String loc_of_bam1, String genotype_loc,
        String coupling_location, String outputLocation, String snpLocation) throws IOException, Exception {

    if (GlobalVariables.verbosity >= 1) {
        //Print ASREADS header
        System.out.println("---- Starting ASREADS for the following settings: ----");
        System.out.println("\t input bam: " + loc_of_bam1);
        System.out.println("\t genotype location: " + genotype_loc);
        System.out.println("\t coupling file: " + coupling_location);
        System.out.println("\t output location: " + outputLocation);
        if (!snpLocation.equals("")) {
            System.out.println("\t snp Location: " + snpLocation);
        } else {
            System.out.println("\t snp Location: " + "NONE");
        }
        System.out.println("------------------------------------------------------");
    }

    //parse command line arguments
    String loc_of_bam;
    loc_of_bam = loc_of_bam1;
    System.out.println("Location of bam file: ");
    System.out.println(loc_of_bam);

    if (!new File(loc_of_bam).exists()) {
        throw new IllegalArgumentException("ERROR! Location of bam file is not an existing file. Exiting.");
    } else {
        if (GlobalVariables.verbosity >= 10) {
            System.out.println("Location of bam file is an existing file, will continue.");
        }
    }

    RandomAccessGenotypeData TTdataSet;
    VcfGenotypeData VCFdataSet;
    HashMap<String, GeneticVariant> variantIdMap;
    String[] individual_names;

    String tabixLoc = genotype_loc + ".tbi";

    //open vcf dataset
    //based on extension and existence of both files.
    if (FilenameUtils.isExtension(genotype_loc, "gz") && new File(tabixLoc).exists()
            && new File(genotype_loc).exists()) {
        try {
            VCFdataSet = new VcfGenotypeData(new File(genotype_loc), new File(tabixLoc), 0.99);
            variantIdMap = VCFdataSet.getVariantIdMap();
            individual_names = VCFdataSet.getSampleNames();
        } catch (IOException ex) {
            System.err.println("Error reading vcf dataset: " + genotype_loc);
            throw new IllegalArgumentException();
        }
    } else if (new File(genotype_loc + "/GenotypeMatrix.dat").exists()) {
        //assuming trityper dataset based on the genotype matrix
        try {
            TTdataSet = new TriTyperGenotypeData(new File(genotype_loc));
            variantIdMap = TTdataSet.getVariantIdMap();
            individual_names = TTdataSet.getSampleNames();
        } catch (IOException ex) {
            System.err.println("Error reading trityper dataset: " + genotype_loc);
            throw new IllegalArgumentException();
        }
    } else {
        throw new IllegalDataException("could not find a Trityper or vcf file in the genotype location");
    }

    //get the variants in the variantIdMap
    Set<String> snpNames = variantIdMap.keySet();

    ArrayList<String> SNPsToAnalyze;
    SNPsToAnalyze = new ArrayList<String>();

    //If available, read the file with rs numbers.
    if (!snpLocation.equals("")) {
        ArrayList<String> includeSNPs = UtilityMethods.readFileIntoStringArrayList(snpLocation);

        int snpsNotFound = 0;
        for (String snp_to_include : includeSNPs) {
            if (snpNames.contains(snp_to_include)) {
                SNPsToAnalyze.add(snp_to_include);
            } else {
                snpsNotFound++;
            }
        }
        if (GlobalVariables.verbosity >= 1) {
            System.out.println("WARNING: Did not find " + Integer.toString(snpsNotFound) + " out of "
                    + Integer.toString(includeSNPs.size()) + " SNPs in the include file.");
        }
    } else {
        for (String snp_to_include : snpNames) {
            SNPsToAnalyze.add(snp_to_include);
        }
    }

    //String path = "/gcc/groups/lld/tmp01/projects/bamFiles/";
    //sample_map contains all the individuals that are in the sample file.
    HashMap sample_map = convert_individual_names(individual_names, coupling_location);

    if (GlobalVariables.verbosity >= 10) {
        System.out.println("Sample names were loaded.");
    }
    if (GlobalVariables.verbosity >= 100) {
        System.out.println(sample_map.toString());
    }

    //Twice because my files have the .MERGED.sorted.bam suffix attached to them.
    String sample_name = FilenameUtils
            .getBaseName(FilenameUtils.getBaseName(FilenameUtils.getBaseName(loc_of_bam)));

    if (GlobalVariables.verbosity >= 10) {
        System.out.println("sample_name: " + sample_name);
        System.out.println("sample_map: " + sample_map.toString());
    }

    Object sample_idx = sample_map.get(sample_name);
    if (sample_idx == null) {
        throw new IllegalArgumentException("Couldn't find the filename in the sample names. Quitting.");
    }
    int sample_index = Integer.parseInt(sample_idx.toString());

    if (GlobalVariables.verbosity >= 10) {
        System.out.println("sample_index: " + sample_index);
    }

    //bam file path and filename
    String path_and_filename = loc_of_bam;
    File sample_file = new File(path_and_filename);

    SamReader bam_file = SamReaderFactory.makeDefault().open(sample_file);

    if (GlobalVariables.verbosity >= 10) {
        System.out.println("Initialized for reading bam file");
    }

    PrintWriter writer = new PrintWriter(outputLocation, "UTF-8");

    int i = 0;
    for (String i_snp : SNPsToAnalyze) {
        //System.out.println(i_snp);
        GeneticVariant this_variant = variantIdMap.get(i_snp);
        String chromosome = this_variant.getSequenceName();
        String position = String.valueOf(this_variant.getStartPos());

        // We only do analyses if we find a SNP and it is biallelic.
        // However this is trityper data, so if we use
        // the allele count is used for the check of something.
        //DO NOT ENTER A SEPARATED GENOMIC DATASET OTHERWISE THIS WILL BREAK.
        if (this_variant.isSnp() & this_variant.isBiallelic()) {
            String row_of_table = get_allele_specific_overlap_at_snp(this_variant, sample_index, chromosome,
                    position, bam_file);

            //commented out the phasing part.
            writer.println(chromosome + "\t" + position + "\t" + i_snp + "\t"
                    + this_variant.getVariantAlleles().getAllelesAsChars()[0] + "\t"
                    + this_variant.getVariantAlleles().getAllelesAsChars()[1] + "\t" + row_of_table + "\t"
                    + Arrays.toString(this_variant.getSampleVariants().get(sample_index).getAllelesAsChars())
                    //+ "\t" +
                    //Boolean.toString(this_variant.getSamplePhasing().get(sample_index))
            );
        }
        i++;
        if ((i % 10000 == 0) && (GlobalVariables.verbosity >= 10)) {
            System.out.println("Finished " + Integer.toString(i) + " SNPs");
        }
    }
    writer.close();
}
From source file:com.datatorrent.demos.ads.HdfsHashMapOutputOperator.java
@Override
public byte[] getBytesForTuple(HashMap<Object, Object> t) {
    return t.toString().getBytes();
}
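Note that getBytes() with no argument encodes the rendered map using the platform's default charset; if the byte output needs to be identical across machines, passing an explicit charset (for example t.toString().getBytes(StandardCharsets.UTF_8)) would be the safer choice.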
From source file:jsentvar.JsonReaderTest.java
/**
 * Test of reader method, of class JsonReader.
 * @throws java.io.IOException
 */
@Test
public void testReader() throws IOException {
    System.out.println("JsonReader.reader");
    String inputFileName = "resources/test/text_doc0.json";
    JsonReader instance = new JsonReader();
    String expResult = FileUtils.readFileToString(new File("resources/test/doc0Result.txt"), "utf8").trim();
    HashMap<String, HashMap<Integer, Integer>> result0 = instance.reader(inputFileName);
    String result = result0.toString();
    System.out.println("Esperat:\n" + expResult);
    System.out.println("Obtingut:\n" + result);
    assertEquals(expResult, result);
}
From source file:jsentvar.GenerateTestsResults.java
public void jsonReaderResult() throws IOException {
    String possFile = "resources/text_doc0.json";
    JsonReader jreader = new JsonReader();
    HashMap<String, HashMap<Integer, Integer>> poss = jreader.reader(possFile);
    System.out.println(poss.toString());
    FileUtils.writeStringToFile(new File("resources/test/doc0Result.txt"), poss.toString(), "utf8");
}
From source file:org.openmrs.module.dataaggregation.web.controller.DataAggregationManageController.java
@RequestMapping(value = "/module/dataaggregation/manage", method = RequestMethod.GET) public void manage(ModelMap model) { model.addAttribute("user", Context.getAuthenticatedUser()); DataAggregationService service = Context.getService(DataAggregationService.class); model.addAttribute("patients", service.getAllPatientNames()); DataAggregationService serv = Context.getService(DataAggregationService.class); HashMap<String, Integer> diseaseBurden = serv.getDiseaseBurden();//I have no idea why it gives this error, the method exists //convert the data to a string that is roughly csv model.addAttribute("diseaseBurden", diseaseBurden.toString()); }
From source file:org.apache.storm.command.rebalance.java
@SuppressWarnings("rawtypes") public void realMain(String[] args) throws Exception { CmdLineParser parser = new CmdLineParser(this); parser.setUsageWidth(80);//from ww w . ja va 2 s .com try { parser.parseArgument(args); } catch (CmdLineException e) { System.err.println(e.getMessage()); _help = true; } if (_help) { parser.printUsage(System.err); System.err.println(); return; } if (_numWorkers <= 0) { throw new IllegalArgumentException("Need at least one worker"); } if (_name == null || _name.isEmpty()) { throw new IllegalArgumentException("name must be something"); } String info = "Topology " + _name + " is rebalancing "; NimbusClient client = null; Map conf = Utils.readStormConfig(); try { client = NimbusClient.getConfiguredClient(conf); RebalanceOptions options = new RebalanceOptions(); options.set_wait_secs(_wait); info += " with delaySesc " + _wait + " "; options.set_num_workers(_numWorkers); info += " number of workers " + _numWorkers; if (_executor != null) { HashMap<String, Integer> executors = parseExecutor(_executor); options.set_num_executors(executors); info += " with executor " + executors.toString() + " "; } client.getClient().rebalance(_name, options); System.out.println(info); } catch (Exception e) { System.out.println(CoreUtil.stringifyError(e)); printUsage(); } finally { if (client != null) { client.close(); } } }
From source file:se.vgregion.service.innovationsslussen.ldap.TextFrag.java
@SuppressWarnings("unchecked") @Override//from w w w . j a v a 2 s . c o m public String toString() { try { if (toStringRunning) { return " recursive call to " + getClass().getSimpleName() + ".toString()"; } toStringRunning = true; HashMap hm = new HashMap(new BeanMap(this)); hm.remove("class"); return hm.toString(); } finally { toStringRunning = false; } }
From source file:ru.apertum.qsystem.reports.formirovators.DistributionJobDay.java
@Override
public String validate(String driverClassName, String url, String username, String password,
        HttpRequest request, HashMap<String, String> params) {
    // Check the incoming report parameters.
    // NOTE: the original Russian log and error messages were garbled in this listing; English approximations are used below.
    QLog.l().logger().trace("Validating report parameters \"" + params.toString() + "\".");
    if (params.size() == 1) { // date
        Date date;
        String sdate;
        try {
            date = Uses.format_dd_MM_yyyy.parse(params.get("date"));
            sdate = (new java.text.SimpleDateFormat("yyyy-MM-dd")).format(date);
        } catch (ParseException ex) {
            return "<br>Error! The date does not match the expected format (dd.MM.yyyy).";
        }
        paramMap.put("sdate", sdate);
        paramMap.put("date", date);
    } else {
        return "<br>Invalid parameters!";
    }
    return null;
}
From source file:ru.apertum.qsystem.reports.formirovators.DistributionMedDayServices.java
@Override
public String validate(String driverClassName, String url, String username, String password,
        HttpRequest request, HashMap<String, String> params) {
    // Check the incoming report parameters.
    // NOTE: the original Russian log and error messages were garbled in this listing; English approximations are used below.
    QLog.l().logger().trace("Validating report parameters \"" + params.toString() + "\".");
    if (params.size() == 1) { // date/service_id/service
        Date date;
        String sdate;
        try {
            date = Uses.format_dd_MM_yyyy.parse(params.get("date"));
            sdate = (new java.text.SimpleDateFormat("yyyy-MM-dd")).format(date);
        } catch (NumberFormatException | ParseException ex) {
            return "<br>Error! The date does not match the expected format (dd.MM.yyyy).";
        }
        paramMap.put("sdate", sdate);
        paramMap.put("date", date);
    } else {
        return "<br>Invalid parameters!";
    }
    return null;
}