List of usage examples for the java.util.HashMap constructor
public HashMap()
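Before the full application examples below, here is a minimal, self-contained sketch of the no-argument constructor on its own (class name, map name, and values are illustrative, not taken from the examples that follow):

import java.util.HashMap;
import java.util.Map;

public class HashMapConstructorDemo {
    public static void main(String[] args) {
        // new HashMap() creates an empty map with the default capacity (16) and load factor (0.75)
        Map<String, Integer> wordCounts = new HashMap<String, Integer>();

        // put() inserts or replaces a mapping; get() returns null for a missing key
        wordCounts.put("alpha", 1);
        wordCounts.put("beta", 2);
        wordCounts.put("alpha", 3); // replaces the previous value for "alpha"

        System.out.println(wordCounts.get("alpha"));        // 3
        System.out.println(wordCounts.get("gamma"));        // null
        System.out.println(wordCounts.containsKey("beta")); // true
        System.out.println(wordCounts.size());              // 2
    }
}

The examples below use the same constructor inside larger programs: as cell mappings for Excel parsing, as request-parameter maps for REST calls, as lookup tables, and as nested grouping structures.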
From source file:com.mycompany.javaapplicaton3.LerArquivo2.java
/**
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    String SAMPLE_PERSON_DATA_FILE_PATH = "C:/Users/lprates/Documents/arquivo2013.xlsx";

    // Input File initialize
    File file = new File(SAMPLE_PERSON_DATA_FILE_PATH);
    InputStream inputStream = new FileInputStream(file);

    // Excel Cell Mapping
    Map<String, String> cellMapping0150 = new HashMap<String, String>();
    cellMapping0150.put("HEADER", "REG,COD_PART,NOME,COD_PAIS,CNPJ,CPF,IE,COD_MUN,SUFRAMA,END,NUM,COMPL,BAIRRO,BPE_VIGENCIA_FIN");
    cellMapping0150.put("A", "reg");
    cellMapping0150.put("B", "codPart");
    cellMapping0150.put("C", "nome");
    cellMapping0150.put("D", "codPais");
    cellMapping0150.put("E", "cnpj");
    cellMapping0150.put("F", "cpf");
    cellMapping0150.put("G", "ie");
    cellMapping0150.put("H", "codMun");
    cellMapping0150.put("I", "suframa");
    cellMapping0150.put("J", "end");
    cellMapping0150.put("K", "num");
    cellMapping0150.put("L", "compl");
    cellMapping0150.put("M", "bairro");
    cellMapping0150.put("N", "bpeVigenciaFin");

    // Excel Cell Mapping
    Map<String, String> cellMapping0200 = new HashMap<String, String>();
    cellMapping0200.put("HEADER", "REG,COD_ITEM,DESCR_ITEM,COD_BARRA,COD_ANT_ITEM,UNID_INV,TIPO_ITEM,COD_NCM,EX_IPI,COD_GEN,COD_LST,ALIQ_ICMS");
    cellMapping0200.put("A", "reg");
    cellMapping0200.put("B", "codItem");
    cellMapping0200.put("C", "descrItem");
    cellMapping0200.put("D", "codBarra");
    cellMapping0200.put("E", "codAntItem");
    cellMapping0200.put("F", "unidInv");
    cellMapping0200.put("G", "tipoItem");
    cellMapping0200.put("H", "codNcm");
    cellMapping0200.put("I", "exIpi");
    cellMapping0200.put("J", "codGen");
    cellMapping0200.put("K", "codLst");
    cellMapping0200.put("L", "aliqIcms");

    // Excel Cell Mapping: columns A..BS map to campo1..campo71.
    // Equivalent to the original 71 explicit put() calls, generated in a loop.
    Map<String, String> cellMappingC100_C170 = new HashMap<String, String>();
    for (int i = 0; i < 71; i++) {
        String column = i < 26
                ? String.valueOf((char) ('A' + i))
                : "" + (char) ('A' + i / 26 - 1) + (char) ('A' + i % 26);
        cellMappingC100_C170.put(column, "campo" + (i + 1));
    }

    // The package open is instantaneous, as it should be.
    OPCPackage pkg = null;
    try {
        ExcelWorkSheetHandler<Reg0150> workSheetHandler =
                new ExcelWorkSheetHandler<Reg0150>(Reg0150.class, cellMapping0150);
        pkg = OPCPackage.open(inputStream);

        ExcelSheetCallback sheetCallback = new ExcelSheetCallback() {
            private int sheetNumber = 0;

            public void startSheet(int sheetNum, String sheetName) {
                this.sheetNumber = sheetNum;
                System.out.println("Started processing sheet number=" + sheetNumber
                        + " and Sheet Name is '" + sheetName + "'");
            }

            @Override
            public void endSheet() {
                System.out.println("Processing completed for sheet number=" + sheetNumber);
            }

            public void startSheet(int sheetNum) {
                System.out.println("Started processing sheet number=" + sheetNum);
            }
        };

        /*** Read record 0150 ***/
        System.out.println("Constructor: pkg, workSheetHandler, sheetCallback");
        ExcelReader example1 = new ExcelReader(pkg, workSheetHandler, sheetCallback);
        example1.process("0150");

        if (workSheetHandler.getValueList().isEmpty()) {
            // No data present
            LOG.error("sHandler.getValueList() is empty");
        } else {
            LOG.info(workSheetHandler.getValueList().size()
                    + " no. of records read from given excel worksheet successfully.");
            // Displaying data read from Excel file
            displayPersonList(workSheetHandler.getValueList());
        }

        /*** Read record 0200 ***/
        ExcelWorkSheetHandler<Reg0200> workSheetHandler0200 =
                new ExcelWorkSheetHandler<Reg0200>(Reg0200.class, cellMapping0200);
        ExcelReader example2 = new ExcelReader(pkg, workSheetHandler0200, null);
        example2.process("0200");

        if (workSheetHandler0200.getValueList().isEmpty()) {
            LOG.error("sHandler.getValueList() is empty");
        } else {
            LOG.info(workSheetHandler0200.getValueList().size()
                    + " no. of records read from given excel worksheet successfully.");
            displayPersonList0200(workSheetHandler0200.getValueList());
        }

        /*** Read records C100 and C170 ***/
        ExcelWorkSheetHandler<RegC100_C170> workSheetHandlerC100_C170 =
                new ExcelWorkSheetHandler<RegC100_C170>(RegC100_C170.class, cellMappingC100_C170, 4);
        workSheetHandlerC100_C170.setVerifiyHeader(false);
        ExcelReader example3 = new ExcelReader(pkg, workSheetHandlerC100_C170, null);
        example3.process("201302");

        if (workSheetHandlerC100_C170.getValueList().isEmpty()) {
            LOG.error("sHandler.getValueList() is empty");
        } else {
            LOG.info(workSheetHandlerC100_C170.getValueList().size()
                    + " no. of records read from given excel worksheet successfully.");
            displayPersonListC100_C170(workSheetHandlerC100_C170.getValueList());
        }
    } catch (RuntimeException are) {
        LOG.error(are.getMessage(), are.getCause());
    } catch (InvalidFormatException ife) {
        LOG.error(ife.getMessage(), ife.getCause());
    } catch (IOException ioe) {
        LOG.error(ioe.getMessage(), ioe.getCause());
    } finally {
        IOUtils.closeQuietly(inputStream);
        try {
            if (null != pkg) {
                pkg.close();
            }
        } catch (IOException e) {
            // just ignore IO exception
        }
    }
}
From source file:enrichment.Disambiguate.java
/**
 * prerequisites:
 *   cd silk_2.5.3/*_links
 *   cat *.nt|sort -t' ' -k3 > $filename
 *
 * @param args $filename
 * @throws IOException
 * @throws URISyntaxException
 */
public static void main(String[] args) {
    File file = new File(args[0]);
    if (file.isDirectory()) {
        args = file.list(new OnlyExtFilenameFilter("nt"));
    }
    BufferedReader in;
    for (int q = 0; q < args.length; q++) {
        String filename = null;
        if (file.isDirectory()) {
            filename = file.getPath() + File.separator + args[q];
        } else {
            filename = args[q];
        }
        try {
            FileWriter output = new FileWriter(filename + "_disambiguated.nt");
            String prefix = "@prefix rdrel: <http://rdvocab.info/RDARelationshipsWEMI/> .\n"
                    + "@prefix dbpedia: <http://de.dbpedia.org/resource/> .\n"
                    + "@prefix frbr: <http://purl.org/vocab/frbr/core#> .\n"
                    + "@prefix lobid: <http://lobid.org/resource/> .\n"
                    + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n"
                    + "@prefix foaf: <http://xmlns.com/foaf/0.1/> .\n"
                    + "@prefix mo: <http://purl.org/ontology/mo/> .\n"
                    + "@prefix wikipedia: <https://de.wikipedia.org/wiki/> .";
            output.append(prefix + "\n\n");
            in = new BufferedReader(new InputStreamReader(new FileInputStream(filename)));

            HashMap<String, HashMap<String, ArrayList<String>>> hm =
                    new HashMap<String, HashMap<String, ArrayList<String>>>();
            String s;
            HashMap<String, ArrayList<String>> hmLobid = new HashMap<String, ArrayList<String>>();
            Stack<String> old_object = new Stack<String>();

            while ((s = in.readLine()) != null) {
                String[] triples = s.split(" ");
                String object = triples[2].substring(1, triples[2].length() - 1);
                if (old_object.size() > 0 && !old_object.firstElement().equals(object)) {
                    hmLobid = new HashMap<String, ArrayList<String>>();
                    old_object = new Stack<String>();
                }
                old_object.push(object);
                String subject = triples[0].substring(1, triples[0].length() - 1);
                System.out.print("\nSubject=" + object);
                System.out.print("\ntriples[2]=" + triples[2]);
                hmLobid.put(subject, getAllCreators(new URI(subject)));
                hm.put(object, hmLobid);
            }

            // get all dbpedia resources
            for (String key_one : hm.keySet()) {
                System.out.print("\n==============\n==== " + key_one + "\n===============");
                int resources_cnt = hm.get(key_one).keySet().size();
                ArrayList<String>[] creators = new ArrayList[resources_cnt];
                HashMap<String, Integer> creators_backed = new HashMap<String, Integer>();
                int x = 0;
                // get all lobid_resources subsumed under the dbpedia resource
                for (String subject_uri : hm.get(key_one).keySet()) {
                    creators[x] = new ArrayList<String>();
                    System.out.print("\n subject_uri=" + subject_uri);
                    Iterator<String> ite = hm.get(key_one).get(subject_uri).iterator();
                    int y = 0;
                    // get all creators of the lobid resource
                    while (ite.hasNext()) {
                        String creator = ite.next();
                        System.out.print("\n " + creator);
                        if (creators_backed.containsKey(creator)) {
                            y = creators_backed.get(creator);
                        } else {
                            y = creators_backed.size();
                            creators_backed.put(creator, y);
                        }
                        while (creators[x].size() <= y) {
                            creators[x].add("-");
                        }
                        creators[x].set(y, creator);
                        y++;
                    }
                    x++;
                }
                if (creators_backed.size() == 1) {
                    System.out.println("\n" + "Every resource pointing to " + key_one + " has the same creator!");
                    for (String key_two : hm.get(key_one).keySet()) {
                        output.append("<" + key_two + "> rdrel:workManifested <" + key_one + "> .\n");
                        output.append("<" + key_two + "> mo:wikipedia <"
                                + key_one.replaceAll("dbpedia\\.org/resource", "wikipedia\\.org/wiki") + "> .\n");
                    }
                }
                /*else {
                    for (int a = 0; a < creators.length; a++) {
                        System.out.print(creators[a].toString() + ",");
                    }
                }*/
            }
            output.flush();
            if (output != null) {
                output.close();
            }
        } catch (Exception e) {
            System.out.print("Exception while working on " + filename + ": \n");
            e.printStackTrace(System.out);
        }
    }
}
From source file:com.harpatec.examples.Main.java
/**
 * Load the Spring Integration Application Context
 *
 * @param args - command line arguments
 * @throws InterruptedException
 * @throws IOException
 * @throws JsonMappingException
 * @throws JsonGenerationException
 */
public static void main(final String... args)
        throws InterruptedException, JsonGenerationException, JsonMappingException, IOException {
    final AbstractApplicationContext context = new ClassPathXmlApplicationContext(
            "classpath:META-INF/spring/integration/*-context.xml");
    context.registerShutdownHook();

    LOGGER.debug("Dropping the collection of MessageRecords");
    MongoTemplate mongoTemplate = context.getBean(MongoTemplate.class);
    mongoTemplate.dropCollection(MessageRecord.class);
    mongoTemplate.indexOps(MessageRecord.class).ensureIndex(new Index().on("key", Order.ASCENDING).unique());
    mongoTemplate.indexOps(MessageRecord.class).ensureIndex(new Index().on("completionTime", Order.ASCENDING));

    RabbitTemplate inboundTemplate = (RabbitTemplate) context.getBean("amqpTemplateInbound");

    Map<String, Object> messageMap = new HashMap<String, Object>();
    messageMap.put("count", "4");
    LOGGER.debug("Submitting first message which should pass DuplicateMessageFilter ok.");
    submitMessage(inboundTemplate, messageMap);
    Thread.sleep(5 * 1000);

    LOGGER.debug("Submitting a duplicate message which should get caught by the DuplicateMessageFilter.");
    submitMessage(inboundTemplate, messageMap);
    Thread.sleep(5 * 1000);

    messageMap.put("count", "0");
    LOGGER.debug("Submitting a message which will not go all the way through the message flow.");
    submitMessage(inboundTemplate, messageMap);
    Thread.sleep(5 * 1000);

    messageMap.put("count", "1");
    messageMap.put("fail", "true");
    LOGGER.debug("Submitting a message which should signal that an Exception should be thrown.");
    submitMessage(inboundTemplate, messageMap);
    Thread.sleep(6 * 60 * 1000);

    System.exit(0);
}
From source file:org.mitre.mpf.wfm.rest_client.RegisterComponent.java
public static void main(String[] args) {
    String filePath = "/home/mpf/mpf/trunk/java-hello-world/src/main/resources/HelloWorldComponent.json";
    //"/home/mpf/mpf/trunk/extraction/hello/cpp/src/helloComponent.json";
    String url = "http://localhost:8080/workflow-manager/rest/component/registerViaFile";
    final String credentials = "Basic bXBmOm1wZjEyMw==";
    Map<String, String> params = new HashMap<String, String>();
    System.out.println("Starting rest-client!");

    //not necessary for localhost
    //System.setProperty("http.proxyHost","gatekeeper.mitre.org");
    //System.setProperty("http.proxyPort","80");

    RequestInterceptor authorize = new RequestInterceptor() {
        @Override
        public void intercept(HttpRequestBase request) {
            request.addHeader("Authorization", credentials);
        }
    };

    RestClient client = RestClient.builder().requestInterceptor(authorize).build();

    if (args.length > 0) {
        filePath = args[0];
        System.out.println("args[0] = " + args[0]);
    }
    params.put("filePath", filePath);

    Map<String, String> stringVal = null;
    try {
        stringVal = client.get(url, params, Map.class);
    } catch (RestClientException e) {
        log.error("RestClientException occurred");
        e.printStackTrace();
    } catch (IOException e) {
        log.error("IOException occurred");
        e.printStackTrace();
    }
    System.out.println(stringVal.get("message"));
}
From source file:org.mitre.mpf.wfm.rest_client.UnregisterComponent.java
public static void main(String[] args) {
    String filePath = "/home/mpf/mpf/trunk/java-hello-world/src/main/resources/HelloWorldComponent.json";
    //"/home/mpf/mpf/trunk/extraction/hello/cpp/src/helloComponent.json";
    String url = "http://localhost:8080/workflow-manager/rest/component/unregisterViaFile";
    final String credentials = "Basic bXBmOm1wZjEyMw==";
    Map<String, String> params = new HashMap<String, String>();
    System.out.println("Starting rest-client!");

    //not necessary for localhost
    //System.setProperty("http.proxyHost","gatekeeper.mitre.org");
    //System.setProperty("http.proxyPort","80");

    RequestInterceptor authorize = new RequestInterceptor() {
        @Override
        public void intercept(HttpRequestBase request) {
            request.addHeader("Authorization", credentials);
        }
    };

    RestClient client = RestClient.builder().requestInterceptor(authorize).build();

    if (args.length > 0) {
        filePath = args[0];
    }
    log.info(filePath);
    params.put("filePath", filePath);

    Map<String, String> stringVal = null;
    try {
        stringVal = client.get(url, params, Map.class);
    } catch (RestClientException e) {
        log.error("RestClientException occurred");
        e.printStackTrace();
    } catch (IOException e) {
        log.error("IOException occurred");
        e.printStackTrace();
    }
    System.out.println(stringVal.get("message"));
}
From source file:org.bimserver.build.CreateGitHubRelease.java
public static void main(String[] args) {
    String username = args[0];
    String password = args[1];
    String repo = args[2];
    String project = args[3];
    String tagname = args[4];
    String name = args[5];
    String body = args[6];
    String draft = args[7];
    String prerelease = args[8];
    String filesString = args[9];
    String[] filenames = filesString.split(";");

    GitHubClient gitHubClient = new GitHubClient("api.github.com");
    gitHubClient.setCredentials(username, password);

    Map<String, String> map = new HashMap<String, String>();
    map.put("tag_name", tagname);
    // map.put("target_commitish", "test");
    map.put("name", name);
    map.put("body", body);
    // map.put("draft", draft);
    // map.put("prerelease", prerelease);

    try {
        String string = "/repos/" + repo + "/" + project + "/releases";
        System.out.println(string);
        JsonObject gitHubResponse = gitHubClient.post(string, map, JsonObject.class);
        System.out.println(gitHubResponse);
        String id = gitHubResponse.get("id").getAsString();

        HttpHost httpHost = new HttpHost("uploads.github.com", 443, "https");

        BasicCredentialsProvider basicCredentialsProvider = new BasicCredentialsProvider();
        basicCredentialsProvider.setCredentials(new AuthScope(httpHost),
                new UsernamePasswordCredentials(username, password));

        HostnameVerifier hostnameVerifier = new AllowAllHostnameVerifier();

        SSLContextBuilder builder = new SSLContextBuilder();
        builder.loadTrustMaterial(null, new TrustSelfSignedStrategy());
        SSLConnectionSocketFactory sslsf = new SSLConnectionSocketFactory(builder.build());

        CloseableHttpClient client = HttpClients.custom()
                .setDefaultCredentialsProvider(basicCredentialsProvider)
                .setHostnameVerifier((X509HostnameVerifier) hostnameVerifier)
                .setSSLSocketFactory(sslsf)
                .build();

        AuthCache authCache = new BasicAuthCache();
        BasicScheme basicAuth = new BasicScheme();
        authCache.put(httpHost, basicAuth);

        HttpClientContext context = HttpClientContext.create();
        context.setCredentialsProvider(basicCredentialsProvider);
        context.setAuthCache(authCache);

        for (String filename : filenames) {
            File file = new File(filename);
            String url = "https://uploads.github.com/repos/" + repo + "/" + project + "/releases/" + id
                    + "/assets?name=" + file.getName();
            HttpPost post = new HttpPost(url);
            post.setHeader("Accept", "application/vnd.github.manifold-preview");
            post.setHeader("Content-Type", "application/zip");
            post.setEntity(new InputStreamEntity(new FileInputStream(file), file.length()));
            HttpResponse execute = client.execute(httpHost, post, context);
            execute.getEntity().getContent().close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    } catch (NoSuchAlgorithmException e) {
        e.printStackTrace();
    } catch (KeyStoreException e) {
        e.printStackTrace();
    } catch (KeyManagementException e) {
        e.printStackTrace();
    }
}
From source file:com.px100systems.data.utility.RestoreUtility.java
public static void main(String[] args) {
    if (args.length < 3) {
        System.err.println("Usage: java -cp ... com.px100systems.data.utility.RestoreUtility "
                + "<springXmlConfigFile> <persisterBeanName> <backupDirectory> [compare]");
        return;
    }

    FileSystemXmlApplicationContext ctx = new FileSystemXmlApplicationContext("file:" + args[0]);
    try {
        PersistenceProvider persister = ctx.getBean(args[1], PersistenceProvider.class);

        File directory = new File(args[2]);
        if (!directory.isDirectory()) {
            System.err.println(directory.getName() + " is not a directory");
            return;
        }

        List<File> files = new ArrayList<File>();
        //noinspection ConstantConditions
        for (File file : directory.listFiles())
            if (BackupFile.isBackup(file))
                files.add(file);

        if (files.isEmpty()) {
            System.err.println(directory.getName() + " directory has no backup files");
            return;
        }

        if (args.length == 4 && args[3].equalsIgnoreCase("compare")) {
            final Map<String, Map<Long, RawRecord>> units = new HashMap<String, Map<Long, RawRecord>>();

            for (String storage : persister.storage()) {
                System.out.println("Storage " + storage);
                persister.loadByStorage(storage, new PersistenceProvider.LoadCallback() {
                    @Override
                    public void process(RawRecord record) {
                        Map<Long, RawRecord> unitList = units.get(record.getUnitName());
                        if (unitList == null) {
                            unitList = new HashMap<Long, RawRecord>();
                            units.put(record.getUnitName(), unitList);
                        }
                        unitList.put(record.getId(), record);
                    }
                });

                for (final Map.Entry<String, Map<Long, RawRecord>> unit : units.entrySet()) {
                    BackupFile file = null;
                    for (int i = 0, n = files.size(); i < n; i++)
                        if (BackupFile.isBackup(files.get(i), unit.getKey())) {
                            file = new BackupFile(files.get(i));
                            files.remove(i);
                            break;
                        }
                    if (file == null)
                        throw new RuntimeException("Could not find backup file for unit " + unit.getKey());

                    final Long[] count = new Long[] { 0L };
                    file.read(new PersistenceProvider.LoadCallback() {
                        @Override
                        public void process(RawRecord record) {
                            RawRecord r = unit.getValue().get(record.getId());
                            if (r == null)
                                throw new RuntimeException("Could not find persisted record " + record.getId()
                                        + " for unit " + unit.getKey());
                            if (!r.equals(record))
                                throw new RuntimeException(
                                        "Record " + record.getId() + " mismatch for unit " + unit.getKey());
                            count[0] = count[0] + 1;
                        }
                    });
                    if (count[0] != unit.getValue().size())
                        throw new RuntimeException("Extra persisted records for unit " + unit.getKey());
                    System.out.println(" Unit " + unit.getKey() + ": OK");
                }

                units.clear();
            }

            if (!files.isEmpty()) {
                System.err.println("Extra backups: ");
                for (File file : files)
                    System.err.println(" " + file.getName());
            }
        } else {
            persister.init();
            for (File file : files) {
                InMemoryDatabase.readBackupFile(file, persister);
                System.out.println("Loaded " + file.getName());
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        ctx.close();
    }
}
From source file:gedi.lfc.quick.ShiroguchiCounter.java
public static void main(String[] args) throws IOException {

    String path = "/home/users/erhard/biostor/seq/ngade/shiroguchi_randombarcodes/data/";

    MemoryIntervalTreeStorage<int[]> reads = new MemoryIntervalTreeStorage<int[]>(int[].class);

    String[] files = { "Shiroguchi_A_collapsed.bed", "Shiroguchi_B_collapsed.bed",
            "Shiroguchi_A_uncollapsed.bed", "Shiroguchi_B_uncollapsed.bed" };
    for (int i = 0; i < 4; i++) {
        Iterator<String> it = new LineOrientedFile(path + files[i]).lineIterator();
        while (it.hasNext()) {
            String[] f = StringUtils.split(it.next(), '\t');
            Chromosome chr = Chromosome.obtain(f[0]);
            ArrayGenomicRegion region = new ArrayGenomicRegion(Integer.parseInt(f[1]), Integer.parseInt(f[2]));
            int c = Integer.parseInt(StringUtils.splitField(f[3], '|', 0));

            int[] counts = reads.getData(chr, region);
            if (counts == null)
                reads.add(chr, region, counts = new int[4]);

            counts[i] += c;
        }
    }

    HashMap<String, String> map = new HashMap<String, String>();
    new LineOrientedFile(path + "U00096.2.genes.csv").lineIterator().forEachRemaining(s -> {
        String[] f = StringUtils.split(s, '\t');
        map.put(f[0], f[7]);
    });

    LineOrientedFile fragments = new LineOrientedFile("fragments.csv");
    fragments.startWriting();
    fragments.writef("Gene\tonlyA\tonlyB\tBoth\tLength\n");

    LineOrientedFile bias = new LineOrientedFile("bias.csv");
    bias.startWriting();
    bias.writef("OriginalA\tBiasA\tOriginalB\tBiasB\n");

    IntArrayList biasFactors = new IntArrayList();
    ArrayList<GeneData> geneData = new ArrayList<GeneData>();

    MemoryIntervalTreeStorage<Transcript> genes = new BiomartExonFileReader(path + "U00096.2.exons.csv", false)
            .readIntoMemoryTakeFirst();
    for (ImmutableReferenceGenomicRegion<Transcript> g : genes.getReferenceGenomicRegions()) {
        ArrayList<ImmutableReferenceGenomicRegion<int[]>> frag = reads
                .getReferenceRegionsIntersecting(g.getReference().toStrandIndependent(), g.getRegion());

        GeneData gd = new GeneData();

        int l = g.getRegion().getTotalLength();
        for (ImmutableReferenceGenomicRegion<int[]> r : frag) {
            if (r.getData()[0] == 0)
                gd.onlyB++;
            if (r.getData()[1] == 0)
                gd.onlyA++;
            if (r.getData()[0] == 0 && r.getData()[1] == 0)
                throw new RuntimeException();

            bias.writef("%d\t%.0f\t%d\t%.0f\n", r.getData()[0], r.getData()[2] / (double) r.getData()[0],
                    r.getData()[1], r.getData()[3] / (double) r.getData()[1]);
            if (r.getData()[0] > 0) {
                biasFactors.add(r.getData()[2] / r.getData()[0]);
            }
            if (r.getData()[1] > 0) {
                biasFactors.add(r.getData()[3] / r.getData()[1]);
            }
        }
        gd.both = frag.size() - gd.onlyA - gd.onlyB;

        fragments.writef("%s\t%d\t%d\t%d\t%d\n", map.get(g.getData().getTranscriptId()), gd.onlyA, gd.onlyB,
                gd.both, l);

        if (gd.onlyA + gd.onlyB + gd.both > 0)
            geneData.add(gd);
    }

    fragments.finishWriting();
    bias.finishWriting();

    double fc = 1.4;
    int rep = 5;
    int nDiff = 1000;
    int n = 10000;
    int N = 6000;
    double noise = 0.05;

    LineOrientedFile countMatrix = new LineOrientedFile("countMatrix.csv");
    countMatrix.startWriting();

    LineOrientedFile downCountMatrix = new LineOrientedFile("countMatrix_downsampled.csv");
    downCountMatrix.startWriting();

    RandomNumbers rnd = new RandomNumbers();
    for (int i = 0; i < n; i++) {
        GeneData gd = geneData.get(rnd.getUnif(0, geneData.size()));

        // int N = gd.both==0?Integer.MAX_VALUE/2:(int) (gd.onlyA+gd.onlyB+gd.both+gd.onlyA*gd.onlyB/gd.both);
        double p1 = (gd.onlyA + gd.both) / (double) N;
        double p2 = i < nDiff ? p1 / fc : p1;

        ArrayList<ReadData> list = new ArrayList<ReadData>();

        for (int r = 0; r < rep * 2; r++) {
            int k = rnd.getBinom(N, r < rep ? p1 : p2) + 1;
            int hit = N == -1 ? 0 : rnd.getBinom(k, list.size() / (double) N);
            rnd.shuffle(list);
            for (int x = 0; x < hit; x++)
                list.get(x).reads[r] = (int) rnd.getNormal(list.get(x).bias, list.get(x).bias * noise);
            for (int x = 0; x < k - hit; x++)
                list.add(new ReadData(biasFactors.getInt(rnd.getUnif(0, biasFactors.size())), rep * 2, r));
        }

        int[] c = new int[rep * 2];
        for (ReadData d : list) {
            for (int r = 0; r < c.length; r++) {
                c[r] += d.reads[r];
            }
        }
        double[] down = new double[rep * 2];
        for (ReadData d : list) {
            double max = ArrayUtils.max(d.reads);
            for (int r = 0; r < down.length; r++) {
                down[r] += d.reads[r] / max;
            }
        }

        countMatrix.writeLine(StringUtils.concat("\t", c));
        downCountMatrix.writeLine(StringUtils.concat("\t", down));
    }

    countMatrix.finishWriting();
    downCountMatrix.finishWriting();
}
From source file:com.facebook.infrastructure.net.io.TcpReader.java
public static void main(String[] args) throws Throwable {
    // HashMap used as a lookup table from reader state to its handler object
    Map<TcpReaderState, StartState> stateMap = new HashMap<TcpReaderState, StartState>();
    stateMap.put(TcpReaderState.CONTENT, new ContentState(null, 10));
    stateMap.put(TcpReaderState.START, new ProtocolState(null));
    stateMap.put(TcpReaderState.CONTENT_LENGTH, new ContentLengthState(null));

    StartState state = stateMap.get(TcpReaderState.CONTENT);
    System.out.println(state.getClass().getName());
    state = stateMap.get(TcpReaderState.CONTENT_LENGTH);
    System.out.println(state.getClass().getName());
}
From source file:TwitterClustering.java
public static void main(String[] args) throws FileNotFoundException, IOException {
    File outFile = new File(args[3]);
    Scanner s = new Scanner(new File(args[1])).useDelimiter(",");
    JSONParser parser = new JSONParser();
    Set<Cluster> clusterSet = new HashSet<Cluster>();
    HashMap<String, Tweet> tweets = new HashMap();
    FileWriter fw = new FileWriter(outFile.getAbsoluteFile());
    BufferedWriter bw = new BufferedWriter(fw);

    // init
    try {
        Object obj = parser.parse(new FileReader(args[2]));
        JSONArray jsonArray = (JSONArray) obj;

        for (int i = 0; i < jsonArray.size(); i++) {
            Tweet twt = new Tweet();
            JSONObject jObj = (JSONObject) jsonArray.get(i);
            String text = jObj.get("text").toString();

            long sum = 0;
            for (int y = 0; y < text.toCharArray().length; y++) {
                sum += (int) text.toCharArray()[y];
            }

            String[] token = text.split(" ");
            String tID = jObj.get("id").toString();

            Set<String> mySet = new HashSet<String>(Arrays.asList(token));
            twt.setAttributeValue(sum);
            twt.setText(mySet);
            twt.setTweetID(tID);
            tweets.put(tID, twt);
        }

        // preparing initial clusters
        int i = 0;
        while (s.hasNext()) {
            String id = s.next(); // id
            Tweet t = tweets.get(id.trim());
            clusterSet.add(new Cluster(i + 1, t, new LinkedList()));
            i++;
        }

        for (int l = 0; l < 2; l++) { // limit to 2 iterations
            // recreate the iterator each pass; a single iterator would be exhausted after the first pass
            Iterator it = tweets.entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry me = (Map.Entry) it.next();

                // calculate distance to each centroid
                Tweet p = (Tweet) me.getValue();
                HashMap<Cluster, Float> distMap = new HashMap();
                for (Cluster clust : clusterSet) {
                    distMap.put(clust, jaccardDistance(p.getText(), clust.getCentroid().getText()));
                }

                HashMap<Cluster, Float> sorted = (HashMap<Cluster, Float>) sortByValue(distMap);
                sorted.keySet().iterator().next().getMembers().add(p);
            }

            // calculate new centroid and update Clusterset
            for (Cluster clust : clusterSet) {
                TreeMap<String, Long> tDistMap = new TreeMap();
                Tweet newCentroid = null;
                Long avgSumDist = new Long(0);

                for (int j = 0; j < clust.getMembers().size(); j++) {
                    avgSumDist += clust.getMembers().get(j).getAttributeValue();
                    tDistMap.put(clust.getMembers().get(j).getTweetID(),
                            clust.getMembers().get(j).getAttributeValue());
                }
                if (clust.getMembers().size() != 0) {
                    avgSumDist /= (clust.getMembers().size());
                }

                ArrayList<Long> listValues = new ArrayList<Long>(tDistMap.values());

                if (tDistMap.containsValue(findClosestNumber(listValues, avgSumDist))) {
                    // found closest
                    newCentroid = tweets.get(getKeyByValue(tDistMap, findClosestNumber(listValues, avgSumDist)));
                    clust.setCentroid(newCentroid);
                }
            }
        }

        // create an iterator and check values
        Iterator iterator = clusterSet.iterator();
        while (iterator.hasNext()) {
            Cluster c = (Cluster) iterator.next();
            bw.write(c.getId() + "\t");
            System.out.print(c.getId() + "\t");

            for (Tweet t : c.getMembers()) {
                bw.write(t.getTweetID() + ", ");
                System.out.print(t.getTweetID() + ",");
            }
            bw.write("\n");
            System.out.println("");
        }
        System.out.println("");
        System.out.println("SSE " + sumSquaredErrror(clusterSet));
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        bw.close();
        fw.close();
    }
}