List of usage examples for java.util.HashMap.put
public V put(K key, V value)
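Before the longer examples below, a minimal self-contained sketch of put itself: it inserts or replaces a mapping and returns the previous value for the key, or null if there was none. The class and key/value names here are made up for illustration.

import java.util.HashMap;

public class HashMapPutDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> ages = new HashMap<String, Integer>();

        // First insertion: no previous mapping, so put returns null.
        Integer previous = ages.put("alice", 30);
        System.out.println(previous);          // null

        // Re-inserting the same key replaces the value and returns the old one.
        previous = ages.put("alice", 31);
        System.out.println(previous);          // 30
        System.out.println(ages.get("alice")); // 31
    }
}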
From source file: joshua.ui.hypergraph_visualizer.HyperGraphViewer.java

public static void main(String[] argv) {
    if (argv.length < 4) {
        System.err.println(USAGE);
        System.exit(1);
    }
    String itemsFile = argv[0];
    String rulesFile = argv[1];
    int firstSentence = Integer.parseInt(argv[2]);
    int lastSentence = Integer.parseInt(argv[3]);
    HashMap<Integer, Integer> chosenSentences = new HashMap<Integer, Integer>();
    for (int i = firstSentence; i < lastSentence; i++) {
        chosenSentences.put(i, i);
    }
    Vocabulary vocab = new Vocabulary();
    DiskHyperGraph dhg = new DiskHyperGraph(vocab, 0, true, null);
    dhg.initRead(itemsFile, rulesFile, chosenSentences);
    JungHyperGraph hg = new JungHyperGraph(dhg.readHyperGraph(), vocab);
    JFrame frame = new JFrame("Joshua Hypergraph");
    frame.getContentPane().add(new HyperGraphViewer(hg, vocab));
    frame.setSize(500, 500);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setVisible(true);
    return;
}
From source file: org.ala.harvester.RawFileHarvester.java

/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    if (args.length != 3) {
        printUsage();
        System.exit(1);
    }
    String infosourceId = args[0];
    String siteMap = args[1];
    String documentMapperClass = args[2];
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);
    RawFileHarvester h = (RawFileHarvester) context.getBean("rawFileHarvester");
    h.setDocumentMapper((DocumentMapper) Class.forName(documentMapperClass).newInstance());
    HashMap<String, String> connectParams = new HashMap<String, String>();
    connectParams.put("sitemap", siteMap);
    h.setConnectionParams(connectParams);
    h.start(Integer.parseInt(infosourceId));
}
From source file: com.impetus.kundera.ycsb.benchmark.CouchDBNativeClient.java

public static void main(String[] args) {
    CouchDBNativeClient cli = new CouchDBNativeClient();
    Properties props = new Properties();
    props.setProperty("hosts", "localhost");
    props.setProperty("port", "5984");
    cli.setProperties(props);
    try {
        cli.init();
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(0);
    }
    HashMap<String, ByteIterator> vals = new HashMap<String, ByteIterator>();
    vals.put("age", new StringByteIterator("57"));
    vals.put("middlename", new StringByteIterator("bradley"));
    vals.put("favoritecolor", new StringByteIterator("blue"));
    int res = cli.insert("usertable", "BrianFrankCooper", vals);
    System.out.println("Result of insert: " + res);

    HashMap<String, ByteIterator> result = new HashMap<String, ByteIterator>();
    HashSet<String> fields = new HashSet<String>();
    fields.add("middlename");
    fields.add("age");
    fields.add("favoritecolor");
    res = cli.read("usertable", "BrianFrankCooper", null, result);
    System.out.println("Result of read: " + res);
    for (String s : result.keySet()) {
        System.out.println("[" + s + "]=[" + result.get(s) + "]");
    }

    res = cli.delete("usertable", "BrianFrankCooper");
    System.out.println("Result of delete: " + res);
}
From source file: com.ibm.crail.storage.StorageServer.java

public static void main(String[] args) throws Exception {
    Logger LOG = CrailUtils.getLogger();
    CrailConfiguration conf = new CrailConfiguration();
    CrailConstants.updateConstants(conf);
    CrailConstants.printConf();
    CrailConstants.verify();

    int splitIndex = 0;
    for (String param : args) {
        if (param.equalsIgnoreCase("--")) {
            break;
        }
        splitIndex++;
    }

    // default values
    StringTokenizer tokenizer = new StringTokenizer(CrailConstants.STORAGE_TYPES, ",");
    if (!tokenizer.hasMoreTokens()) {
        throw new Exception("No storage types defined!");
    }
    String storageName = tokenizer.nextToken();
    int storageType = 0;
    HashMap<String, Integer> storageTypes = new HashMap<String, Integer>();
    storageTypes.put(storageName, storageType);
    for (int type = 1; tokenizer.hasMoreElements(); type++) {
        String name = tokenizer.nextToken();
        storageTypes.put(name, type);
    }
    int storageClass = -1;

    // custom values
    if (args != null) {
        Option typeOption = Option.builder("t").desc("storage type to start").hasArg().build();
        Option classOption = Option.builder("c").desc("storage class the server will attach to").hasArg().build();
        Options options = new Options();
        options.addOption(typeOption);
        options.addOption(classOption);
        CommandLineParser parser = new DefaultParser();
        try {
            CommandLine line = parser.parse(options, Arrays.copyOfRange(args, 0, splitIndex));
            if (line.hasOption(typeOption.getOpt())) {
                storageName = line.getOptionValue(typeOption.getOpt());
                storageType = storageTypes.get(storageName).intValue();
            }
            if (line.hasOption(classOption.getOpt())) {
                storageClass = Integer.parseInt(line.getOptionValue(classOption.getOpt()));
            }
        } catch (ParseException e) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("Storage tier", options);
            System.exit(-1);
        }
    }
    if (storageClass < 0) {
        storageClass = storageType;
    }

    StorageTier storageTier = StorageTier.createInstance(storageName);
    if (storageTier == null) {
        throw new Exception("Cannot instantiate datanode of type " + storageName);
    }

    String[] extraParams = null;
    splitIndex++;
    if (args.length > splitIndex) {
        extraParams = new String[args.length - splitIndex];
        for (int i = splitIndex; i < args.length; i++) {
            extraParams[i - splitIndex] = args[i];
        }
    }
    storageTier.init(conf, extraParams);
    storageTier.printConf(LOG);

    RpcClient rpcClient = RpcClient.createInstance(CrailConstants.NAMENODE_RPC_TYPE);
    rpcClient.init(conf, args);
    rpcClient.printConf(LOG);

    ConcurrentLinkedQueue<InetSocketAddress> namenodeList = CrailUtils.getNameNodeList();
    ConcurrentLinkedQueue<RpcConnection> connectionList = new ConcurrentLinkedQueue<RpcConnection>();
    while (!namenodeList.isEmpty()) {
        InetSocketAddress address = namenodeList.poll();
        RpcConnection connection = rpcClient.connect(address);
        connectionList.add(connection);
    }
    RpcConnection rpcConnection = connectionList.peek();
    if (connectionList.size() > 1) {
        rpcConnection = new RpcDispatcher(connectionList);
    }
    LOG.info("connected to namenode(s) " + rpcConnection.toString());

    StorageServer server = storageTier.launchServer();
    StorageRpcClient storageRpc = new StorageRpcClient(storageType, CrailStorageClass.get(storageClass),
            server.getAddress(), rpcConnection);

    HashMap<Long, Long> blockCount = new HashMap<Long, Long>();
    long sumCount = 0;
    while (server.isAlive()) {
        StorageResource resource = server.allocateResource();
        if (resource == null) {
            break;
        } else {
            storageRpc.setBlock(resource.getAddress(), resource.getLength(), resource.getKey());
            DataNodeStatistics stats = storageRpc.getDataNode();
            long newCount = stats.getFreeBlockCount();
            long serviceId = stats.getServiceId();
            long oldCount = 0;
            if (blockCount.containsKey(serviceId)) {
                oldCount = blockCount.get(serviceId);
            }
            long diffCount = newCount - oldCount;
            blockCount.put(serviceId, newCount);
            sumCount += diffCount;
            LOG.info("datanode statistics, freeBlocks " + sumCount);
        }
    }

    while (server.isAlive()) {
        DataNodeStatistics stats = storageRpc.getDataNode();
        long newCount = stats.getFreeBlockCount();
        long serviceId = stats.getServiceId();
        long oldCount = 0;
        if (blockCount.containsKey(serviceId)) {
            oldCount = blockCount.get(serviceId);
        }
        long diffCount = newCount - oldCount;
        blockCount.put(serviceId, newCount);
        sumCount += diffCount;
        LOG.info("datanode statistics, freeBlocks " + sumCount);
        Thread.sleep(2000);
    }
}
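The containsKey/get/put sequence above is a common way to track a running count per key. On Java 8 and later the same per-service update can be written more compactly with Map.getOrDefault. A minimal, self-contained sketch of that alternative, not part of the original source; the service IDs and block counts are made up for illustration:

import java.util.HashMap;

public class BlockCountSketch {
    public static void main(String[] args) {
        HashMap<Long, Long> blockCount = new HashMap<Long, Long>();

        // Hypothetical (serviceId, freeBlockCount) observations.
        long[][] observations = { { 1L, 100L }, { 2L, 50L }, { 1L, 120L } };

        long sumCount = 0;
        for (long[] obs : observations) {
            long serviceId = obs[0];
            long newCount = obs[1];
            // getOrDefault replaces the explicit containsKey check.
            long oldCount = blockCount.getOrDefault(serviceId, 0L);
            sumCount += newCount - oldCount;
            blockCount.put(serviceId, newCount);
        }
        System.out.println("freeBlocks " + sumCount); // 170
    }
}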
From source file: gamlss.algorithm.Gamlss.java

/**
 * Main method.
 * @param args - command-line arguments
 */
public static void main(final String[] args) {
    //String fileName = "Data/dataReduced.csv";
    String fileName = "Data/oil.csv";
    //String fileName = "Data/sp.csv";
    //String fileName = "Data/dataReduced.csv";
    CSVFileReader readData = new CSVFileReader(fileName);
    readData.readFile();
    ArrayList<String> data = readData.storeValues;

    ArrayRealVector y = new ArrayRealVector(data.size());
    BlockRealMatrix muX = new BlockRealMatrix(data.size(), 1);
    BlockRealMatrix sigmaX = new BlockRealMatrix(data.size(), 1);
    BlockRealMatrix nuX = new BlockRealMatrix(data.size(), 1);
    BlockRealMatrix tauX = new BlockRealMatrix(data.size(), 1);
    ArrayRealVector w = new ArrayRealVector(data.size());

    BlockRealMatrix muS = new BlockRealMatrix(data.size(), 1);
    BlockRealMatrix sigmaS = new BlockRealMatrix(data.size(), 1);
    BlockRealMatrix nuS = new BlockRealMatrix(data.size(), 1);
    BlockRealMatrix tauS = new BlockRealMatrix(data.size(), 1);

    for (int i = 0; i < data.size(); i++) {
        String[] line = data.get(i).split(",");
        y.setEntry(i, Double.parseDouble(line[0]));
        muX.setEntry(i, 0, Double.parseDouble(line[1]));
        muS.setEntry(i, 0, Double.parseDouble(line[1]));
        sigmaX.setEntry(i, 0, Double.parseDouble(line[1]));
        sigmaS.setEntry(i, 0, Double.parseDouble(line[1]));
        nuX.setEntry(i, 0, Double.parseDouble(line[1]));
        nuS.setEntry(i, 0, Double.parseDouble(line[1]));
        tauX.setEntry(i, 0, Double.parseDouble(line[1]));
        tauS.setEntry(i, 0, Double.parseDouble(line[1]));
    }

    Hashtable<Integer, BlockRealMatrix> designMatrices = new Hashtable<Integer, BlockRealMatrix>();
    designMatrices.put(DistributionSettings.MU, muX);
    designMatrices.put(DistributionSettings.SIGMA, sigmaX);
    designMatrices.put(DistributionSettings.NU, nuX);
    designMatrices.put(DistributionSettings.TAU, tauX);

    HashMap<Integer, BlockRealMatrix> smoothMatrices = new HashMap<Integer, BlockRealMatrix>();
    smoothMatrices.put(DistributionSettings.MU, muS);
    smoothMatrices.put(DistributionSettings.SIGMA, sigmaS);
    smoothMatrices.put(DistributionSettings.NU, nuS);
    smoothMatrices.put(DistributionSettings.TAU, tauS);

    //smoothMatrices.put(DistributionSettings.MU, null);
    //smoothMatrices.put(DistributionSettings.SIGMA, null);
    //smoothMatrices.put(DistributionSettings.NU, null);
    //smoothMatrices.put(DistributionSettings.TAU, null);

    DistributionSettings.DISTR = DistributionSettings.SST;
    Controls.GLOB_DEVIANCE_TOL = 5500;
    Controls.INTER = 50; //only for the PB smoother
    Controls.SMOOTHER = Controls.PB; //or PB
    Controls.IS_SVD = true;
    Controls.BIG_DATA = true;
    Controls.JAVA_OPTIMIZATION = false;
    Controls.GAMLSS_NUM_CYCLES = 50;

    //Gamlss gamlss = new Gamlss(y, designMatrices, null);
    Gamlss gamlss = new Gamlss(y, designMatrices, null);
    //Gamlss gamlss = new Gamlss(y, null, smoothMatrices);
    gamlss.saveFittedDistributionParameters("Data/oilresults.csv");
}
From source file: org.ala.harvester.CSVHarvester.java

/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    if (args.length != 3) {
        printUsage();
        System.exit(1);
    }
    String infosourceId = args[0];
    String siteMap = args[1];
    String documentMapperClass = args[2];
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);
    CSVHarvester h = (CSVHarvester) context.getBean("csvHarvester");
    h.setDocumentMapper((DocumentMapper) Class.forName(documentMapperClass).newInstance());
    HashMap<String, String> connectParams = new HashMap<String, String>();
    connectParams.put("sitemap", siteMap);
    h.setConnectionParams(connectParams);
    h.start(Integer.parseInt(infosourceId));
}
From source file: net.kolola.msgparsercli.MsgParseCLI.java

public static void main(String[] args) {
    // Parse options
    OptionParser parser = new OptionParser("f:a:bi?*");
    OptionSet options = parser.parse(args);

    // Get the filename
    if (!options.has("f")) {
        System.err.print("Specify a msg file with the -f option");
        System.exit(0);
    }
    File file = new File((String) options.valueOf("f"));
    MsgParser msgp = new MsgParser();
    Message msg = null;
    try {
        msg = msgp.parseMsg(file);
    } catch (UnsupportedOperationException | IOException e) {
        System.err.print("File does not exist or is not a valid msg file");
        //e.printStackTrace();
        System.exit(1);
    }

    // Show info (as JSON)
    if (options.has("i")) {
        Map<String, Object> data = new HashMap<String, Object>();
        String date;
        try {
            Date st = msg.getClientSubmitTime();
            date = st.toString();
        } catch (Exception g) {
            try {
                date = msg.getDate().toString();
            } catch (Exception e) {
                date = "[UNAVAILABLE]";
            }
        }
        data.put("date", date);
        data.put("subject", msg.getSubject());
        data.put("from", "\"" + msg.getFromName() + "\" <" + msg.getFromEmail() + ">");
        data.put("to", "\"" + msg.getToRecipient().toString());

        String cc = "";
        for (RecipientEntry r : msg.getCcRecipients()) {
            // String.concat returns a new string, so accumulate with +=
            if (cc.length() > 0)
                cc += "; ";
            cc += r.toString();
        }
        data.put("cc", cc);

        data.put("body_html", msg.getBodyHTML());
        data.put("body_rtf", msg.getBodyRTF());
        data.put("body_text", msg.getBodyText());

        // Attachments
        List<Map<String, String>> atts = new ArrayList<Map<String, String>>();
        for (Attachment a : msg.getAttachments()) {
            HashMap<String, String> info = new HashMap<String, String>();
            if (a instanceof FileAttachment) {
                FileAttachment fa = (FileAttachment) a;
                info.put("type", "file");
                info.put("filename", fa.getFilename());
                info.put("size", Long.toString(fa.getSize()));
            } else {
                info.put("type", "message");
            }
            atts.add(info);
        }
        data.put("attachments", atts);

        JSONObject json = new JSONObject(data);
        try {
            System.out.print(json.toString(4));
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
    // OR return an attachment in BASE64
    else if (options.has("a")) {
        Integer anum = Integer.parseInt((String) options.valueOf("a"));
        Encoder b64 = Base64.getEncoder();
        List<Attachment> atts = msg.getAttachments();
        if (atts.size() <= anum) {
            System.out.print("Attachment " + anum.toString() + " does not exist");
            System.exit(1); // avoid the out-of-range get() below
        }
        Attachment att = atts.get(anum);
        if (att instanceof FileAttachment) {
            FileAttachment fatt = (FileAttachment) att;
            System.out.print(b64.encodeToString(fatt.getData()));
        } else {
            System.err.print("Attachment " + anum.toString() + " is a message - That's not implemented yet :(");
        }
    }
    // OR print the message body
    else if (options.has("b")) {
        System.out.print(msg.getConvertedBodyHTML());
    } else {
        System.err.print(
                "Specify either -i to return msg information or -a <num> to print an attachment as a BASE64 string");
    }
}
From source file: net.anymeta.Test.java

/**
 * @param args
 */
public static void main(String[] arg) throws AnyMetaRegistryException, AnyMetaException, JSONException {
    HashMap<String, Object> args;

    // Load the API
    AnyMetaAPI api = AnyMetaAPI.fromRegistry("pluto.local");

    // Get information for the currently logged in user.
    JSONObject o = (JSONObject) api.doMethod("anymeta.user.info");
    System.out.println("Logged in as " + o.getString("title"));

    // Execute a query.
    args = new HashMap<String, Object>();
    args.put("q_kind", "PERSON");
    JSONArray a = (JSONArray) api.doMethod("query.execute", args);
    System.out.println(a.toString());
    System.out.println(a.length());

    // Lookup an RFID tag.
    args = new HashMap<String, Object>();
    ArrayList<String> ids = new ArrayList<String>();
    ids.add("65");
    ids.add("94");
    args.put("ids", ids);
    args.put("predicate", "KNOWS");
    o = (JSONObject) api.doMethod("contact.link", args);
    System.out.println(o.toString());

    // Upload an image
    args = new HashMap<String, Object>();
    args.put("mime", "image/jpeg");
    args.put("data", "@C:\\bla.jpg");
    o = (JSONObject) api.doMethod("anymeta.attachment.create", args);
    System.out.println(o.toString());
}
From source file: ch.epfl.lsir.xin.test.MostPopularTest.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub
    PrintWriter logger = new PrintWriter(".//results//MostPopular");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//MostPopular.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    TrainTestSplitter splitter = new TrainTestSplitter(dataset);
    splitter.splitFraction(config.getDouble("TRAIN_FRACTION"));
    ArrayList<NumericRating> trainRatings = splitter.getTrain();
    ArrayList<NumericRating> testRatings = splitter.getTest();

    HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
    HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
    //create rating matrix
    for (int i = 0; i < dataset.getUserIDs().size(); i++) {
        userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
    }
    for (int i = 0; i < dataset.getItemIDs().size(); i++) {
        itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
    }

    RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
            dataset.getItemIDs().size());
    for (int i = 0; i < trainRatings.size(); i++) {
        trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
    }
    RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
            dataset.getItemIDs().size());
    for (int i = 0; i < testRatings.size(); i++) {
        //only consider 5-star rating in the test set
        // if( testRatings.get(i).getValue() < 5 )
        //     continue;
        testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
    }
    System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
            + testRatingMatrix.getTotalRatingNumber());

    logger.println("Initialize a most popular based recommendation model.");
    MostPopular algo = new MostPopular(trainRatingMatrix);
    algo.setLogger(logger);
    algo.build();
    algo.saveModel(".//localModels//" + config.getString("NAME"));
    logger.println("Save the model.");
    logger.flush();

    HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>();
    for (int i = 0; i < testRatingMatrix.getRow(); i++) {
        ArrayList<ResultUnit> rec = algo.getRecommendationList(i);
        if (rec == null)
            continue;
        int total = testRatingMatrix.getUserRatingNumber(i);
        if (total == 0) //this user is ignored
            continue;
        results.put(i, rec);
    }
    RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix,
            trainRatingMatrix);
    System.out.println("Precision@N: " + generator.getPrecisionN());
    System.out.println("Recall@N: " + generator.getRecallN());
    System.out.println("MAP@N: " + generator.getMAPN());
    System.out.println("MRR@N: " + generator.getMRRN());
    System.out.println("NDCG@N: " + generator.getNDCGN());
    System.out.println("AUC@N: " + generator.getAUC());
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "Precision@N: "
            + generator.getPrecisionN() + "\n" + "Recall@N: " + generator.getRecallN() + "\n" + "MAP@N: "
            + generator.getMAPN() + "\n" + "MRR@N: " + generator.getMRRN() + "\n" + "NDCG@N: "
            + generator.getNDCGN() + "\n" + "AUC@N: " + generator.getAUC());
    logger.flush();
    logger.close();
}
From source file: edu.umass.cs.reconfiguration.reconfigurationpackets.CreateServiceName.java

public static void main(String[] args) {
    try {
        Util.assertAssertionsEnabled();
        InetSocketAddress isa = new InetSocketAddress(InetAddress.getByName("localhost"), 2345);
        int numNames = 1000;
        String[] reconfigurators = { "RC43", "RC22", "RC78", "RC21", "RC143" };
        String namePrefix = "someName";
        String defaultState = "default_initial_state";
        String[] names = new String[numNames];
        String[] states = new String[numNames];
        for (int i = 0; i < numNames; i++) {
            names[i] = namePrefix + i;
            states[i] = defaultState + i;
        }
        CreateServiceName bcreate1 = new CreateServiceName(isa, "random0", 0, "hello");
        HashMap<String, String> nameStates = new HashMap<String, String>();
        for (int i = 0; i < names.length; i++)
            nameStates.put(names[i], states[i]);
        CreateServiceName bcreate2 = new CreateServiceName(isa, names[0], 0, states[0], nameStates);
        System.out.println(bcreate1.toString());
        System.out.println(bcreate2.toString());

        // translate a batch into consistent constituent batches
        Collection<Set<String>> batches = ConsistentReconfigurableNodeConfig.splitIntoRCGroups(
                new HashSet<String>(Arrays.asList(names)), new HashSet<String>(Arrays.asList(reconfigurators)));
        int totalSize = 0;
        int numBatches = 0;
        for (Set<String> batch : batches)
            System.out.println("batch#" + numBatches++ + " of size " + batch.size() + " (totalSize = "
                    + (totalSize += batch.size()) + ")" + " = " + batch);
        assert (totalSize == numNames);

        System.out.println(bcreate2.getSummary());
    } catch (Exception e) {
        e.printStackTrace();
    }
}