List of usage examples for java.util.HashMap.put

public V put(K key, V value)

Associates the specified value with the specified key in this map. If the map previously contained a mapping for the key, the old value is replaced and returned; if the key was absent, put returns null.
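Before the full examples below, a minimal self-contained sketch of the put contract (standard JDK behavior; the class and variable names are illustrative):

import java.util.HashMap;
import java.util.Map;

public class PutDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();

        // put returns null when the key was not present before
        Integer previous = ages.put("alice", 30);
        System.out.println(previous);          // null

        // put replaces the existing value and returns the old one
        previous = ages.put("alice", 31);
        System.out.println(previous);          // 30
        System.out.println(ages.get("alice")); // 31

        // HashMap permits one null key and null values
        ages.put(null, 0);
        System.out.println(ages.get(null));    // 0
    }
}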
From source file:aws.sample.AmazonDynamoDBSample.java
public static void main(String[] args) throws Exception {
    init();
    try {
        String tableName = "my-favorite-movies-table";

        // Create a table with a primary key named 'name', which holds a string
        CreateTableRequest createTableRequest = new CreateTableRequest().withTableName(tableName)
                .withKeySchema(new KeySchema(
                        new KeySchemaElement().withAttributeName("name").withAttributeType("S")))
                .withProvisionedThroughput(
                        new ProvisionedThroughput().withReadCapacityUnits(10L).withWriteCapacityUnits(10L));
        TableDescription createdTableDescription = dynamoDB.createTable(createTableRequest)
                .getTableDescription();
        System.out.println("Created Table: " + createdTableDescription);

        // Wait for it to become active
        waitForTableToBecomeAvailable(tableName);

        // Describe our new table
        DescribeTableRequest describeTableRequest = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = dynamoDB.describeTable(describeTableRequest).getTable();
        System.out.println("Table Description: " + tableDescription);

        // Add an item
        Map<String, AttributeValue> item = newItem("Bill & Ted's Excellent Adventure", 1989, "****",
                "James", "Sara");
        PutItemRequest putItemRequest = new PutItemRequest(tableName, item);
        PutItemResult putItemResult = dynamoDB.putItem(putItemRequest);
        System.out.println("Result: " + putItemResult);

        // Add another item
        item = newItem("Airplane", 1980, "*****", "James", "Billy Bob");
        putItemRequest = new PutItemRequest(tableName, item);
        putItemResult = dynamoDB.putItem(putItemRequest);
        System.out.println("Result: " + putItemResult);

        // Scan items for movies with a year attribute greater than 1985
        HashMap<String, Condition> scanFilter = new HashMap<String, Condition>();
        Condition condition = new Condition().withComparisonOperator(ComparisonOperator.GT.toString())
                .withAttributeValueList(new AttributeValue().withN("1985"));
        scanFilter.put("year", condition);
        ScanRequest scanRequest = new ScanRequest(tableName).withScanFilter(scanFilter);
        ScanResult scanResult = dynamoDB.scan(scanRequest);
        System.out.println("Result: " + scanResult);

        // Clean up
        DeleteTableRequest deleteTableRequest = new DeleteTableRequest(tableName);
        dynamoDB.deleteTable(deleteTableRequest);
        System.out.println("Delete Table: " + tableName);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to AWS, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        ace.printStackTrace();
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with AWS, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}
From source file:br.com.faccilitacorretor.middleware.dynamo.AmazonDynamoDBSample.java
public static void main(String[] args) throws Exception {
    init();
    try {
        String tableName = "my-favorite-movies-table";

        // Create table if it does not exist yet
        if (Tables.doesTableExist(dynamoDB, tableName)) {
            System.out.println("Table " + tableName + " is already ACTIVE");
        } else {
            // Create a table with a primary hash key named 'name', which holds a string
            CreateTableRequest createTableRequest = new CreateTableRequest().withTableName(tableName)
                    .withKeySchema(new KeySchemaElement().withAttributeName("name").withKeyType(KeyType.HASH))
                    .withAttributeDefinitions(new AttributeDefinition().withAttributeName("name")
                            .withAttributeType(ScalarAttributeType.S))
                    .withProvisionedThroughput(
                            new ProvisionedThroughput().withReadCapacityUnits(1L).withWriteCapacityUnits(1L));
            TableDescription createdTableDescription = dynamoDB.createTable(createTableRequest)
                    .getTableDescription();
            System.out.println("Created Table: " + createdTableDescription);

            // Wait for it to become active
            System.out.println("Waiting for " + tableName + " to become ACTIVE...");
            Tables.awaitTableToBecomeActive(dynamoDB, tableName);
        }

        // Describe our new table
        DescribeTableRequest describeTableRequest = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = dynamoDB.describeTable(describeTableRequest).getTable();
        System.out.println("Table Description: " + tableDescription);

        // Add an item
        Map<String, AttributeValue> item = newItem("Bill & Ted's Excellent Adventure", 1989, "****",
                "James", "Sara");
        PutItemRequest putItemRequest = new PutItemRequest(tableName, item);
        PutItemResult putItemResult = dynamoDB.putItem(putItemRequest);
        System.out.println("Result: " + putItemResult);

        // Add another item
        item = newItem("Airplane", 1980, "*****", "James", "Billy Bob");
        putItemRequest = new PutItemRequest(tableName, item);
        putItemResult = dynamoDB.putItem(putItemRequest);
        System.out.println("Result: " + putItemResult);

        // Scan items for movies with a year attribute greater than 1985
        HashMap<String, Condition> scanFilter = new HashMap<String, Condition>();
        Condition condition = new Condition().withComparisonOperator(ComparisonOperator.GT.toString())
                .withAttributeValueList(new AttributeValue().withN("1985"));
        scanFilter.put("year", condition);
        ScanRequest scanRequest = new ScanRequest(tableName).withScanFilter(scanFilter);
        ScanResult scanResult = dynamoDB.scan(scanRequest);
        System.out.println("Result: " + scanResult);
    } catch (AmazonServiceException ase) {
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to AWS, but was rejected with an error response for some reason.");
        System.out.println("Error Message: " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code: " + ase.getErrorCode());
        System.out.println("Error Type: " + ase.getErrorType());
        System.out.println("Request ID: " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with AWS, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}
From source file:tradeok.HttpTool.java
public static void main(String[] args) {
    String ip = "https://www.okcoin.com";
    String url = "/mobile/login.do?random=" + Math.round(Math.random() * 100);
    String uName = "woderchen@gmail.com";
    String pWord = "126001";
    int longLogin = 1;

    // log in to the website
    HashMap map = new HashMap();
    map.put("loginName", uName);
    map.put("password", pWord);
    map.put("longLogin", longLogin);

    String response = "";
    try {
        HttpTool.setProxy("3.36.234.41", 88);
        response = HttpTool.postJsonBody(ip + url, 60000, map, "utf-8");
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    System.out.println("response:" + response);
}
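The raw HashMap above compiles but bypasses compile-time type checking. Since HttpTool's postJsonBody signature is not shown in the snippet, the following standalone sketch only assembles the same kind of request map with generics; the credential values are placeholders, not the ones from the example:

import java.util.HashMap;
import java.util.Map;

public class LoginParams {
    public static void main(String[] args) {
        // Map<String, Object> keeps the heterogeneous values
        // (Strings plus an auto-boxed int) type-checked at the map level.
        Map<String, Object> form = new HashMap<>();
        form.put("loginName", "user@example.com"); // placeholder
        form.put("password", "secret");            // placeholder
        form.put("longLogin", 1);                  // int auto-boxes to Integer
        System.out.println(form);
    }
}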
From source file:ch.epfl.lsir.xin.test.UserAverageTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//UserAverage");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//UserAverage.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        System.out.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }

        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on user average method.");
        UserAverage algo = new UserAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()));
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    // MAE: 0.8353035962363073 RMSE: 1.0422971886952053 (MovieLens 100k)
}
From source file:com.knowbout.epg.EPG.java
public static void main(String[] args) {
    String configFile = "/etc/flip.tv/epg.xml";
    boolean sitemaponly = false;
    if (args.length > 0) {
        if (args.length == 2 && args[0].equals("-config")) {
            configFile = args[1];
        } else if (args.length == 1 && args[0].equals("-sitemaponly")) {
            sitemaponly = true;
        } else {
            System.err.println("Usage: java " + EPG.class.getName() + " [-config <xmlfile>]");
            System.exit(1);
        }
    }
    try {
        XMLConfiguration config = new XMLConfiguration(configFile);

        HashMap<String, String> hibernateProperties = new HashMap<String, String>();
        Configuration database = config.subset("database");
        hibernateProperties.put(Environment.DRIVER, database.getString("driver"));
        hibernateProperties.put(Environment.URL, database.getString("url"));
        hibernateProperties.put(Environment.USER, database.getString("user"));
        hibernateProperties.put(Environment.PASS, database.getString("password"));
        hibernateProperties.put(Environment.DATASOURCE, null);
        HibernateUtil.setProperties(hibernateProperties);

        if (!sitemaponly) {
            // Get the server configuration to download the content
            Configuration provider = config.subset("provider");
            InetAddress server = InetAddress.getByName(provider.getString("server"));
            File destinationFolder = new File(provider.getString("destinationFolder"));
            String username = provider.getString("username");
            String password = provider.getString("password");
            String remoteDirectory = provider.getString("remoteWorkingDirectory");
            boolean forceDownload = provider.getBoolean("forceDownload");
            Downloader downloader = new Downloader(server, username, password, destinationFolder,
                    remoteDirectory, forceDownload);
            int count = downloader.downloadFiles();
            log.info("Downloaded " + count + " files");
            if (count > 0) {
                log.info("Processing downloads now.");
                // Get the names of the files to process
                Configuration files = config.subset("files");
                File headend = new File(destinationFolder, files.getString("headend"));
                File lineup = new File(destinationFolder, files.getString("lineup"));
                File stations = new File(destinationFolder, files.getString("stations"));
                File programs = new File(destinationFolder, files.getString("programs"));
                File schedules = new File(destinationFolder, files.getString("schedules"));
                Parser parser = new Parser(config, headend, lineup, stations, programs, schedules);
                parser.parse();
                log.info("Finished parsing EPG Data. Invoking AlertQueue service now.");
                String alertUrl = config.getString("alertUrl");
                HessianProxyFactory factory = new HessianProxyFactory();
                AlertQueue alerts = (AlertQueue) factory.create(AlertQueue.class, alertUrl);
                alerts.checkAlerts();
                log.info("Updating sitemap");
                updateSitemap(config.getString("sitemap", "/usr/local/webapps/search/sitemap.xml.gz"));
                log.info("Exiting EPG now.");
            } else {
                log.info("No files were downloaded, so don't process the old files.");
            }
        } else {
            log.info("Updating sitemap");
            updateSitemap(config.getString("sitemap", "/usr/local/webapps/search/sitemap.xml.gz"));
            log.info("Done updating sitemap");
        }
    } catch (ConfigurationException e) {
        log.fatal("Configuration error in file " + configFile, e);
        e.printStackTrace();
    } catch (UnknownHostException e) {
        log.fatal("Unable to connect to host", e);
        e.printStackTrace();
    } catch (IOException e) {
        log.fatal("Error downloading or processing EPG information", e);
        e.printStackTrace();
    } catch (Throwable e) {
        log.fatal("Unexpected Error", e);
        e.printStackTrace();
    }
}
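Note the hibernateProperties.put(Environment.DATASOURCE, null) call above: HashMap accepts null values (and one null key), whereas Hashtable and ConcurrentHashMap throw NullPointerException. A standalone illustration of this difference:

import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;

public class NullValueDemo {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("hibernate.connection.datasource", null); // allowed: HashMap stores null values
        System.out.println(props.containsKey("hibernate.connection.datasource")); // true
        System.out.println(props.get("hibernate.connection.datasource"));         // null

        Map<String, String> table = new Hashtable<>();
        try {
            table.put("key", null); // Hashtable rejects null values
        } catch (NullPointerException e) {
            System.out.println("Hashtable: " + e);
        }
    }
}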
From source file:net.itransformers.postDiscoverer.core.ReportManager.java
public static void main(String[] args) throws IOException {
    File projectDir = new File(".");
    File scriptPath = new File("postDiscoverer/src/main/resources/postDiscoverer/conf/groovy/");
    ResourceManagerFactory resourceManagerFactory = new XmlResourceManagerFactory(
            "iDiscover/resourceManager/xmlResourceManager/src/main/resources/xmlResourceManager/conf/xml/resource.xml");
    Map<String, String> resourceManagerParams = new HashMap<>();
    resourceManagerParams.put("projectPath", projectDir.getAbsolutePath());
    ResourceManager resourceManager = resourceManagerFactory.createResourceManager("xml",
            resourceManagerParams);

    Map<String, String> params = new HashMap<String, String>();
    params.put("protocol", "telnet");
    params.put("deviceName", "R1");
    params.put("deviceType", "CISCO");
    params.put("address", "10.17.1.5");
    params.put("port", "23");
    ResourceType resource = resourceManager.findFirstResourceBy(params);

    List connectParameters = resource.getConnectionParams();
    for (int i = 0; i < connectParameters.size(); i++) {
        ConnectionParamsType connParamsType = (ConnectionParamsType) connectParameters.get(i);
        String connectionType = connParamsType.getConnectionType();
        if (connectionType.equalsIgnoreCase(params.get("protocol"))) {
            for (ParamType param : connParamsType.getParam()) {
                params.put(param.getName(), param.getValue());
            }
        }
    }

    File postDiscoveryConfig = new File(projectDir
            + "/postDiscoverer/src/main/resources/postDiscoverer/conf/xml/reportGenerator.xml");
    if (!postDiscoveryConfig.exists()) {
        System.out.println("File missing: " + postDiscoveryConfig.getAbsolutePath());
        return;
    }
    ReportGeneratorType reportGenerator = null;
    FileInputStream is = new FileInputStream(postDiscoveryConfig);
    try {
        reportGenerator = JaxbMarshalar.unmarshal(ReportGeneratorType.class, is);
    } catch (JAXBException e) {
        logger.info(e);
    } finally {
        is.close();
    }

    ReportManager reportManager = new ReportManager(reportGenerator, scriptPath.getPath(), projectDir,
            "postDiscoverer/conf/xslt/table_creator.xslt");
    StringBuffer report = null;
    HashMap<String, Object> groovyExecutorParams = new HashMap<String, Object>();
    for (String s : params.keySet()) {
        groovyExecutorParams.put(s, params.get(s));
    }
    try {
        report = reportManager.reportExecutor(
                new File("/Users/niau/Projects/Projects/netTransformer10/version1/post-discovery"),
                groovyExecutorParams);
    } catch (ParserConfigurationException e) {
        e.printStackTrace();
    } catch (SAXException e) {
        e.printStackTrace();
    }
    if (report != null) {
        System.out.println(report.toString());
    } else {
        System.out.println("Report generation failed!");
    }
}
From source file:ch.epfl.lsir.xin.test.GlobalMeanTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//GlobalMean");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//GlobalMean.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    logger.flush();
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        System.out.println("Folder: " + folder);
        logger.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }

        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on global average method.");
        GlobalAverage algo = new GlobalAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(rating.getUserID(), rating.getItemID());
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    // MAE: 0.9338607074893257 RMSE: 1.1170971131112037 (MovieLens1M)
    // MAE: 0.9446876509332618 RMSE: 1.1256517870920375 (MovieLens100K)
}
From source file:ch.epfl.lsir.xin.test.ItemAverageTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//ItemAverage");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//ItemAverage.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        logger.flush();
        System.out.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }

        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on item average method.");
        ItemAverage algo = new ItemAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()));
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    // MAE: 0.8173633324758338 RMSE: 1.0251973503888645 (MovieLens 100K)
}
From source file:contractEditor.contractClients.java
public static void main(String[] args) {
    JSONObject obj = new JSONObject();
    obj.put("name", "clientTemplate");
    obj.put("context", "VM-deployment");

    HashMap serviceRequirement = new HashMap();
    HashMap serviceDescription = new HashMap();
    serviceRequirement.put("VM1_volume", "18_GB");
    serviceDescription.put("VM1_purpose", "dev");
    serviceDescription.put("VM1_data", "private");
    serviceDescription.put("VM1_application", "internal");

    serviceRequirement.put("VM2_volume", "20_GB");
    serviceDescription.put("VM2_purpose", "prod");
    serviceDescription.put("VM2_data", "public");
    serviceDescription.put("VM2_application", "business");

    serviceRequirement.put("VM3_volume", "30_GB");
    serviceDescription.put("VM3_purpose", "test");
    serviceDescription.put("VM3_data", "public");
    serviceDescription.put("VM3_application", "business");

    serviceRequirement.put("VM4_volume", "20_GB");
    serviceDescription.put("VM4_purpose", "prod");
    serviceDescription.put("VM4_data", "public");
    serviceDescription.put("VM4_application", "business");

    obj.put("serviceRequirement", serviceRequirement);
    obj.put("serviceDescription", serviceDescription);

    HashMap gauranteeTerm = new HashMap();
    gauranteeTerm.put("VM1_availability", "more_97_percentage");
    gauranteeTerm.put("VM2_availability", "more_99_percentage");
    gauranteeTerm.put("VM3_availability", "more_95_percentage");
    gauranteeTerm.put("VM4_availability", "more_99_percentage");
    obj.put("gauranteeTerm", gauranteeTerm);

    // Constraint 1
    HashMap host_rule1 = new HashMap();
    HashMap VM_rule1 = new HashMap();
    host_rule1.put("certificate", "true");
    VM_rule1.put("purpose", "dev");
    ArrayList rule1 = new ArrayList();
    rule1.add("permission");
    rule1.add(host_rule1);
    rule1.add(VM_rule1);

    HashMap host_rule1_2 = new HashMap();
    HashMap VM_rule1_2 = new HashMap();
    host_rule1_2.put("certificate", "true");
    VM_rule1_2.put("purpose", "prod");
    ArrayList rule1_2 = new ArrayList();
    rule1_2.add("permission");
    rule1_2.add(host_rule1_2);
    rule1_2.add(VM_rule1_2);

    HashMap host_rule1_3 = new HashMap();
    HashMap VM_rule1_3 = new HashMap();
    host_rule1_3.put("certificate", "true");
    VM_rule1_3.put("purpose", "test");
    ArrayList rule1_3 = new ArrayList();
    rule1_3.add("permission");
    rule1_3.add(host_rule1_3);
    rule1_3.add(VM_rule1_3);

    HashMap host_rule2 = new HashMap();
    HashMap VM_rule2 = new HashMap();
    host_rule2.put("location", "France");
    VM_rule2.put("ID", "VM2");
    ArrayList rule2 = new ArrayList();
    rule2.add("permission");
    rule2.add(host_rule2);
    rule2.add(VM_rule2);

    HashMap host_rule2_1 = new HashMap();
    HashMap VM_rule2_1 = new HashMap();
    host_rule2_1.put("location", "UK");
    VM_rule2_1.put("ID", "VM2");
    ArrayList rule2_1 = new ArrayList();
    rule2_1.add("permission");
    rule2_1.add(host_rule2_1);
    rule2_1.add(VM_rule2_1);

    HashMap host_rule3 = new HashMap();
    HashMap VM_rule3 = new HashMap();
    host_rule3.put("location", "France");
    VM_rule3.put("application", "business");
    ArrayList rule3 = new ArrayList();
    rule3.add("permission");
    rule3.add(host_rule3);
    rule3.add(VM_rule3);

    HashMap host_rule3_1 = new HashMap();
    HashMap VM_rule3_1 = new HashMap();
    host_rule3_1.put("location", "UK");
    VM_rule3_1.put("application", "business");
    ArrayList rule3_1 = new ArrayList();
    rule3_1.add("permission");
    rule3_1.add(host_rule3_1);
    rule3_1.add(VM_rule3_1);

    HashMap VMSeperation_rule_1_1 = new HashMap();
    HashMap VMSeperation_rule_1_2 = new HashMap();
    VMSeperation_rule_1_1.put("ID", "VM1");
    VMSeperation_rule_1_2.put("ID", "VM3");
    ArrayList rule4 = new ArrayList();
    rule4.add("separation");
    rule4.add(VMSeperation_rule_1_1);
    rule4.add(VMSeperation_rule_1_2);

    ArrayList policyInConstraint1 = new ArrayList();
    policyInConstraint1.add(rule1);
    policyInConstraint1.add(rule1_2);
    policyInConstraint1.add(rule1_3);
    policyInConstraint1.add(rule2);
    policyInConstraint1.add(rule2_1);
    policyInConstraint1.add(rule3);
    policyInConstraint1.add(rule3_1);
    policyInConstraint1.add(rule4);

    ArrayList creationConstraint1 = new ArrayList();
    creationConstraint1.add("RP4");
    creationConstraint1.add("true");
    creationConstraint1.add("true");
    creationConstraint1.add(policyInConstraint1);

    ArrayList totalConstraint = new ArrayList();
    totalConstraint.add(creationConstraint1);
    obj.put("creationConstraint", totalConstraint);

    try {
        FileWriter file = new FileWriter("confClient" + File.separator + "test3.json");
        file.write(obj.toJSONString());
        file.flush();
        file.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    System.out.print(obj);

    /*
    JSONParser parser = new JSONParser();
    try {
        Object obj2 = parser.parse(new FileReader("test2.json"));
        JSONObject jsonObject = (JSONObject) obj2;
        HashMap serviceDescription2 = (HashMap) jsonObject.get("serviceDescription");
        method.printHashMap(serviceDescription2);
        HashMap gauranteeTerm2 = (HashMap) jsonObject.get("gauranteeTerm");
        method.printHashMap(gauranteeTerm2);
        ArrayList creationConstraint = (ArrayList) jsonObject.get("creationConstraint");
        method.printArrayList(creationConstraint);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    }
    */
}
From source file:gedi.lfc.quick.ShiroguchiCounter.java
public static void main(String[] args) throws IOException {
    String path = "/home/users/erhard/biostor/seq/ngade/shiroguchi_randombarcodes/data/";

    MemoryIntervalTreeStorage<int[]> reads = new MemoryIntervalTreeStorage<int[]>(int[].class);

    String[] files = { "Shiroguchi_A_collapsed.bed", "Shiroguchi_B_collapsed.bed",
            "Shiroguchi_A_uncollapsed.bed", "Shiroguchi_B_uncollapsed.bed" };
    for (int i = 0; i < 4; i++) {
        Iterator<String> it = new LineOrientedFile(path + files[i]).lineIterator();
        while (it.hasNext()) {
            String[] f = StringUtils.split(it.next(), '\t');
            Chromosome chr = Chromosome.obtain(f[0]);
            ArrayGenomicRegion region = new ArrayGenomicRegion(Integer.parseInt(f[1]),
                    Integer.parseInt(f[2]));
            int c = Integer.parseInt(StringUtils.splitField(f[3], '|', 0));
            int[] counts = reads.getData(chr, region);
            if (counts == null)
                reads.add(chr, region, counts = new int[4]);
            counts[i] += c;
        }
    }

    HashMap<String, String> map = new HashMap<String, String>();
    new LineOrientedFile(path + "U00096.2.genes.csv").lineIterator().forEachRemaining(s -> {
        String[] f = StringUtils.split(s, '\t');
        map.put(f[0], f[7]);
    });

    LineOrientedFile fragments = new LineOrientedFile("fragments.csv");
    fragments.startWriting();
    fragments.writef("Gene\tonlyA\tonlyB\tBoth\tLength\n");

    LineOrientedFile bias = new LineOrientedFile("bias.csv");
    bias.startWriting();
    bias.writef("OriginalA\tBiasA\tOriginalB\tBiasB\n");

    IntArrayList biasFactors = new IntArrayList();
    ArrayList<GeneData> geneData = new ArrayList<GeneData>();

    MemoryIntervalTreeStorage<Transcript> genes = new BiomartExonFileReader(path + "U00096.2.exons.csv",
            false).readIntoMemoryTakeFirst();
    for (ImmutableReferenceGenomicRegion<Transcript> g : genes.getReferenceGenomicRegions()) {
        ArrayList<ImmutableReferenceGenomicRegion<int[]>> frag = reads
                .getReferenceRegionsIntersecting(g.getReference().toStrandIndependent(), g.getRegion());

        GeneData gd = new GeneData();
        int l = g.getRegion().getTotalLength();
        for (ImmutableReferenceGenomicRegion<int[]> r : frag) {
            if (r.getData()[0] == 0)
                gd.onlyB++;
            if (r.getData()[1] == 0)
                gd.onlyA++;
            if (r.getData()[0] == 0 && r.getData()[1] == 0)
                throw new RuntimeException();
            bias.writef("%d\t%.0f\t%d\t%.0f\n", r.getData()[0], r.getData()[2] / (double) r.getData()[0],
                    r.getData()[1], r.getData()[3] / (double) r.getData()[1]);
            if (r.getData()[0] > 0) {
                biasFactors.add(r.getData()[2] / r.getData()[0]);
            }
            if (r.getData()[1] > 0) {
                biasFactors.add(r.getData()[3] / r.getData()[1]);
            }
        }
        gd.both = frag.size() - gd.onlyA - gd.onlyB;

        fragments.writef("%s\t%d\t%d\t%d\t%d\n", map.get(g.getData().getTranscriptId()), gd.onlyA,
                gd.onlyB, gd.both, l);

        if (gd.onlyA + gd.onlyB + gd.both > 0)
            geneData.add(gd);
    }
    fragments.finishWriting();
    bias.finishWriting();

    double fc = 1.4;
    int rep = 5;
    int nDiff = 1000;
    int n = 10000;
    int N = 6000;
    double noise = 0.05;

    LineOrientedFile countMatrix = new LineOrientedFile("countMatrix.csv");
    countMatrix.startWriting();
    LineOrientedFile downCountMatrix = new LineOrientedFile("countMatrix_downsampled.csv");
    downCountMatrix.startWriting();

    RandomNumbers rnd = new RandomNumbers();
    for (int i = 0; i < n; i++) {
        GeneData gd = geneData.get(rnd.getUnif(0, geneData.size()));

        double p1 = (gd.onlyA + gd.both) / (double) N;
        double p2 = i < nDiff ? p1 / fc : p1;

        ArrayList<ReadData> list = new ArrayList<ReadData>();
        for (int r = 0; r < rep * 2; r++) {
            int k = rnd.getBinom(N, r < rep ? p1 : p2) + 1;
            int hit = N == -1 ? 0 : rnd.getBinom(k, list.size() / (double) N);
            rnd.shuffle(list);
            for (int x = 0; x < hit; x++)
                list.get(x).reads[r] = (int) rnd.getNormal(list.get(x).bias, list.get(x).bias * noise);
            for (int x = 0; x < k - hit; x++)
                list.add(new ReadData(biasFactors.getInt(rnd.getUnif(0, biasFactors.size())), rep * 2, r));
        }

        int[] c = new int[rep * 2];
        for (ReadData d : list) {
            for (int r = 0; r < c.length; r++) {
                c[r] += d.reads[r];
            }
        }
        double[] down = new double[rep * 2];
        for (ReadData d : list) {
            double max = ArrayUtils.max(d.reads);
            for (int r = 0; r < down.length; r++) {
                down[r] += d.reads[r] / max;
            }
        }
        countMatrix.writeLine(StringUtils.concat("\t", c));
        downCountMatrix.writeLine(StringUtils.concat("\t", down));
    }
    countMatrix.finishWriting();
    downCountMatrix.finishWriting();
}