List of usage examples for java.util.ArrayList.get(int)
public E get(int index)
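Before the project snippets below, here is a minimal, self-contained sketch of the get(int) contract (the class name ArrayListGetExample is illustrative only, not taken from any of the source files that follow): indices are zero-based, access is constant-time for ArrayList, and an index outside the range 0..size()-1 throws IndexOutOfBoundsException.

import java.util.ArrayList;
import java.util.List;

public class ArrayListGetExample {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        names.add("alice");
        names.add("bob");

        // get(int) is zero-based and O(1) for ArrayList
        System.out.println(names.get(0));                  // prints "alice"
        System.out.println(names.get(names.size() - 1));   // prints "bob" (last element)

        // An index outside 0..size()-1 throws IndexOutOfBoundsException
        try {
            names.get(2);
        } catch (IndexOutOfBoundsException e) {
            System.out.println("no element at index 2: " + e.getMessage());
        }
    }
}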
From source file:com.aestel.chemistry.openEye.fp.DistMatrix.java
public static void main(String... args) throws IOException {
    long start = System.currentTimeMillis();

    // create command line Options object
    Options options = new Options();
    Option opt = new Option("i", true, "input file [.tsv from FingerPrinter]");
    opt.setRequired(true);
    options.addOption(opt);
    opt = new Option("o", true, "output file [.tsv]");
    opt.setRequired(true);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();
    if (args.length != 0)
        exitWithHelp(options);

    String file = cmd.getOptionValue("i");
    BufferedReader in = new BufferedReader(new FileReader(file));
    file = cmd.getOptionValue("o");
    PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file)));

    ArrayList<Fingerprint> fps = new ArrayList<Fingerprint>();
    ArrayList<String> ids = new ArrayList<String>();
    String line;
    while ((line = in.readLine()) != null) {
        String[] parts = line.split("\t");
        if (parts.length == 3) {
            ids.add(parts[0]);
            fps.add(new ByteFingerprint(parts[2]));
        }
    }
    in.close();

    out.print("ID");
    for (int i = 0; i < ids.size(); i++) {
        out.print('\t');
        out.print(ids.get(i));
    }
    out.println();

    for (int i = 0; i < ids.size(); i++) {
        out.print(ids.get(i));
        Fingerprint fp1 = fps.get(i);
        for (int j = 0; j <= i; j++) {
            out.printf("\t%.4g", fp1.tanimoto(fps.get(j)));
        }
        out.println();
    }
    out.close();

    System.err.printf("Done %d fingerprints in %.2gsec\n", fps.size(), (System.currentTimeMillis() - start) / 1000D);
}
From source file:ch.epfl.lsir.xin.test.UserAverageTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//UserAverage");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//UserAverage.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        System.out.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }

        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: " + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on user average method.");
        UserAverage algo = new UserAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()), itemIDIndexMapping.get(rating.getItemID()));
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    // MAE: 0.8353035962363073 RMSE: 1.0422971886952053 (MovieLens 100k)
}
From source file:ch.epfl.lsir.xin.test.GlobalMeanTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//GlobalMean");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//GlobalMean.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: " + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    logger.flush();
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        System.out.println("Folder: " + folder);
        logger.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }

        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: " + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on global average method.");
        GlobalAverage algo = new GlobalAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(rating.getUserID(), rating.getItemID());
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        // System.out.println("MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    // MAE: 0.9338607074893257 RMSE: 1.1170971131112037 (MovieLens1M)
    // MAE: 0.9446876509332618 RMSE: 1.1256517870920375 (MovieLens100K)
}
From source file:ch.epfl.lsir.xin.test.ItemAverageTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    PrintWriter logger = new PrintWriter(".//results//ItemAverage");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//ItemAverage.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: " + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        logger.flush();
        System.out.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { // test data
                testRatings.addAll(folders.get(i));
            } else { // training data
                trainRatings.addAll(folders.get(i));
            }
        }

        // create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: " + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on item average method.");
        ItemAverage algo = new ItemAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());
        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()), itemIDIndexMapping.get(rating.getItemID()));
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE + " RMSE: " + RMSE);
        logger.flush();
        // System.out.println("MAE: " + MAE + " RMSE: " + RMSE);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }
    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    // MAE: 0.8173633324758338 RMSE: 1.0251973503888645 (MovieLens 100K)
}
From source file:com.ericsson.eiffel.remrem.semantics.clone.PrepareLocalEiffelSchemas.java
public static void main(String[] args) throws IOException {
    final PrepareLocalEiffelSchemas prepareLocalSchema = new PrepareLocalEiffelSchemas();
    final Proxy proxy = prepareLocalSchema.getProxy(httpProxyUrl, httpProxyPort, httpProxyUsername, httpProxyPassword);
    if (proxy != null) {
        prepareLocalSchema.setProxy(proxy);
    }
    final String eiffelRepoUrl = args[0];
    final String eiffelRepoBranch = args[1];
    final String operationRepoUrl = args[2];
    final String operationRepoBranch = args[3];
    final File localEiffelRepoPath = new File(System.getProperty(EiffelConstants.USER_HOME) + File.separator + EiffelConstants.EIFFEL);
    final File localOperationsRepoPath = new File(System.getProperty(EiffelConstants.USER_HOME) + File.separator + EiffelConstants.OPERATIONS_REPO_NAME);

    // Clone the Eiffel repo from GitHub
    prepareLocalSchema.cloneEiffelRepo(eiffelRepoUrl, eiffelRepoBranch, localEiffelRepoPath);
    // Clone the Eiffel operations repo from GitHub
    prepareLocalSchema.cloneEiffelRepo(operationRepoUrl, operationRepoBranch, localOperationsRepoPath);
    // Copy the operations repo schemas to the location where the Eiffel repo schemas are available
    prepareLocalSchema.copyOperationSchemas(localOperationsRepoPath.getAbsolutePath(), localEiffelRepoPath.getAbsolutePath());

    // Read and load JSON schemas from the cloned directory
    final LocalRepo localRepo = new LocalRepo(localEiffelRepoPath);
    localRepo.readSchemas();
    final ArrayList<String> jsonEventNames = localRepo.getJsonEventNames();
    final ArrayList<File> jsonEventSchemas = localRepo.getJsonEventSchemas();

    // Schema changes
    final SchemaFile schemaFile = new SchemaFile();
    // Iterate over each JSON schema file to add and modify the necessary properties
    if (jsonEventNames != null && jsonEventSchemas != null) {
        for (int i = 0; i < jsonEventNames.size(); i++) {
            schemaFile.modify(jsonEventSchemas.get(i), jsonEventNames.get(i));
        }
    }
}
From source file:edu.usd.btl.ontology.ToolTree.java
public static void main(String[] args) throws Exception {
    try {
        // File ontoInput = new File(".\\ontology_files\\EDAM_1.3.owl");
        ObjectMapper mapper = new ObjectMapper();
        OntologyFileRead ontFileRead = new OntologyFileRead();
        ArrayList<edu.usd.btl.ontology.BioPortalElement> nodeList = ontFileRead.readFile(".\\ontology_files\\EDAM_1.3.owl");

        // write nodeList to a JSON string
        ObjectWriter treeWriter = mapper.writer().withDefaultPrettyPrinter();
        String edamJSON = mapper.writeValueAsString(nodeList);
        JsonNode rootNode = mapper.readValue(edamJSON, JsonNode.class);
        System.out.println("IsNull" + rootNode.toString());

        OntSearch ontSearch = new OntSearch();
        System.out.println(nodeList.get(0).getURI());
        String result = ontSearch.searchElementByURI("http://edamontology.org/topic_2817");
        System.out.println("RESULT = " + result);
        String topicSearchResult = ontSearch.findAllTopics();
        System.out.println("Topics Result = " + topicSearchResult);
        File ontFile = new File(".\\ontology_files\\EDAM_1.3.owl");
        String searchFromFileResult = ontSearch.searchNodeFromFile("http://edamontology.org/topic_2817", ".\\ontology_files\\EDAM_1.3.owl");
        System.out.println("File Response = " + searchFromFileResult.toString());
    } catch (IOException e) {
        System.out.println(e.getMessage());
    }

    // HashMap stuff
    // OntologyFileRead ontFileRead = new OntologyFileRead();
    // ArrayList<edu.usd.btl.ontology.BioPortalElement> nodeList = ontFileRead.readFile(".\\ontology_files\\EDAM_1.3.owl");
    //
    // HashMap hm = new HashMap();
    //
    // // find topics
    // ArrayList<OntologyNode> ontoTopicList = new ArrayList();
    //
    // for (BioPortalElement node : nodeList) {
    //     // System.out.println("****" + node.getURI());
    //     hm.put(node.getURI(), node.getName());
    // }
    //
    // Set set = hm.entrySet();
    // Iterator i = set.iterator();
    // while (i.hasNext()) {
    //     Map.Entry me = (Map.Entry) i.next();
    //     System.out.println(me.getKey() + ": " + me.getValue());
    // }
    // System.out.println("HashMap Size = " + hm.size());
}
From source file:edu.oregonstate.eecs.mcplan.domains.toy.RelevantIrrelevant.java
public static void main(final String[] argv) throws NumberFormatException, IOException {
    final RandomGenerator rng = new MersenneTwister(42);
    final int T = 30;
    final int nr = 3;
    final int ni = 3;
    final Parameters params = new Parameters(T, nr, ni);
    final Actions actions = new Actions(params);
    final FsssModel model = new FsssModel(rng, params);
    State s = model.initialState();
    while (!s.isTerminal()) {
        System.out.println(s);
        System.out.println("R(s): " + model.reward(s));
        actions.setState(s, 0);
        final ArrayList<Action> action_list = Fn.takeAll(actions);
        for (int i = 0; i < action_list.size(); ++i) {
            System.out.println(i + ": " + action_list.get(i));
        }
        System.out.print(">>> ");
        final BufferedReader cin = new BufferedReader(new InputStreamReader(System.in));
        final int choice = Integer.parseInt(cin.readLine());
        final Action a = action_list.get(choice);
        System.out.println("R(s, a): " + model.reward(s, a));
        s = model.sampleTransition(s, a);
    }

    // // Estimate the value of a "good" policy.
    // // Note: The "good" policy is to Invest when you can, and Sell if the
    // // price is >= 2. This is not necessarily optimal because:
    // // 1. You should Borrow once the episode will end before the loan must be repaid
    // // 2. For some values of invest_period, you should pass on a low price
    // // early in the period to try to get a better one later.
    // final int Ngames = 10000;
    // double V = 0;
    // int Ninvest = 0;
    // for( int i = 0; i < Ngames; ++i ) {
    //     State s = model.initialState();
    //     double Vi = model.reward( s );
    //     while( !s.isTerminal() ) {
    //         final Action a;
    //
    //         // "Good" policy
    //         if( s.investment == 0 ) {
    //             a = new InvestAction();
    //             Ninvest += 1;
    //         }
    //         else if( s.investment > 0 && s.price >= 2 ) {
    //             if( s.invest_t < (params.invest_period - 1) || s.price > 2 ) {
    //                 a = new SellAction();
    //             }
    //             else {
    //                 a = new SaveAction();
    //             }
    //             // a = new SellAction();
    //         }
    //         else {
    //             a = new SaveAction();
    //         }
    //
    //         // "Borrow" policy
    //         // if( s.loan == 0 ) {
    //         //     a = new BorrowAction();
    //         // }
    //         // else {
    //         //     a = new SaveAction();
    //         // }
    //
    //         final double ra = model.reward( s, a );
    //         s = model.sampleTransition( s, a );
    //         Vi += ra + model.reward( s );
    //     }
    //     V += Vi;
    // }
    //
    // final double Vavg = V / Ngames;
    // final double Navg = (Ninvest / ((double) Ngames));
    // System.out.println( "Avg. value: " + Vavg );
    // System.out.println( "Avg. Invest actions: " + Navg );
    // System.out.println( "V(Invest) ~= " + ( 1 + (Vavg - params.T)/Navg ) );
}
From source file:challenge302.intermediate.ASCIIHistogramMaker.java
public static void main(String[] args) {
    String inFile = "/data/challenge302intermediate.txt";
    // ArrayList<int[]> graphData = new ArrayList<int[]>();
    ArrayList<String> input = new ChallengeInput().getInputByLines(ASCIIHistogramMaker.class, inFile);

    // take the first line as the chart bounds and create a new BarChart with those bounds
    IntBasedBarChart chart = new IntBasedBarChart(input.remove(0).split(" "));
    // take the next line as the number of chart elements
    int size = Integer.parseInt(input.remove(0));
    for (int i = 0; i < size; i++) {
        // System.out.println("Parsing: graph data line=\"" + input.get(i) + "\"");
        chart.addData(stringArraytoIntArray(input.get(i).split(" ")));
    } // end adding all lines

    printChart(chart, size);
}
From source file:ch.cyclops.gatekeeper.Main.java
public static void main(String[] args) throws Exception {
    CompositeConfiguration config = new CompositeConfiguration();
    config.addConfiguration(new SystemConfiguration());
    if (args.length > 0)
        config.addConfiguration(new PropertiesConfiguration(args[args.length - 1]));

    // setting up the logging framework now
    Logger.getRootLogger().getLoggerRepository().resetConfiguration();
    ConsoleAppender console = new ConsoleAppender(); // create appender
    // configure the appender
    String PATTERN = "%d [%p|%C{1}|%M|%L] %m%n";
    console.setLayout(new PatternLayout(PATTERN));
    String logConsoleLevel = config.getProperty("log.level.console").toString();
    switch (logConsoleLevel) {
    case ("INFO"):
        console.setThreshold(Level.INFO);
        break;
    case ("DEBUG"):
        console.setThreshold(Level.DEBUG);
        break;
    case ("WARN"):
        console.setThreshold(Level.WARN);
        break;
    case ("ERROR"):
        console.setThreshold(Level.ERROR);
        break;
    case ("FATAL"):
        console.setThreshold(Level.FATAL);
        break;
    case ("OFF"):
        console.setThreshold(Level.OFF);
        break;
    default:
        console.setThreshold(Level.ALL);
    }
    console.activateOptions();
    // add appender to any Logger (here is root)
    Logger.getRootLogger().addAppender(console);

    String logFileLevel = config.getProperty("log.level.file").toString();
    String logFile = config.getProperty("log.file").toString();
    if (logFile != null && logFile.length() > 0) {
        FileAppender fa = new FileAppender();
        fa.setName("FileLogger");
        fa.setFile(logFile);
        fa.setLayout(new PatternLayout("%d %-5p [%c{1}] %m%n"));
        switch (logFileLevel) {
        case ("INFO"):
            fa.setThreshold(Level.INFO);
            break;
        case ("DEBUG"):
            fa.setThreshold(Level.DEBUG);
            break;
        case ("WARN"):
            fa.setThreshold(Level.WARN);
            break;
        case ("ERROR"):
            fa.setThreshold(Level.ERROR);
            break;
        case ("FATAL"):
            fa.setThreshold(Level.FATAL);
            break;
        case ("OFF"):
            fa.setThreshold(Level.OFF);
            break;
        default:
            fa.setThreshold(Level.ALL);
        }
        fa.setAppend(true);
        fa.activateOptions();
        // add appender to any Logger (here is root)
        Logger.getRootLogger().addAppender(fa);
    }

    // now logger configuration is done, we can start using it.
    Logger mainLogger = Logger.getLogger("gatekeeper-driver.Main");
    mainLogger.debug("Driver loaded properly");
    if (args.length > 0) {
        GKDriver gkDriver = new GKDriver(args[args.length - 1], 1, "Eq7K8h9gpg");
        System.out.println("testing if admin: " + gkDriver.isAdmin(1, 0));
        ArrayList<String> uList = gkDriver.getUserList(0); // the argument is the starting count of the number of allowed internal attempts
        if (uList != null) {
            mainLogger.info("Received user list from Gatekeeper! Count: " + uList.size());
            for (int i = 0; i < uList.size(); i++)
                mainLogger.info(uList.get(i));
        }
        boolean authResponse = gkDriver.simpleAuthentication(1, "Eq7K8h9gpg");
        if (authResponse)
            mainLogger.info("Authentication attempt was successful.");
        else
            mainLogger.warn("Authentication attempt failed!");

        String sName = "myservice-" + System.currentTimeMillis();
        HashMap<String, String> newService = gkDriver.registerService(sName, "this is my new cool service", 0);
        String sKey = "";
        if (newService != null) {
            mainLogger.info("Service registration was successful! Got:" + newService.get("uri") + ", Key=" + newService.get("key"));
            sKey = newService.get("key");
        } else {
            mainLogger.warn("Service registration failed!");
        }

        int newUserId = gkDriver.registerUser("user-" + System.currentTimeMillis(), "pass1234", false, sName, 0);
        if (newUserId != -1)
            mainLogger.info("User registration was successful. Received new id: " + newUserId);
        else
            mainLogger.warn("User registration failed!");

        String token = gkDriver.generateToken(newUserId, "pass1234");
        boolean isValidToken = gkDriver.validateToken(token, newUserId);
        if (isValidToken)
            mainLogger.info("The token: " + token + " is successfully validated for user-id: " + newUserId);
        else
            mainLogger.warn("Token validation was unsuccessful! Token: " + token + ", user-id: " + newUserId);

        ArrayList<String> sList = gkDriver.getServiceList(0); // the argument is the starting count of the number of allowed internal attempts
        if (sList != null) {
            mainLogger.info("Received service list from Gatekeeper! Count: " + sList.size());
            for (int i = 0; i < sList.size(); i++)
                mainLogger.info(sList.get(i));
        }
        isValidToken = gkDriver.validateToken(token, sKey);
        if (isValidToken)
            mainLogger.info("The token: " + token + " is successfully validated for user-id: " + newUserId + " against s-key:" + sKey);
        else
            mainLogger.warn("Token validation was unsuccessful! Token: " + token + ", user-id: " + newUserId + ", s-key: " + sKey);

        boolean deleteResult = gkDriver.deleteUser(newUserId, 0);
        if (deleteResult)
            mainLogger.info("User with id: " + newUserId + " was deleted successfully.");
        else
            mainLogger.warn("User with id: " + newUserId + " could not be deleted successfully!");
    }
}
From source file:edu.oregonstate.eecs.mcplan.ml.LinearDiscriminantAnalysis.java
public static void main(final String[] args) throws FileNotFoundException {
    final File root = new File("test/LinearDiscriminantAnalysis");
    root.mkdirs();
    final int seed = 42;
    final int N = 30;
    final double shrinkage = 1e-6;
    final RandomGenerator rng = new MersenneTwister(seed);
    final Pair<ArrayList<double[]>, int[]> dataset = Datasets.twoVerticalGaussian2D(rng, N);
    final ArrayList<double[]> data = dataset.first;
    final int[] label = dataset.second;
    final int Nlabels = 2;

    final int[] shuffle_idx = Fn.linspace(0, Nlabels * N);
    Fn.shuffle(rng, shuffle_idx);
    final ArrayList<double[]> shuffled = new ArrayList<double[]>();
    final int[] shuffled_label = new int[label.length];
    for (int i = 0; i < data.size(); ++i) {
        shuffled.add(Fn.copy(data.get(shuffle_idx[i])));
        shuffled_label[i] = label[shuffle_idx[i]];
    }

    final Csv.Writer data_writer = new Csv.Writer(new PrintStream(new File(root, "data.csv")));
    for (final double[] v : data) {
        for (int i = 0; i < v.length; ++i) {
            data_writer.cell(v[i]);
        }
        data_writer.newline();
    }
    data_writer.close();

    System.out.println("[Training]");
    // final KernelPrincipalComponentsAnalysis<RealVector> kpca
    //         = new KernelPrincipalComponentsAnalysis<RealVector>( shuffled, new RadialBasisFunctionKernel( 0.5 ), 1e-6 );
    final LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(shuffled, shuffled_label, Nlabels, shrinkage);
    System.out.println("[Finished]");
    for (final RealVector ev : lda.eigenvectors) {
        System.out.println(ev);
    }

    System.out.println("Transformed data:");
    final LinearDiscriminantAnalysis.Transformer transformer = lda.makeTransformer();
    final Csv.Writer transformed_writer = new Csv.Writer(new PrintStream(new File(root, "transformed.csv")));
    for (final double[] u : data) {
        final RealVector uvec = new ArrayRealVector(u);
        System.out.println(uvec);
        final RealVector v = transformer.transform(uvec);
        System.out.println("-> " + v);
        for (int i = 0; i < v.getDimension(); ++i) {
            transformed_writer.cell(v.getEntry(i));
        }
        transformed_writer.newline();
    }
    transformed_writer.close();
}