List of usage examples for the java.util.HashMap no-argument constructor
public HashMap()
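Before the examples drawn from real projects below, here is a minimal, self-contained sketch of the no-argument constructor. It is not taken from any of the listed source files; the class name and the sample keys are illustrative only.

import java.util.HashMap;
import java.util.Map;

public class HashMapConstructorExample {
    public static void main(String[] args) {
        // new HashMap() creates an empty map with the default initial
        // capacity (16) and the default load factor (0.75).
        Map<String, Integer> counts = new HashMap<String, Integer>();
        counts.put("apples", 3);
        counts.put("oranges", 5);
        System.out.println("apples -> " + counts.get("apples")); // prints: apples -> 3
        System.out.println("size = " + counts.size());           // prints: size = 2
    }
}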
From source file:edu.msu.cme.rdp.seqmatch.cli.SeqMatchMain.java
public static void main(String[] args) throws Exception {
    if (args.length == 0) {
        System.err.println("USAGE: SeqMatchMain [train|seqmatch] <args>");
        return;
    }

    String cmd = args[0];
    args = Arrays.copyOfRange(args, 1, args.length);

    if (cmd.equals("train")) {
        if (args.length != 2) {
            System.err.println("USAGE: train <reference sequences> <trainee_out_file_prefix>"
                    + "\nMultiple trainee output files might be created, each containing maximum "
                    + Trainee.MAX_NUM_SEQ + " sequences");
            return;
        }

        File refSeqs = new File(args[0]);
        File traineeFileOut = new File(args[1]);

        //maybe more than 1 trainee files need to be created, depending on the number of seqs
        CreateMultiMatchFromFile.getMultiTrainee(refSeqs, traineeFileOut);
    } else if (cmd.equals("seqmatch")) {
        File refFile = null;
        File queryFile = null;
        HashMap<String, String> descMap = new HashMap<String, String>();
        PrintStream out = new PrintStream(System.out);
        int knn = 20;
        float minSab = .5f;

        try {
            CommandLine line = new PosixParser().parse(options, args);
            if (line.hasOption("knn")) {
                knn = Integer.parseInt(line.getOptionValue("knn"));
            }
            if (line.hasOption("sab")) {
                minSab = Float.parseFloat(line.getOptionValue("sab"));
            }
            if (line.hasOption("desc")) {
                descMap = readDesc(new File(line.getOptionValue("desc")));
            }
            if (line.hasOption("outFile")) {
                out = new PrintStream(new File(line.getOptionValue("outFile")));
            }

            args = line.getArgs();
            if (args.length != 2) {
                throw new Exception("Unexpected number of command line arguments");
            }

            refFile = new File(args[0]);
            queryFile = new File(args[1]);
        } catch (Exception e) {
            new HelpFormatter().printHelp("seqmatch <refseqs | trainee_file_or_dir> <query_file>\n"
                    + " trainee_file_or_dir is a single trainee file or a directory containing multiple trainee files",
                    options);
            System.err.println("Error: " + e.getMessage());
            return;
        }

        SeqMatch seqmatch = null;
        if (refFile.isDirectory()) { // a directory of trainee files
            List<SeqMatch> engineList = new ArrayList<SeqMatch>();
            for (File f : refFile.listFiles()) {
                if (!f.isHidden()) {
                    TwowaySeqMatch match = new TwowaySeqMatch(new SeqMatchEngine(new StorageTrainee(f)));
                    engineList.add(match);
                }
            }
            seqmatch = new MultiTraineeSeqMatch(engineList);
        } else { // a single fasta file or trainee file
            if (SeqUtils.guessFileFormat(refFile) == SequenceFormat.UNKNOWN) {
                seqmatch = CLISeqMatchFactory.trainTwowaySeqMatch(new StorageTrainee(refFile));
            } else {
                seqmatch = CreateMultiMatchFromFile.getMultiMatch(refFile);
            }
        }

        out.println("query name\tmatch seq\torientation\tS_ab score\tunique oligomers\tdescription");
        SeqReader reader = new SequenceReader(queryFile);
        Sequence seq;

        while ((seq = reader.readNextSequence()) != null) {
            SeqMatchResultSet resultSet = seqmatch.match(seq, knn);
            for (SeqMatchResult result : resultSet) {
                char r = '+';
                if (result.isReverse()) {
                    r = '-';
                }

                if (result.getScore() > minSab) {
                    out.println(seq.getSeqName() + "\t" + result.getSeqName() + "\t" + r + "\t"
                            + result.getScore() + "\t" + resultSet.getQueryWordCount() + "\t"
                            + descMap.get(result.getSeqName()));
                }
            }
        }
        out.close();
    } else {
        throw new IllegalArgumentException("USAGE: SeqMatchMain [train|seqmatch] <args>");
    }
}
From source file:org.mitre.mpf.rest.client.Main.java
public static void main(String[] args) throws RestClientException, IOException, InterruptedException {
    System.out.println("Starting rest-client!");

    //not necessary for localhost, but left if a proxy configuration is needed
    //System.setProperty("http.proxyHost","");
    //System.setProperty("http.proxyPort","");

    String currentDirectory;
    currentDirectory = System.getProperty("user.dir");
    System.out.println("Current working directory : " + currentDirectory);

    String username = "mpf";
    String password = "mpf123";
    byte[] encodedBytes = Base64.encodeBase64((username + ":" + password).getBytes());
    String base64 = new String(encodedBytes);
    System.out.println("encodedBytes " + base64);
    final String mpfAuth = "Basic " + base64;

    RequestInterceptor authorize = new RequestInterceptor() {
        @Override
        public void intercept(HttpRequestBase request) {
            request.addHeader("Authorization", mpfAuth /*credentials*/);
        }
    };

    //RestClient client = RestClient.builder().requestInterceptor(authorize).build();
    CustomRestClient client = (CustomRestClient) CustomRestClient.builder()
            .restClientClass(CustomRestClient.class).requestInterceptor(authorize).build();

    //getAvailableWorkPipelineNames
    String url = "http://localhost:8080/workflow-manager/rest/pipelines";
    Map<String, String> params = new HashMap<String, String>();
    List<String> availableWorkPipelines = client.get(url, params, new TypeReference<List<String>>() {
    });
    System.out.println("availableWorkPipelines size: " + availableWorkPipelines.size());
    System.out.println(Arrays.toString(availableWorkPipelines.toArray()));

    //processMedia
    JobCreationRequest jobCreationRequest = new JobCreationRequest();
    URI uri = Paths.get(currentDirectory,
            "/trunk/workflow-manager/src/test/resources/samples/meds/aa/S001-01-t10_01.jpg").toUri();
    jobCreationRequest.getMedia().add(new JobCreationMediaData(uri.toString()));
    uri = Paths.get(currentDirectory,
            "/trunk/workflow-manager/src/test/resources/samples/meds/aa/S008-01-t10_01.jpg").toUri();
    jobCreationRequest.getMedia().add(new JobCreationMediaData(uri.toString()));
    jobCreationRequest.setExternalId("external id");

    //get first DLIB pipeline
    String firstDlibPipeline = availableWorkPipelines.stream()
            //.peek(pipepline -> System.out.println("will filter - " + pipepline))
            .filter(pipepline -> pipepline.startsWith("DLIB")).findFirst().get();
    System.out.println("found firstDlibPipeline: " + firstDlibPipeline);

    jobCreationRequest.setPipelineName(firstDlibPipeline); //grabbed from 'rest/pipelines' - see #1

    //two optional params
    jobCreationRequest.setBuildOutput(true);
    //jobCreationRequest.setPriority(priority); //will be set to 4 (default) if not set

    JobCreationResponse jobCreationResponse = client.customPostObject(
            "http://localhost:8080/workflow-manager/rest/jobs", jobCreationRequest, JobCreationResponse.class);
    System.out.println("jobCreationResponse job id: " + jobCreationResponse.getJobId());

    System.out.println("\n---Sleeping for 10 seconds to let the job process---\n");
    Thread.sleep(10000);

    //getJobStatus
    url = "http://localhost:8080/workflow-manager/rest/jobs"; // /status";
    params = new HashMap<String, String>();
    //OPTIONAL
    //params.put("v", "") - no versioning currently implemented
    //id is now a path var - if not set, all job info will returned
    url = url + "/" + Long.toString(jobCreationResponse.getJobId());
    SingleJobInfo jobInfo = client.get(url, params, SingleJobInfo.class);
    System.out.println("jobInfo id: " + jobInfo.getJobId());

    //getSerializedOutput
    String jobIdToGetOutputStr = Long.toString(jobCreationResponse.getJobId());
    url = "http://localhost:8080/workflow-manager/rest/jobs/" + jobIdToGetOutputStr + "/output/detection";
    params = new HashMap<String, String>();
    //REQUIRED - job id is now a path var and required for this endpoint
    String serializedOutput = client.getAsString(url, params);
    System.out.println("serializedOutput: " + serializedOutput);
}
From source file:burstcoin.observer.Observer.java
public static void main(String[] args) throws Exception {
    LOG.info("Starting the engines ... please wait!");

    // overwritten by application.properties
    Map<String, Object> properties = new HashMap<String, Object>();
    properties.put("server.port", ObserverProperties.getObserverPort());
    properties.put("spring.mail.protocol", ObserverProperties.getMailProtocol());
    properties.put("spring.mail.host", ObserverProperties.getMailHost());
    properties.put("spring.mail.port", ObserverProperties.getMailPort());
    properties.put("spring.mail.username", ObserverProperties.getMailUsername());
    properties.put("spring.mail.password", ObserverProperties.getMailPassword());
    properties.put("spring.thymeleaf.cache", ObserverProperties.isEnableTemplateCaching());

    new SpringApplicationBuilder(Observer.class).properties(properties).build(args).run();
}
From source file:ch.epfl.lsir.xin.test.GlobalMeanTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub
    PrintWriter logger = new PrintWriter(".//results//GlobalMean");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//GlobalMean.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    logger.flush();

    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        System.out.println("Folder: " + folder);
        logger.println("Folder: " + folder);

        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { //test data
                testRatings.addAll(folders.get(i));
            } else { //training data
                trainRatings.addAll(folders.get(i));
            }
        }

        //create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }

        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on global average method.");
        GlobalAverage algo = new GlobalAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());

        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(rating.getUserID(), rating.getItemID());
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        // System.out.println("MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    //MAE: 0.9338607074893257 RMSE: 1.1170971131112037 (MovieLens1M)
    //MAE: 0.9446876509332618 RMSE: 1.1256517870920375 (MovieLens100K)
}
From source file:com.github.besherman.fingerprint.Main.java
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withDescription("creates a fingerprint file for the directory").hasArg()
            .withArgName("dir").create('c'));
    options.addOption(OptionBuilder.withDescription("shows a diff between two fingerprint files")
            .withArgName("left-file> <right-file").hasArgs(2).create('d'));
    options.addOption(OptionBuilder.withDescription("shows a diff between a fingerprint and a directory")
            .withArgName("fingerprint> <dir").hasArgs(2).create('t'));
    options.addOption(OptionBuilder.withDescription("shows duplicates in a directory").withArgName("dir")
            .hasArgs(1).create('u'));
    options.addOption(
            OptionBuilder.withDescription("output to file").withArgName("output-file").hasArg().create('o'));

    PosixParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException ex) {
        System.out.println(ex.getMessage());
        System.out.println("");
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("fingerprint-dir", options, true);
        System.exit(7);
    }

    if (cmd.hasOption('c')) {
        Path root = Paths.get(cmd.getOptionValue('c'));
        if (!Files.isDirectory(root)) {
            System.err.println("root is not a directory");
            System.exit(7);
        }
        OutputStream out = System.out;
        if (cmd.hasOption('o')) {
            Path p = Paths.get(cmd.getOptionValue('o'));
            out = Files.newOutputStream(p);
        }
        Fingerprint fp = new Fingerprint(root, "");
        fp.write(out);
    } else if (cmd.hasOption('d')) {
        String[] ar = cmd.getOptionValues('d');
        Path leftFingerprintFile = Paths.get(ar[0]), rightFingerprintFile = Paths.get(ar[1]);
        if (!Files.isRegularFile(leftFingerprintFile)) {
            System.out.printf("%s is not a file%n", leftFingerprintFile);
            System.exit(7);
        }
        if (!Files.isRegularFile(rightFingerprintFile)) {
            System.out.printf("%s is not a file%n", rightFingerprintFile);
            System.exit(7);
        }
        Fingerprint left, right;
        try (InputStream input = Files.newInputStream(leftFingerprintFile)) {
            left = new Fingerprint(input);
        }
        try (InputStream input = Files.newInputStream(rightFingerprintFile)) {
            right = new Fingerprint(input);
        }
        Diff diff = new Diff(left, right);
        // TODO: if we have redirected output
        diff.print(System.out);
    } else if (cmd.hasOption('t')) {
        throw new RuntimeException("Not yet implemented");
    } else if (cmd.hasOption('u')) {
        Path root = Paths.get(cmd.getOptionValue('u'));
        Fingerprint fp = new Fingerprint(root, "");
        Map<String, FilePrint> map = new HashMap<>();
        fp.stream().forEach(f -> {
            if (map.containsKey(f.getHash())) {
                System.out.println(" " + map.get(f.getHash()).getPath());
                System.out.println("= " + f.getPath());
                System.out.println("");
            } else {
                map.put(f.getHash(), f);
            }
        });
    } else {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("fingd", options, true);
    }
}
From source file:fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphTraverser.java
public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {
        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosGraphTraverser.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                loadOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        resource.load(loadOpts);

        LOG.log(Level.INFO, "Start counting");
        int count = 0;
        long begin = System.currentTimeMillis();
        for (Iterator<EObject> iterator = resource.getAllContents(); iterator.hasNext(); iterator
                .next(), count++)
            ;
        long end = System.currentTimeMillis();
        LOG.log(Level.INFO, "End counting");
        LOG.log(Level.INFO, MessageFormat.format("Resource {0} contains {1} elements", uri, count));
        LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }
    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}
From source file:org.ala.harvester.MorphbankHarvester.java
/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);

    MorphbankHarvester h = new MorphbankHarvester();
    Repository r = (Repository) context.getBean("repository");
    h.setRepository(r);

    //set the connection params
    Map<String, String> connectParams = new HashMap<String, String>();

    if (args.length == 1 && args[0].equals("Coral")) {
        h.setCoral(true);
        connectParams.put("endpoint",
                "http://morphbank-svc.ala.org.au/mb3/request?method=search&objecttype=Image&keywords=Coral+Reef+Research&limit="
                        + RESULT_LIMIT
                        + "&firstResult=0&user=&group=&change=&lastDateChanged=&numChangeDays=1&id=&taxonName=&format=svc");
    } else if (args.length == 1) {
        connectParams.put("endpoint",
                "http://morphbank-svc.ala.org.au/mb3/request?method=search&objecttype=Image&keywords=Australia&limit="
                        + RESULT_LIMIT
                        + "&firstResult=0&user=&group=&change=&lastDateChanged=&numChangeDays=1&id=&taxonName=&format=svc");
        try {
            h.setInit(Integer.valueOf(args[0]));
        } catch (NumberFormatException nfe) {
            System.out.println("Starting id is not a number!");
            System.exit(1);
        }
    } else {
        connectParams.put("endpoint",
                "http://morphbank-svc.ala.org.au/mb3/request?method=search&objecttype=Image&keywords=Australia&limit="
                        + RESULT_LIMIT
                        + "&firstResult=0&user=&group=&change=&lastDateChanged=&numChangeDays=1&id=&taxonName=&format=svc");
    }

    h.setConnectionParams(connectParams);

    h.start(MORPHBANK_INFOSOURCE_ID);
}
From source file:fr.inria.atlanmod.kyanos.benchmarks.CdoCreator.java
public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input file");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option outputOpt = OptionBuilder.create(OUT);
    outputOpt.setArgName("OUTPUT");
    outputOpt.setDescription("Output directory");
    outputOpt.setArgs(1);
    outputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option repoOpt = OptionBuilder.create(REPO_NAME);
    repoOpt.setArgName("REPO_NAME");
    repoOpt.setDescription("CDO Repository name");
    repoOpt.setArgs(1);
    repoOpt.setRequired(true);

    options.addOption(inputOpt);
    options.addOption(outputOpt);
    options.addOption(inClassOpt);
    options.addOption(repoOpt);

    CommandLineParser parser = new PosixParser();

    try {
        CommandLine commandLine = parser.parse(options, args);

        URI sourceUri = URI.createFileURI(commandLine.getOptionValue(IN));
        String outputDir = commandLine.getOptionValue(OUT);
        String repositoryName = commandLine.getOptionValue(REPO_NAME);

        Class<?> inClazz = CdoCreator.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("xmi",
                new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put("zxmi",
                new XMIResourceFactoryImpl());
        Resource sourceResource = resourceSet.createResource(sourceUri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();
        if ("zxmi".equals(sourceUri.fileExtension())) {
            loadOpts.put(XMIResource.OPTION_ZIP, Boolean.TRUE);
        }

        Runtime.getRuntime().gc();
        long initialUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory before loading: {0}",
                MessageUtil.byteCountToDisplaySize(initialUsedMemory)));
        LOG.log(Level.INFO, "Loading source resource");
        sourceResource.load(loadOpts);
        LOG.log(Level.INFO, "Source resource loaded");
        Runtime.getRuntime().gc();
        long finalUsedMemory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory();
        LOG.log(Level.INFO, MessageFormat.format("Used memory after loading: {0}",
                MessageUtil.byteCountToDisplaySize(finalUsedMemory)));
        LOG.log(Level.INFO, MessageFormat.format("Memory use increase: {0}",
                MessageUtil.byteCountToDisplaySize(finalUsedMemory - initialUsedMemory)));

        EmbeddedCDOServer server = new EmbeddedCDOServer(outputDir, repositoryName);
        try {
            server.run();
            CDOSession session = server.openSession();
            CDOTransaction transaction = session.openTransaction();
            transaction.getRootResource().getContents().clear();
            LOG.log(Level.INFO, "Start moving elements");
            transaction.getRootResource().getContents().addAll(sourceResource.getContents());
            LOG.log(Level.INFO, "End moving elements");
            LOG.log(Level.INFO, "Commiting");
            transaction.commit();
            LOG.log(Level.INFO, "Commit done");
            transaction.close();
            session.close();
        } finally {
            server.stop();
        }
    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}
From source file:de.tudarmstadt.ukp.csniper.resbuild.EvaluationItemFixer.java
public static void main(String[] args) {
    connect(HOST, DATABASE, USER, PASSWORD);

    Map<Integer, String> items = new HashMap<Integer, String>();
    Map<Integer, String> failed = new HashMap<Integer, String>();

    // fetch coveredTexts of dubious items and clean it
    PreparedStatement select = null;
    try {
        StringBuilder selectQuery = new StringBuilder();
        selectQuery.append("SELECT * FROM EvaluationItem ");
        selectQuery.append("WHERE LOCATE(coveredText, ' ') > 0 ");
        selectQuery.append("OR LOCATE('" + LRB + "', coveredText) > 0 ");
        selectQuery.append("OR LOCATE('" + RRB + "', coveredText) > 0 ");
        selectQuery.append("OR LEFT(coveredText, 1) = ' ' ");
        selectQuery.append("OR RIGHT(coveredText, 1) = ' ' ");

        select = connection.prepareStatement(selectQuery.toString());
        log.info("Running query [" + selectQuery.toString() + "].");
        ResultSet rs = select.executeQuery();
        while (rs.next()) {
            int id = rs.getInt("id");
            String coveredText = rs.getString("coveredText");
            try {
                // special handling of double whitespace: in this case, re-fetch the text
                if (coveredText.contains("  ")) {
                    coveredText = retrieveCoveredText(rs.getString("collectionId"), rs.getString("documentId"),
                            rs.getInt("beginOffset"), rs.getInt("endOffset"));
                }

                // replace bracket placeholders and trim the text
                coveredText = StringUtils.replace(coveredText, LRB, "(");
                coveredText = StringUtils.replace(coveredText, RRB, ")");
                coveredText = coveredText.trim();

                items.put(id, coveredText);
            } catch (IllegalArgumentException e) {
                failed.put(id, e.getMessage());
            }
        }
    } catch (SQLException e) {
        log.error("Exception while selecting: " + e.getMessage());
    } finally {
        closeQuietly(select);
    }

    // write logs
    BufferedWriter bwf = null;
    BufferedWriter bws = null;
    try {
        bwf = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(new File(LOG_FAILED)), "UTF-8"));
        for (Entry<Integer, String> e : failed.entrySet()) {
            bwf.write(e.getKey() + " - " + e.getValue() + "\n");
        }
        bws = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(new File(LOG_SUCCESSFUL)), "UTF-8"));
        for (Entry<Integer, String> e : items.entrySet()) {
            bws.write(e.getKey() + " - " + e.getValue() + "\n");
        }
    } catch (IOException e) {
        log.error("Got an IOException while writing the log files.");
    } finally {
        IOUtils.closeQuietly(bwf);
        IOUtils.closeQuietly(bws);
    }

    log.info("Texts for [" + items.size() + "] items need to be cleaned up.");

    // update the dubious items with the cleaned coveredText
    PreparedStatement update = null;
    try {
        String updateQuery = "UPDATE EvaluationItem SET coveredText = ? WHERE id = ?";
        update = connection.prepareStatement(updateQuery);

        int i = 0;
        for (Entry<Integer, String> e : items.entrySet()) {
            int id = e.getKey();
            String coveredText = e.getValue();

            // update item in database
            update.setString(1, coveredText);
            update.setInt(2, id);
            update.executeUpdate();
            log.debug("Updating " + id + " with [" + coveredText + "]");

            // show percentage of updated items
            i++;
            int part = (int) Math.ceil((double) items.size() / 100);
            if (i % part == 0) {
                log.info(i / part + "% finished (" + i + "/" + items.size() + ").");
            }
        }
    } catch (SQLException e) {
        log.error("Exception while updating: " + e.getMessage());
    } finally {
        closeQuietly(update);
    }

    closeQuietly(connection);
}
From source file:ch.epfl.lsir.xin.test.UserAverageTest.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub
    PrintWriter logger = new PrintWriter(".//results//UserAverage");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//UserAverage.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");

    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }

    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        System.out.println("Folder: " + folder);

        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1) { //test data
                testRatings.addAll(folders.get(i));
            } else { //training data
                trainRatings.addAll(folders.get(i));
            }
        }

        //create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }

        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on user average method.");
        UserAverage algo = new UserAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());

        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()));
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    //MAE: 0.8353035962363073 RMSE: 1.0422971886952053 (MovieLens 100k)
}