List of usage examples for java.util.ArrayList#add
Method signature: public boolean add(E e)
From source file:gov.llnl.lc.infiniband.opensm.plugin.gui.bargraph.AnimatedBarGraph.java
/** * Starting point for the demonstration application. * //from w ww .j av a 2 s . c om * @param args * ignored. */ public static void main(String[] args) { // take three arguments // // arg 1 is the filename // arg 2 is the portcounter // arg 3, if it exists, is the node guid (if none supplied, assume entire fabric) // String fileName = "/home/meier3/.smt/vrelic.hst"; String fileName = "/home/meier3/.smt/cabHist.his"; PortCounterName pcName = PortCounterName.xmit_data; // PortCounterName pcName = PortCounterName.rcv_data; // PortCounterName pcName = PortCounterName.symbol_err_cnt; // PortCounterName pcName = PortCounterName.rcv_err; // PortCounterName pcName = PortCounterName.link_err_recover; // String fileName = "/home/meier3/.smt/BigFabricDelta.cache"; // String fileName = "/home/meier3/.smt/DayFabricDeltaCollection.cache"; int mSecs = 1500; boolean cycle = true; // IB_Guid swGuid = new IB_Guid("66a00ec003003"); // hype // IB_Guid swGuid = new IB_Guid("0002:c902:0048:b718"); // vrelic vulcan leaf switch IB_Guid swGuid = new IB_Guid("0006:6a00:e300:4414"); // cab 0006:6a00:e300:4414 // /home/meier3/omsRepo/vrelic/vrelic627-2.hst rcv_data 0002:c902:0048:b718 // /home/meier3/.smt/cabHist.his rcv_data 0006:6a00:e300:4414 if ((args.length > 0) && (args[0].length() > 1)) fileName = args[0]; // if((args.length > 1) && (args[1].length() > 1)) // { // PortCounterName pc = PortCounterName.getByName(args[1]); // if(pc != null) // pcName = pc; // } // int numBins = 15; boolean excludeZero = true; BarGraphDataSeries dataSeries = null; if ((args.length > 1) && (args[1].length() > 1)) { // convert the argument to guid swGuid = new IB_Guid(args[1]); // vrelic vulcan leaf switch // just the ports of a single switch (pc rate vs port num) ArrayList<PortCounterName> counterNames = new ArrayList<PortCounterName>(); counterNames.add(PortCounterName.xmit_data); counterNames.add(PortCounterName.rcv_data); counterNames.add(PortCounterName.rcv_pkts); 
counterNames.add(PortCounterName.xmit_pkts); dataSeries = OMS_DataSetFactory.getSwitchPortCounterSeries(fileName, swGuid, counterNames); } else { // all ports in the system (num ports vs pc rate) <= y, x ArrayList<PortCounterName> counterNames = new ArrayList<PortCounterName>(); counterNames.add(PortCounterName.xmit_data); counterNames.add(PortCounterName.rcv_data); counterNames.add(PortCounterName.rcv_pkts); counterNames.add(PortCounterName.xmit_pkts); dataSeries = OMS_DataSetFactory.getPortCounterChangeSeries(fileName, counterNames, numBins, excludeZero); } //set up the basic bar graph if ((dataSeries != null) && dataSeries.isValid()) { AnimatedBarGraph barGraph = new AnimatedBarGraph(dataSeries, mSecs, cycle); barGraph.pack(); RefineryUtilities.centerFrameOnScreen(barGraph); barGraph.setVisible(true); } else System.err.println("Could not fully construct the data series for the annimated bar graph"); }
From source file:contractEditor.contractClients.java
public static void main(String[] args) { JSONObject obj = new JSONObject(); obj.put("name", "clientTemplate"); obj.put("context", "VM-deployment"); //obj.put("Context", new Integer); HashMap serviceRequirement = new HashMap(); HashMap serviceDescription = new HashMap(); serviceRequirement.put("VM1_volume", "18_GB"); serviceDescription.put("VM1_purpose", "dev"); serviceDescription.put("VM1_data", "private"); serviceDescription.put("VM1_application", "internal"); serviceRequirement.put("VM2_volume", "20_GB"); serviceDescription.put("VM2_purpose", "prod"); serviceDescription.put("VM2_data", "public"); serviceDescription.put("VM2_application", "business"); serviceRequirement.put("VM3_volume", "30_GB"); serviceDescription.put("VM3_purpose", "test"); serviceDescription.put("VM3_data", "public"); serviceDescription.put("VM3_application", "business"); serviceRequirement.put("VM4_volume", "20_GB"); serviceDescription.put("VM4_purpose", "prod"); serviceDescription.put("VM4_data", "public"); serviceDescription.put("VM4_application", "business"); obj.put("serviceRequirement", serviceRequirement); obj.put("serviceDescription", serviceDescription); HashMap gauranteeTerm = new HashMap(); gauranteeTerm.put("VM1_availability", "more_97_percentage"); gauranteeTerm.put("VM2_availability", "more_99_percentage"); gauranteeTerm.put("VM3_availability", "more_95_percentage"); gauranteeTerm.put("VM4_availability", "more_99_percentage"); obj.put("gauranteeTerm", gauranteeTerm); //Constraint1 HashMap host_rule1 = new HashMap(); HashMap VM_rule1 = new HashMap(); host_rule1.put("certificate", "true"); VM_rule1.put("purpose", "dev"); ArrayList rule1 = new ArrayList(); rule1.add("permission"); rule1.add(host_rule1);/*from ww w . j a v a 2 s . 
c o m*/ rule1.add(VM_rule1); HashMap host_rule1_2 = new HashMap(); HashMap VM_rule1_2 = new HashMap(); host_rule1_2.put("certificate", "true"); VM_rule1_2.put("purpose", "prod"); ArrayList rule1_2 = new ArrayList(); rule1_2.add("permission"); rule1_2.add(host_rule1_2); rule1_2.add(VM_rule1_2); HashMap host_rule1_3 = new HashMap(); HashMap VM_rule1_3 = new HashMap(); host_rule1_3.put("certificate", "true"); VM_rule1_3.put("purpose", "test"); ArrayList rule1_3 = new ArrayList(); rule1_3.add("permission"); rule1_3.add(host_rule1_3); rule1_3.add(VM_rule1_3); HashMap host_rule2 = new HashMap(); HashMap VM_rule2 = new HashMap(); host_rule2.put("location", "France"); VM_rule2.put("ID", "VM2"); ArrayList rule2 = new ArrayList(); rule2.add("permission"); rule2.add(host_rule2); rule2.add(VM_rule2); HashMap host_rule2_1 = new HashMap(); HashMap VM_rule2_1 = new HashMap(); host_rule2_1.put("location", "UK"); VM_rule2_1.put("ID", "VM2"); ArrayList rule2_1 = new ArrayList(); rule2_1.add("permission"); rule2_1.add(host_rule2_1); rule2_1.add(VM_rule2_1); HashMap host_rule3 = new HashMap(); HashMap VM_rule3 = new HashMap(); host_rule3.put("location", "France"); VM_rule3.put("application", "business"); ArrayList rule3 = new ArrayList(); rule3.add("permission"); rule3.add(host_rule3); rule3.add(VM_rule3); HashMap host_rule3_1 = new HashMap(); HashMap VM_rule3_1 = new HashMap(); host_rule3_1.put("location", "UK"); VM_rule3_1.put("application", "business"); ArrayList rule3_1 = new ArrayList(); rule3_1.add("permission"); rule3_1.add(host_rule3_1); rule3_1.add(VM_rule3_1); HashMap VMSeperation_rule_1_1 = new HashMap(); HashMap VMSeperation_rule_1_2 = new HashMap(); VMSeperation_rule_1_1.put("ID", "VM1"); VMSeperation_rule_1_2.put("ID", "VM3"); ArrayList rule4 = new ArrayList(); rule4.add("separation"); rule4.add(VMSeperation_rule_1_1); rule4.add(VMSeperation_rule_1_2); ArrayList policyInConstraint1 = new ArrayList(); policyInConstraint1.add(rule1); policyInConstraint1.add(rule1_2); 
policyInConstraint1.add(rule1_3); policyInConstraint1.add(rule2); policyInConstraint1.add(rule2_1); policyInConstraint1.add(rule3); policyInConstraint1.add(rule3_1); policyInConstraint1.add(rule4); ArrayList creationConstraint1 = new ArrayList(); creationConstraint1.add("RP4"); creationConstraint1.add("true"); creationConstraint1.add("true"); creationConstraint1.add(policyInConstraint1); ArrayList totalConstraint = new ArrayList(); totalConstraint.add(creationConstraint1); obj.put("creationConstraint", totalConstraint); try { FileWriter file = new FileWriter("confClient" + File.separator + "test3.json"); file.write(obj.toJSONString()); file.flush(); file.close(); } catch (IOException e) { e.printStackTrace(); } System.out.print(obj); /* JSONParser parser = new JSONParser(); try { Object obj2 = parser.parse(new FileReader("test2.json")); JSONObject jsonObject = (JSONObject) obj2; HashMap serviceDescription2=(HashMap) jsonObject.get("serviceDescription"); method.printHashMap(serviceDescription2); HashMap gauranteeTerm2=(HashMap) jsonObject.get("gauranteeTerm"); method.printHashMap(gauranteeTerm2); ArrayList creationConstraint=(ArrayList) jsonObject.get("creationConstraint"); method.printArrayList(creationConstraint); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } catch (ParseException e) { e.printStackTrace(); } */ }
From source file:com.qpark.eip.core.ToString.java
public static void main(final String[] args) { X x = new X(); X y = new X(); ArrayList<X> list = new ArrayList<ToString.X>(); list.add(x); list.add(y);/*from www. j av a2 s . c o m*/ System.out.println(toString(x)); System.out.println(toString(new Z())); }
From source file:gov.noaa.pfel.coastwatch.pointdata.MakeErdJavaZip.java
/** * This runs MakeErdJava.zip in <contextdirectory> * (usually <tomcat>/webapps/cwexperimental). * * @param args is ignored/*from w w w . j a v a 2 s . c o m*/ */ public static void main(String args[]) throws Exception { String2.log("\n*** MakeErdJavaZip"); String errorInMethod = String2.ERROR + " while generating ErdJava.zip:\n"; //define directories String baseDir = SSR.getContextDirectory() + //with / separator and / at the end "WEB-INF/"; String classPath = baseDir + "classes/"; String coastWatchDir = classPath + "gov/noaa/pfel/coastwatch/"; //make the JavaDocs String coastWatchClass = "gov.noaa.pfel.coastwatch."; String commandLine0 = "C:\\Progra~1\\Java\\jdk1.8.0_144\\bin\\javadoc" + //" -source 1.4" + //use 1.4 for the DODS classes that use "enum" //2011-02-22 Bob Simons changed enum to en. " -sourcepath " + classPath + //root directory of the classes " -d "; //+ baseDir + "ConvertTableDoc" + //dir to hold results String commandLine2 = //space separated .java files " " + coastWatchDir + "pointdata/ConvertTable.java " + coastWatchDir + "pointdata/DigirHelper.java " + coastWatchDir + "pointdata/DigirIobisTDSP.java " + coastWatchDir + "pointdata/DigirObisTDSP.java " + coastWatchDir + "pointdata/Table.java " + coastWatchDir + "TimePeriods.java " + coastWatchDir + "ValidateDataSetProperties.java " + //recursively searched packages "-classpath " + //';' separated; //external packages are important here //baseDir + "lib/activation.jar;" + baseDir + "lib/commons-compress.jar;" + //baseDir + "lib/commons-discovery.jar;" + //baseDir + "lib/commons-codec-1.3.jar;" + //these 3 now in netcdfAll-latest //baseDir + "lib/commons-httpclient-3.0.1.jar;" + //baseDir + "lib/commons-logging-1.1.jar;" + baseDir + "lib/mail.jar;" + baseDir + "lib/slf4j.jar;" + baseDir + "lib/netcdfAll-latest.jar " + //space after last one "-subpackages " + //the packages to be doc'd // ':' separated //adding a package? 
add it to dirName below, too //"com.sshtools:org.apache.commons.logging:" + //this external package not very relevant "dods:" + //this external package is relevant "com.cohort.array:" + "com.cohort.util:" + coastWatchClass + "griddata:" + coastWatchClass + "netcheck:" + coastWatchClass + "util"; //generate javadocs once so it will be in zip file String tDir = baseDir + "docs/ErdJavaDoc"; //dir to hold results SSR.dosShell("del /s /q " + //delete (/s=recursive /q=quiet) previous results String2.replaceAll(tDir, "/", "\\"), 60); String checkNames[] = { "/index.html", "/com/cohort/array/DoubleArray.html", "/gov/noaa/pfel/coastwatch/pointdata/ConvertTable.html", "/gov/noaa/pfel/coastwatch/netcheck/NetCheck.html", "/gov/noaa/pfel/coastwatch/griddata/GenerateThreddsXml.html", "/gov/noaa/pfel/coastwatch/griddata/GridSaveAs.html" }; for (int i = 0; i < checkNames.length; i++) Test.ensureTrue(!File2.isFile(tDir + checkNames[i]), errorInMethod + tDir + checkNames[i] + " not deleted."); try { String2.log(String2.toNewlineString(SSR.dosShell(commandLine0 + tDir + commandLine2, 120).toArray())); } catch (Exception e) { String2.log(MustBe.throwable(errorInMethod + "(expected) [for zip]:", e)); } for (int i = 0; i < checkNames.length; i++) Test.ensureTrue(File2.isFile(tDir + checkNames[i]), errorInMethod + tDir + checkNames[i] + " not found."); //generate javadocs again for online use tDir = SSR.getContextDirectory() + //with / separator and / at the end "ErdJavaDoc"; //dir to hold results SSR.dosShell("del /s /q " + //delete (/s=recursive /q=quiet) previous results String2.replaceAll(tDir, "/", "\\"), 60); for (int i = 0; i < checkNames.length; i++) Test.ensureTrue(!File2.isFile(tDir + checkNames[i]), errorInMethod + tDir + checkNames[i] + " not deleted."); try { String2.log(String2.toNewlineString(SSR.dosShell(commandLine0 + tDir + commandLine2, 120).toArray())); } catch (Exception e) { String2.log(MustBe.throwable(errorInMethod + " (expected) [for online]:", e)); } for (int i 
= 0; i < checkNames.length; i++) Test.ensureTrue(File2.isFile(tDir + checkNames[i]), errorInMethod + tDir + checkNames[i] + " not found."); //make sure relevant files are compiled ConvertTable convertTable = new ConvertTable(); GenerateThreddsXml gtdsh = new GenerateThreddsXml(); GridSaveAs gridSaveAs = new GridSaveAs(); ValidateDataSetProperties validateDataSetProperties = new ValidateDataSetProperties(); DoubleCenterGrids doubleCenterGrids = new DoubleCenterGrids(); try { NetCheck netCheck = new NetCheck(baseDir + "DoesntExist.xml", true); } catch (Exception e) { //don't care if error. deployment may be on another computer //String2.log(MustBe.throwable( // "MakeNetCheckZip.main test constructors. Ignore this exception:\n", e)); } //I think that is what generates the .xml.log file: delete it File2.delete(baseDir + "DoesntExit.xml.log"); //delete the log created by DoubleCenterGrids.test File2.delete("c:/programs/_tomcat/webapps/cwexperimental/WEB-INF/DoubleCenterGrids.log"); //delete the test file from ConvertTable File2.delete("c:/programs/_tomcat/webapps/cwexperimental/WEB-INF/result.nc"); //accumulate the file names to be zipped ArrayList<String> dirNames = new ArrayList(); dirNames.add(baseDir + "ConvertTable.sh"); dirNames.add(baseDir + "ConvertTable.bat"); dirNames.add(baseDir + "DoubleCenterGrids.sh"); dirNames.add(baseDir + "GridSaveAs.sh"); dirNames.add(baseDir + "GridSaveAs.bat"); dirNames.add(baseDir + "GenerateOceanwatchThreddsXml.sh"); dirNames.add(baseDir + "GenerateOtterThreddsXml.sh"); dirNames.add(baseDir + "GenerateThredds1ThreddsXml.sh"); dirNames.add(baseDir + "GenerateThreddsXml.sh"); dirNames.add(baseDir + "incompleteMainCatalog.xml"); dirNames.add(baseDir + "iobis.m"); dirNames.add(baseDir + "NetCheck.sh"); dirNames.add(baseDir + "NetCheck.bat"); dirNames.add(baseDir + "NetCheck.xml"); dirNames.add(baseDir + "obis.m"); dirNames.add(baseDir + "QN2005193_2005193_ux10_westus.grd"); dirNames.add(baseDir + "ValidateDataSetProperties.sh"); 
dirNames.add(baseDir + "ValidateDataSetProperties.bat"); //dirNames.add(baseDir + "lib/activation.jar"); dirNames.add(baseDir + "lib/commons-compress.jar"); //dirNames.add(baseDir + "lib/commons-discovery.jar"); //dirNames.add(baseDir + "lib/commons-codec-1.3.jar"); //these 3 are now in netcdfAll-latest //dirNames.add(baseDir + "lib/commons-httpclient-3.0.1.jar"); //dirNames.add(baseDir + "lib/commons-logging-1.1.jar"); dirNames.add(baseDir + "lib/mail.jar"); dirNames.add(baseDir + "lib/netcdfAll-latest.jar"); dirNames.add(baseDir + "lib/slf4j.jar"); String2.add(dirNames, RegexFilenameFilter.recursiveFullNameList(baseDir + "docs/ErdJavaDoc/", ".+", false)); //javadocs String2.add(dirNames, RegexFilenameFilter.recursiveFullNameList(baseDir + "classes/dods/", ".+", false)); String2.add(dirNames, RegexFilenameFilter.recursiveFullNameList(baseDir + "classes/com/sshtools/", ".+", false)); String2.add(dirNames, RegexFilenameFilter.recursiveFullNameList(baseDir + "classes/org/jdom/", ".+", false)); String2.add(dirNames, RegexFilenameFilter.fullNameList(baseDir + "classes/com/cohort/array/", ".+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(baseDir + "classes/com/cohort/util/", ".+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir, "CWBrowser.properties")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir, "BrowserDefault.properties")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir, "DataSet.properties")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir, "LICENSE\\.txt")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir, "OneOf.+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir, "TimePeriods.+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir, "ValidateDataSetProperties.+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir + "griddata/", ".+")); String2.add(dirNames, 
RegexFilenameFilter.fullNameList(coastWatchDir + "hdf/", ".+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir + "netcheck/", ".+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir + "pointdata/", "ConvertTable.+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir + "pointdata/", "Digir.+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir + "pointdata/", "Table.+")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir + "pointdata/", "LICENSE\\.txt")); String2.add(dirNames, RegexFilenameFilter.fullNameList(coastWatchDir + "util/", ".+")); //convert to sorted String array String dirNameArray[] = dirNames.toArray(new String[0]); Arrays.sort(dirNameArray); //String2.log(String2.toNewlineString(dirNameArray)); //make the zip file String zipName = SSR.getContextDirectory() + //with / separator and / at the end "ErdJava.zip"; String2.log("MakeErdJavaZip is making " + zipName + "."); File2.delete(zipName); SSR.zip(zipName, dirNameArray, 60, baseDir); String2.log("\nMakeErdJavaZip successfully finished making " + zipName + ".\nnFiles=" + dirNames.size()); }
From source file:PCC.java
/** * @param args the command line arguments * @throws java.io.IOException/*from ww w .j a v a 2 s . c o m*/ */ public static void main(String[] args) throws IOException { // TODO code application logic here PearsonsCorrelation corel = new PearsonsCorrelation(); PCC method = new PCC(); ArrayList<String> name = new ArrayList<>(); Multimap<String, String> genes = ArrayListMultimap.create(); BufferedWriter bw = new BufferedWriter(new FileWriter(args[1])); BufferedReader br = new BufferedReader(new FileReader(args[0])); String str; while ((str = br.readLine()) != null) { String[] a = str.split("\t"); name.add(a[0]); for (int i = 1; i < a.length; i++) { genes.put(a[0], a[i]); } } for (String key : genes.keySet()) { double[] first = new double[genes.get(key).size()]; int element1 = 0; for (String value : genes.get(key)) { double d = Double.parseDouble(value); first[element1] = d; element1++; } for (String key1 : genes.keySet()) { if (!key.equals(key1)) { double[] second = new double[genes.get(key1).size()]; int element2 = 0; for (String value : genes.get(key1)) { double d = Double.parseDouble(value); second[element2] = d; element2++; } double corrlation = corel.correlation(first, second); if (corrlation > 0.5) { bw.write(key + "\t" + key1 + "\t" + corrlation + "\t" + method.pvalue(corrlation, second.length) + "\n"); } } } } br.close(); bw.close(); }
From source file:de.prozesskraft.pkraft.Commitit.java
/**
 * Command-line entry point: commits a file and/or a variable value to a step
 * of a persisted pkraft process instance.
 *
 * Flow: read settings from ../etc/pkraft-commitit.ini (relative to the
 * install directory), parse the command line, validate a license against the
 * configured license servers, load the process instance given by -instance,
 * apply the commit to the step given by -step (default "root") under the key
 * given by -key (default "default"), then write the instance back to the same
 * file.
 *
 * @param args command line arguments (see the -help output)
 * @throws org.apache.commons.cli.ParseException if the command line cannot be parsed
 * @throws IOException if the instance file cannot be read or written
 */
public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {
    /*----------------------------
    get options from ini-file
    ----------------------------*/
    java.io.File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Commitit.class) + "/" + "../etc/pkraft-commitit.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // ini exists but is malformed: logged, execution continues with ini unset
            e1.printStackTrace();
        } catch (IOException e1) {
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
    create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");

    /*----------------------------
    create argument options
    ----------------------------*/
    Option oinstance = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory] process instance file")
            .create("instance");
    Option ostep = OptionBuilder.withArgName("STEPNAME").hasArg()
            .withDescription("[optional, default: root] process step to commit to")
            .create("step");
    Option ofile = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[optional] this file will be committed as file. key will be set to 'default'")
            .create("file");
    Option okey = OptionBuilder.withArgName("KEY").hasArg()
            .withDescription(
                    "[optional, default: default] this string will be considered as the key for the commit.")
            .create("key");
    Option ovariable = OptionBuilder.withArgName("VALUE").hasArg()
            .withDescription("[optional] this string will be committed as a variable.")
            .create("variable");

    /*----------------------------
    create options object
    ----------------------------*/
    Options options = new Options();
    options.addOption(ohelp);
    options.addOption(oinstance);
    options.addOption(ostep);
    options.addOption(ofile);
    options.addOption(okey);
    options.addOption(ovariable);

    /*----------------------------
    create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        commandline = parser.parse(options, args);
    } catch (Exception exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        exiter();
    }

    /*----------------------------
    usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("commit", options);
        System.exit(0);
    }

    /*----------------------------
    check whether a bad combination of parameters was given
    ----------------------------*/
    // NOTE(review): -dir, -varfile, -varname and -varvalue are tested below but
    // never registered in 'options' above, so hasOption(...) is always false for
    // them — effectively only -file or -variable can satisfy this check. Confirm
    // whether those options were meant to be added.
    if (!(commandline.hasOption("instance"))) {
        System.out.println("option -instance is mandatory.");
        exiter();
    } else if (!(commandline.hasOption("dir")) && !(commandline.hasOption("file"))
            && !(commandline.hasOption("varfile")) && !(commandline.hasOption("varname"))
            && !(commandline.hasOption("varvalue")) && !(commandline.hasOption("variable"))) {
        System.out.println(
                "at least one of these options needed. -dir -file -varfile -variable -varname -varvalue.");
        exiter();
    } else if ((commandline.hasOption("varname") && !(commandline.hasOption("varvalue")))
            || (!(commandline.hasOption("varname")) && commandline.hasOption("varvalue"))) {
        System.out.println("use options -varname and -varvalue only in combination with each other.");
        exiter();
    }

    /*----------------------------
    verify the license and abort if it is not valid
    ----------------------------*/
    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
    the actual business logic
    ----------------------------*/
    // determine the step (default "root")
    String stepname = "root";
    if (commandline.hasOption("step")) {
        stepname = commandline.getOptionValue("step");
    }

    // determine the key (default "default")
    String key = "default";
    if (commandline.hasOption("key")) {
        key = commandline.getOptionValue("key");
    }

    Process p1 = new Process();
    p1.setInfilebinary(commandline.getOptionValue("instance"));
    System.out.println("info: reading process instance " + commandline.getOptionValue("instance"));
    Process p2 = p1.readBinary();
    // the instance is written back to the same file it was read from
    p2.setOutfilebinary(commandline.getOptionValue("instance"));

    // look up the step by name
    Step step = p2.getStep(stepname);
    if (step == null) {
        System.err.println("step not found: " + stepname);
        exiter();
    }

    // fetch an existing commit or create a new one
    // NOTE(review): the lookup uses the name "by-hand" but a newly created
    // commit is named "by-process-commitit" — a later run will therefore never
    // find the commit created here. Confirm which name is intended.
    Commit commit = step.getCommit("by-hand");
    if (commit == null) {
        commit = new Commit(step);
        commit.setName("by-process-commitit");
    }

    // perform the commit(s)
    if (commandline.hasOption("file")) {
        File file = new File();
        file.setKey(key);
        file.setGlob(commandline.getOptionValue("file"));
        commit.addFile(file);
        commit.doIt();
    }
    if (commandline.hasOption("variable")) {
        Variable variable = new Variable();
        variable.setKey(key);
        variable.setValue(commandline.getOptionValue("variable"));
        commit.addVariable(variable);
        commit.doIt();
    }

    p2.writeBinary();
    System.out.println("info: writing process instance " + p2.getOutfilebinary());
}
From source file:edu.oregonstate.eecs.mcplan.ml.LinearDiscriminantAnalysis.java
/**
 * Demonstration / smoke test for LinearDiscriminantAnalysis: builds a labeled
 * two-class dataset, shuffles it, fits an LDA model on the shuffled copy,
 * prints the eigenvectors, and dumps both the raw and the LDA-transformed
 * data to CSV files under test/LinearDiscriminantAnalysis.
 *
 * Note: the RNG is consumed in a fixed order (dataset generation, then index
 * shuffle), so with seed 42 the run is reproducible.
 *
 * @param args ignored
 * @throws FileNotFoundException if a CSV output file cannot be created
 */
public static void main(final String[] args) throws FileNotFoundException {
    // output directory for the CSV dumps
    final File root = new File("test/LinearDiscriminantAnalysis");
    root.mkdirs();
    final int seed = 42;
    final int N = 30; // samples per class (two classes below)
    final double shrinkage = 1e-6; // covariance regularization passed to LDA
    final RandomGenerator rng = new MersenneTwister(seed);
    // dataset.first = data points, dataset.second = integer class labels
    final Pair<ArrayList<double[]>, int[]> dataset = Datasets.twoVerticalGaussian2D(rng, N);
    final ArrayList<double[]> data = dataset.first;
    final int[] label = dataset.second;
    final int Nlabels = 2;
    // presumably Fn.linspace(0, n) yields the indices [0, n) — TODO confirm against Fn
    final int[] shuffle_idx = Fn.linspace(0, Nlabels * N);
    Fn.shuffle(rng, shuffle_idx);
    // apply the same permutation to points and labels (points are deep-copied)
    final ArrayList<double[]> shuffled = new ArrayList<double[]>();
    final int[] shuffled_label = new int[label.length];
    for (int i = 0; i < data.size(); ++i) {
        shuffled.add(Fn.copy(data.get(shuffle_idx[i])));
        shuffled_label[i] = label[shuffle_idx[i]];
    }
    // dump the raw, UNshuffled data for external inspection
    final Csv.Writer data_writer = new Csv.Writer(new PrintStream(new File(root, "data.csv")));
    for (final double[] v : data) {
        for (int i = 0; i < v.length; ++i) {
            data_writer.cell(v[i]);
        }
        data_writer.newline();
    }
    data_writer.close();
    System.out.println("[Training]");
    // train on the shuffled copy
    final LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(shuffled, shuffled_label, Nlabels,
            shrinkage);
    System.out.println("[Finished]");
    for (final RealVector ev : lda.eigenvectors) {
        System.out.println(ev);
    }
    System.out.println("Transformed data:");
    // project the ORIGINAL (unshuffled) data through the fitted transformer
    final LinearDiscriminantAnalysis.Transformer transformer = lda.makeTransformer();
    final Csv.Writer transformed_writer = new Csv.Writer(new PrintStream(new File(root, "transformed.csv")));
    for (final double[] u : data) {
        final RealVector uvec = new ArrayRealVector(u);
        System.out.println(uvec);
        final RealVector v = transformer.transform(uvec);
        System.out.println("-> " + v);
        for (int i = 0; i < v.getDimension(); ++i) {
            transformed_writer.cell(v.getEntry(i));
        }
        transformed_writer.newline();
    }
    transformed_writer.close();
}
From source file:com.dtolabs.rundeck.ec2.NodeGenerator.java
public static void main(final String[] args) throws IOException, GeneratorException { File outfile = null;/*from w w w . j a v a 2 s.c om*/ //load generator mapping if (args.length < 2) { System.err.println( "usage: <credentials.properties> <endpoint> [mapping.properties] [outfile] [query parameters, \"a=b\" ...]"); System.err.println( "\t optional arguments can be replaced by \"-\" to use the default, and then query parameters appended"); System.exit(2); } final InputStream stream = new FileInputStream(args[0]); final String endPoint = args[1]; final AWSCredentials credentials = new PropertiesCredentials(stream); Properties mapping = new Properties(); if (args.length > 2 && !"-".equals(args[2])) { mapping.load(new FileInputStream(args[2])); } else { mapping.load(NodeGenerator.class.getClassLoader().getResourceAsStream("simplemapping.properties")); } final ResourceXMLGenerator gen; if (args.length > 3 && !"-".equals(args[3])) { outfile = new File(args[3]); gen = new ResourceXMLGenerator(outfile); } else { //use stdout gen = new ResourceXMLGenerator(System.out); } ArrayList<String> params = new ArrayList<String>(); if (args.length > 4) { for (int i = 4; i < args.length; i++) { params.add(args[i]); } } Set<Instance> instances = performQuery(credentials, endPoint, params); for (final Instance inst : instances) { final INodeEntry iNodeEntry = instanceToNode(inst, mapping); if (null != iNodeEntry) { gen.addNode(iNodeEntry); } } gen.generate(); // if (null != outfile) { System.out.println("XML Stored: " + outfile.getAbsolutePath()); } }
From source file:no.uio.medicine.virsurveillance.charts.BoxAndWhiskerChart_AWT.java
public static void main(final String[] args) { //Log.getInstance().addTarget(new PrintStreamLogTarget(System.out)); ArrayList<ArrayList<ArrayList<Float>>> dataPoints = new ArrayList<>(); ArrayList<String> titles = new ArrayList<>(); ArrayList<ArrayList<String>> categories = new ArrayList<>(); for (int i = 0; i < 2; i++) { ArrayList<ArrayList<Float>> serie = new ArrayList<>(); titles.add("Serie " + i); ArrayList<String> categoriesPerSerie = new ArrayList<>(); int max = i + 2; for (int j = 0; j < max; j++) { ArrayList<Float> points = new ArrayList<>(); for (int k = 0; k < 50; k++) { points.add((float) (i * 10 + Math.random() * 50)); }/*from w w w . j a v a 2 s . co m*/ serie.add(points); categoriesPerSerie.add("Categorie" + j); } dataPoints.add(serie); categories.add(categoriesPerSerie); } final BoxAndWhiskerChart_AWT demo = new BoxAndWhiskerChart_AWT("A", "B", "C", "D", dataPoints, categories, titles); final BoxAndWhiskerChart_AWT demo2 = new BoxAndWhiskerChart_AWT("A2", "B2", "C2", "D2", dataPoints.get(0), categories.get(0), titles.get(0)); demo.updateChartData(); demo2.updateChartData(); }
From source file:ch.epfl.lsir.xin.test.SVDPPTest.java
/** * @param args//w w w . j av a 2 s.c o m */ public static void main(String[] args) throws Exception { // TODO Auto-generated method stub PrintWriter logger = new PrintWriter(".//results//SVDPP"); PropertiesConfiguration config = new PropertiesConfiguration(); config.setFile(new File("conf//SVDPlusPlus.properties")); try { config.load(); } catch (ConfigurationException e) { // TODO Auto-generated catch block e.printStackTrace(); } logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data..."); logger.flush(); DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt"); loader.readSimple(); DataSetNumeric dataset = loader.getDataset(); System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: " + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size()); logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: " + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size()); logger.flush(); double totalMAE = 0; double totalRMSE = 0; double totalPrecision = 0; double totalRecall = 0; double totalMAP = 0; double totalNDCG = 0; double totalMRR = 0; double totalAUC = 0; int F = 5; logger.println(F + "- folder cross validation."); logger.flush(); ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>(); for (int i = 0; i < F; i++) { folders.add(new ArrayList<NumericRating>()); } while (dataset.getRatings().size() > 0) { int index = new Random().nextInt(dataset.getRatings().size()); int r = new Random().nextInt(F); folders.get(r).add(dataset.getRatings().get(index)); dataset.getRatings().remove(index); } for (int folder = 1; folder <= F; folder++) { System.out.println("Folder: " + folder); logger.println("Folder: " + folder); logger.flush(); ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>(); ArrayList<NumericRating> testRatings = new 
ArrayList<NumericRating>(); for (int i = 0; i < folders.size(); i++) { if (i == folder - 1)//test data { testRatings.addAll(folders.get(i)); } else {//training data trainRatings.addAll(folders.get(i)); } } //create rating matrix HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>(); HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>(); for (int i = 0; i < dataset.getUserIDs().size(); i++) { userIDIndexMapping.put(dataset.getUserIDs().get(i), i); } for (int i = 0; i < dataset.getItemIDs().size(); i++) { itemIDIndexMapping.put(dataset.getItemIDs().get(i), i); } RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < trainRatings.size(); i++) { trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()), itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue()); } RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(), dataset.getItemIDs().size()); for (int i = 0; i < testRatings.size(); i++) { if (testRatings.get(i).getValue() < 5) continue; testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()), itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue()); } System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: " + testRatingMatrix.getTotalRatingNumber()); logger.println("Initialize a SVD++ recommendation model."); logger.flush(); SVDPlusPlus algo = new SVDPlusPlus(trainRatingMatrix, false, ".//localModels//" + config.getString("NAME")); algo.setLogger(logger); algo.build(); algo.saveModel(".//localModels//" + config.getString("NAME")); logger.println("Save the model."); logger.flush(); //rating prediction accuracy double RMSE = 0; double MAE = 0; double precision = 0; double recall = 0; double map = 0; double ndcg = 0; double mrr = 0; double auc = 0; int count = 0; for (int i = 0; i < 
testRatings.size(); i++) { NumericRating rating = testRatings.get(i); double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()), itemIDIndexMapping.get(rating.getItemID()), false); if (prediction > algo.getMaxRating()) prediction = algo.getMaxRating(); if (prediction < algo.getMinRating()) prediction = algo.getMinRating(); if (Double.isNaN(prediction)) { System.out.println("no prediction"); continue; } MAE = MAE + Math.abs(rating.getValue() - prediction); RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2); count++; } MAE = MAE / count; RMSE = Math.sqrt(RMSE / count); totalMAE = totalMAE + MAE; totalRMSE = totalRMSE + RMSE; System.out.println("Folder --- MAE: " + MAE + " RMSE: " + RMSE); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Folder --- MAE: " + MAE + " RMSE: " + RMSE); //ranking accuracy if (algo.getTopN() > 0) { HashMap<Integer, ArrayList<ResultUnit>> results = new HashMap<Integer, ArrayList<ResultUnit>>(); for (int i = 0; i < trainRatingMatrix.getRow(); i++) { ArrayList<ResultUnit> rec = algo.getRecommendationList(i); if (rec == null) continue; int total = testRatingMatrix.getUserRatingNumber(i); if (total == 0)//this user is ignored continue; results.put(i, rec); } RankResultGenerator generator = new RankResultGenerator(results, algo.getTopN(), testRatingMatrix); precision = generator.getPrecisionN(); totalPrecision = totalPrecision + precision; recall = generator.getRecallN(); totalRecall = totalRecall + recall; map = generator.getMAPN(); totalMAP = totalMAP + map; ndcg = generator.getNDCGN(); totalNDCG = totalNDCG + ndcg; mrr = generator.getMRRN(); totalMRR = totalMRR + mrr; auc = generator.getAUC(); totalAUC = totalAUC + auc; System.out.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " mrr: " + mrr + " auc: " + auc); logger.println("Folder --- precision: " + precision + " recall: " + recall + " map: " + map + " ndcg: " + ndcg + " 
mrr: " + mrr + " auc: " + auc); } logger.flush(); } System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F); System.out.println("Precision@N: " + totalPrecision / F); System.out.println("Recall@N: " + totalRecall / F); System.out.println("MAP@N: " + totalMAP / F); System.out.println("MRR@N: " + totalMRR / F); System.out.println("NDCG@N: " + totalNDCG / F); System.out.println("AUC@N: " + totalAUC / F); logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + "\n" + "MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F + "\n" + "Precision@N: " + totalPrecision / F + "\n" + "Recall@N: " + totalRecall / F + "\n" + "MAP@N: " + totalMAP / F + "\n" + "MRR@N: " + totalMRR / F + "\n" + "NDCG@N: " + totalNDCG / F + "\n" + "AUC@N: " + totalAUC / F); logger.flush(); logger.close(); }