List of usage examples for java.util.ArrayList.add
public boolean add(E e)
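Appends the specified element to the end of this list, growing the backing array as needed; it always returns true, since the boolean return type exists to satisfy the java.util.Collection contract. Before the full-program examples below, a minimal, self-contained sketch (class and variable names here are illustrative only):

import java.util.ArrayList;
import java.util.List;

public class AddExample {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        boolean changed = names.add("alice"); // appends at index 0, returns true
        names.add("bob");                     // appends at index 1
        System.out.println(changed);          // true
        System.out.println(names);            // [alice, bob]
    }
}

The examples that follow show the same call in real code bases.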
From source file:gedi.lfc.quick.ShiroguchiCounter.java
public static void main(String[] args) throws IOException {
    String path = "/home/users/erhard/biostor/seq/ngade/shiroguchi_randombarcodes/data/";

    MemoryIntervalTreeStorage<int[]> reads = new MemoryIntervalTreeStorage<int[]>(int[].class);

    String[] files = { "Shiroguchi_A_collapsed.bed", "Shiroguchi_B_collapsed.bed",
            "Shiroguchi_A_uncollapsed.bed", "Shiroguchi_B_uncollapsed.bed" };

    for (int i = 0; i < 4; i++) {
        Iterator<String> it = new LineOrientedFile(path + files[i]).lineIterator();
        while (it.hasNext()) {
            String[] f = StringUtils.split(it.next(), '\t');
            Chromosome chr = Chromosome.obtain(f[0]);
            ArrayGenomicRegion region = new ArrayGenomicRegion(Integer.parseInt(f[1]), Integer.parseInt(f[2]));
            int c = Integer.parseInt(StringUtils.splitField(f[3], '|', 0));
            int[] counts = reads.getData(chr, region);
            if (counts == null)
                reads.add(chr, region, counts = new int[4]);
            counts[i] += c;
        }
    }

    HashMap<String, String> map = new HashMap<String, String>();
    new LineOrientedFile(path + "U00096.2.genes.csv").lineIterator().forEachRemaining(s -> {
        String[] f = StringUtils.split(s, '\t');
        map.put(f[0], f[7]);
    });

    LineOrientedFile fragments = new LineOrientedFile("fragments.csv");
    fragments.startWriting();
    fragments.writef("Gene\tonlyA\tonlyB\tBoth\tLength\n");

    LineOrientedFile bias = new LineOrientedFile("bias.csv");
    bias.startWriting();
    bias.writef("OriginalA\tBiasA\tOriginalB\tBiasB\n");

    IntArrayList biasFactors = new IntArrayList();
    ArrayList<GeneData> geneData = new ArrayList<GeneData>();

    MemoryIntervalTreeStorage<Transcript> genes = new BiomartExonFileReader(path + "U00096.2.exons.csv", false)
            .readIntoMemoryTakeFirst();
    for (ImmutableReferenceGenomicRegion<Transcript> g : genes.getReferenceGenomicRegions()) {
        ArrayList<ImmutableReferenceGenomicRegion<int[]>> frag = reads
                .getReferenceRegionsIntersecting(g.getReference().toStrandIndependent(), g.getRegion());

        GeneData gd = new GeneData();
        int l = g.getRegion().getTotalLength();

        for (ImmutableReferenceGenomicRegion<int[]> r : frag) {
            if (r.getData()[0] == 0)
                gd.onlyB++;
            if (r.getData()[1] == 0)
                gd.onlyA++;
            if (r.getData()[0] == 0 && r.getData()[1] == 0)
                throw new RuntimeException();

            bias.writef("%d\t%.0f\t%d\t%.0f\n", r.getData()[0], r.getData()[2] / (double) r.getData()[0],
                    r.getData()[1], r.getData()[3] / (double) r.getData()[1]);
            if (r.getData()[0] > 0) {
                biasFactors.add(r.getData()[2] / r.getData()[0]);
            }
            if (r.getData()[1] > 0) {
                biasFactors.add(r.getData()[3] / r.getData()[1]);
            }
        }
        gd.both = frag.size() - gd.onlyA - gd.onlyB;

        fragments.writef("%s\t%d\t%d\t%d\t%d\n", map.get(g.getData().getTranscriptId()), gd.onlyA, gd.onlyB,
                gd.both, l);
        if (gd.onlyA + gd.onlyB + gd.both > 0)
            geneData.add(gd);
    }
    fragments.finishWriting();
    bias.finishWriting();

    double fc = 1.4;
    int rep = 5;
    int nDiff = 1000;
    int n = 10000;
    int N = 6000;
    double noise = 0.05;

    LineOrientedFile countMatrix = new LineOrientedFile("countMatrix.csv");
    countMatrix.startWriting();
    LineOrientedFile downCountMatrix = new LineOrientedFile("countMatrix_downsampled.csv");
    downCountMatrix.startWriting();

    RandomNumbers rnd = new RandomNumbers();
    for (int i = 0; i < n; i++) {
        GeneData gd = geneData.get(rnd.getUnif(0, geneData.size()));
        // int N = gd.both==0?Integer.MAX_VALUE/2:(int) (gd.onlyA+gd.onlyB+gd.both+gd.onlyA*gd.onlyB/gd.both);
        double p1 = (gd.onlyA + gd.both) / (double) N;
        double p2 = i < nDiff ? p1 / fc : p1;

        ArrayList<ReadData> list = new ArrayList<ReadData>();
        for (int r = 0; r < rep * 2; r++) {
            int k = rnd.getBinom(N, r < rep ? p1 : p2) + 1;
            int hit = N == -1 ? 0 : rnd.getBinom(k, list.size() / (double) N);
            rnd.shuffle(list);
            for (int x = 0; x < hit; x++)
                list.get(x).reads[r] = (int) rnd.getNormal(list.get(x).bias, list.get(x).bias * noise);
            for (int x = 0; x < k - hit; x++)
                list.add(new ReadData(biasFactors.getInt(rnd.getUnif(0, biasFactors.size())), rep * 2, r));
        }

        int[] c = new int[rep * 2];
        for (ReadData d : list) {
            for (int r = 0; r < c.length; r++) {
                c[r] += d.reads[r];
            }
        }
        double[] down = new double[rep * 2];
        for (ReadData d : list) {
            double max = ArrayUtils.max(d.reads);
            for (int r = 0; r < down.length; r++) {
                down[r] += d.reads[r] / max;
            }
        }
        countMatrix.writeLine(StringUtils.concat("\t", c));
        downCountMatrix.writeLine(StringUtils.concat("\t", down));
    }
    countMatrix.finishWriting();
    downCountMatrix.finishWriting();
}
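Distilled from the example above: the ArrayList serves as a filtered accumulator, with an element appended only after it passes a check. The fragment below is a sketch rather than standalone code, since GeneData and the genomic types come from the surrounding project:

ArrayList<GeneData> geneData = new ArrayList<GeneData>();
for (ImmutableReferenceGenomicRegion<Transcript> g : genes.getReferenceGenomicRegions()) {
    GeneData gd = new GeneData();
    // ... count fragments into gd.onlyA, gd.onlyB, gd.both ...
    if (gd.onlyA + gd.onlyB + gd.both > 0)
        geneData.add(gd); // keep only genes with at least one fragment
}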
From source file:edu.usc.goffish.gofs.tools.GoFSFormat.java
public static void main(String[] args) throws IOException {
    if (args.length < REQUIRED_ARGS) {
        PrintUsageAndQuit(null);
    }
    if (args.length == 1 && args[0].equals("-help")) {
        PrintUsageAndQuit(null);
    }

    Path executableDirectory;
    try {
        executableDirectory = Paths
                .get(GoFSFormat.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
    } catch (URISyntaxException e) {
        throw new RuntimeException("Unexpected error retrieving executable location", e);
    }
    Path configPath = executableDirectory.resolve(DEFAULT_CONFIG).normalize();

    boolean copyBinaries = false;

    // parse optional arguments
    int i = 0;
    OptArgLoop: for (i = 0; i < args.length - REQUIRED_ARGS; i++) {
        switch (args[i]) {
        case "-config":
            i++;
            try {
                configPath = Paths.get(args[i]);
            } catch (InvalidPathException e) {
                PrintUsageAndQuit("Config file - " + e.getMessage());
            }
            break;
        case "-copyBinaries":
            copyBinaries = true;
            break;
        default:
            break OptArgLoop;
        }
    }

    if (args.length - i < REQUIRED_ARGS) {
        PrintUsageAndQuit(null);
    }

    // finished parsing args
    if (i < args.length) {
        PrintUsageAndQuit("Unrecognized argument \"" + args[i] + "\"");
    }

    // parse config
    System.out.println("Parsing config...");

    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setDelimiterParsingDisabled(true);
    try {
        config.load(Files.newInputStream(configPath));
    } catch (ConfigurationException e) {
        throw new IOException(e);
    }

    // retrieve data nodes
    ArrayList<URI> dataNodes;
    {
        String[] dataNodesArray = config.getStringArray(GOFS_DATANODES_KEY);
        if (dataNodesArray.length == 0) {
            throw new ConversionException("Config must contain key " + GOFS_DATANODES_KEY);
        }

        dataNodes = new ArrayList<>(dataNodesArray.length);

        if (dataNodesArray.length == 0) {
            throw new ConversionException("Config key " + GOFS_DATANODES_KEY
                    + " has invalid format - must define at least one data node");
        }

        try {
            for (String node : dataNodesArray) {
                URI dataNodeURI = new URI(node);

                if (!"file".equalsIgnoreCase(dataNodeURI.getScheme())) {
                    throw new ConversionException("config key " + GOFS_DATANODES_KEY + " value \"" + dataNodeURI
                            + "\" has invalid format - data node urls must have 'file' scheme");
                } else if (dataNodeURI.getPath() == null || dataNodeURI.getPath().isEmpty()) {
                    throw new ConversionException("config key " + GOFS_DATANODES_KEY + " value \"" + dataNodeURI
                            + "\" has invalid format - data node urls must have an absolute path specified");
                }

                // ensure uri ends with a slash, so we know it is a directory
                if (!dataNodeURI.getPath().endsWith("/")) {
                    dataNodeURI = dataNodeURI.resolve(dataNodeURI.getPath() + "/");
                }

                dataNodes.add(dataNodeURI);
            }
        } catch (URISyntaxException e) {
            throw new ConversionException(
                    "Config key " + GOFS_DATANODES_KEY + " has invalid format - " + e.getMessage());
        }
    }

    // validate serializer type
    Class<? extends ISliceSerializer> serializerType;
    {
        String serializerTypeName = config.getString(GOFS_SERIALIZER_KEY);
        if (serializerTypeName == null) {
            throw new ConversionException("Config must contain key " + GOFS_SERIALIZER_KEY);
        }

        try {
            serializerType = SliceSerializerProvider.loadSliceSerializerType(serializerTypeName);
        } catch (ReflectiveOperationException e) {
            throw new ConversionException(
                    "Config key " + GOFS_SERIALIZER_KEY + " has invalid format - " + e.getMessage());
        }
    }

    // retrieve name node
    IInternalNameNode nameNode;
    try {
        nameNode = NameNodeProvider.loadNameNodeFromConfig(config, GOFS_NAMENODE_TYPE_KEY,
                GOFS_NAMENODE_LOCATION_KEY);
    } catch (ReflectiveOperationException e) {
        throw new RuntimeException("Unable to load name node", e);
    }

    System.out.println("Contacting name node...");

    // validate name node
    if (!nameNode.isAvailable()) {
        throw new IOException("Name node at " + nameNode.getURI() + " is not available");
    }

    System.out.println("Contacting data nodes...");

    // validate data nodes
    for (URI dataNode : dataNodes) {
        // only attempt ssh if host exists
        if (dataNode.getHost() != null) {
            try {
                SSHHelper.SSH(dataNode, "true");
            } catch (IOException e) {
                throw new IOException("Data node at " + dataNode + " is not available", e);
            }
        }
    }

    // create temporary directory
    Path workingDir = Files.createTempDirectory("gofs_format");
    try {
        // create deploy directory
        Path deployDirectory = Files.createDirectory(workingDir.resolve(DATANODE_DIR_NAME));

        // create empty slice directory
        Files.createDirectory(deployDirectory.resolve(DataNode.DATANODE_SLICE_DIR));

        // copy binaries
        if (copyBinaries) {
            System.out.println("Copying binaries...");
            FileUtils.copyDirectory(executableDirectory.toFile(),
                    deployDirectory.resolve(executableDirectory.getFileName()).toFile());
        }

        // write config file
        Path dataNodeConfigFile = deployDirectory.resolve(DataNode.DATANODE_CONFIG);
        try {
            // create config for every data node and scp deploy folder into place
            for (URI dataNodeParent : dataNodes) {
                URI dataNode = dataNodeParent.resolve(DATANODE_DIR_NAME);

                PropertiesConfiguration datanode_config = new PropertiesConfiguration();
                datanode_config.setDelimiterParsingDisabled(true);
                datanode_config.setProperty(DataNode.DATANODE_INSTALLED_KEY, true);
                datanode_config.setProperty(DataNode.DATANODE_NAMENODE_TYPE_KEY,
                        config.getString(GOFS_NAMENODE_TYPE_KEY));
                datanode_config.setProperty(DataNode.DATANODE_NAMENODE_LOCATION_KEY,
                        config.getString(GOFS_NAMENODE_LOCATION_KEY));
                datanode_config.setProperty(DataNode.DATANODE_LOCALHOSTURI_KEY, dataNode.toString());

                try {
                    datanode_config.save(Files.newOutputStream(dataNodeConfigFile));
                } catch (ConfigurationException e) {
                    throw new IOException(e);
                }

                System.out.println("Formatting data node " + dataNode.toString() + "...");

                // scp everything into place on the data node
                SCPHelper.SCP(deployDirectory, dataNodeParent);

                // update name node
                nameNode.addDataNode(dataNode);
            }

            // update name node
            nameNode.setSerializer(serializerType);
        } catch (Exception e) {
            System.out.println(
                    "ERROR: data node formatting interrupted - name node and data nodes are in an inconsistent state and require clean up");
            throw e;
        }

        System.out.println("GoFS format complete");
    } finally {
        FileUtils.deleteQuietly(workingDir.toFile());
    }
}
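A detail worth copying from this example: because the final size is known up front, the list is created with a capacity hint (new ArrayList<>(dataNodesArray.length)), and each element is validated before add is called. A trimmed sketch of that pattern (exception type borrowed from the code above):

ArrayList<URI> dataNodes = new ArrayList<>(dataNodesArray.length); // capacity hint avoids intermediate resizing
for (String node : dataNodesArray) {
    URI dataNodeURI = new URI(node);
    if (!"file".equalsIgnoreCase(dataNodeURI.getScheme())) {
        throw new ConversionException("data node urls must have 'file' scheme");
    }
    dataNodes.add(dataNodeURI); // append only after validation
}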
From source file:com.sebuilder.interpreter.SeInterpreter.java
public static void main(String[] args) {
    if (args.length == 0) {
        System.out.println(
                "Usage: [--driver=<drivername] [--driver.<configkey>=<configvalue>...] [--implicitlyWait=<ms>] [--pageLoadTimeout=<ms>] [--stepTypePackage=<package name>] <script path>...");
        System.exit(0);
    }

    Log log = LogFactory.getFactory().getInstance(SeInterpreter.class);
    WebDriverFactory wdf = DEFAULT_DRIVER_FACTORY;
    ScriptFactory sf = new ScriptFactory();
    StepTypeFactory stf = new StepTypeFactory();
    sf.setStepTypeFactory(stf);
    TestRunFactory trf = new TestRunFactory();
    sf.setTestRunFactory(trf);

    ArrayList<String> paths = new ArrayList<String>();
    HashMap<String, String> driverConfig = new HashMap<String, String>();
    for (String s : args) {
        if (s.startsWith("--")) {
            String[] kv = s.split("=", 2);
            if (kv.length < 2) {
                log.fatal("Driver configuration option \"" + s
                        + "\" is not of the form \"--driver=<name>\" or \"--driver.<key>=<value\".");
                System.exit(1);
            }
            if (s.startsWith("--implicitlyWait")) {
                trf.setImplicitlyWaitDriverTimeout(Integer.parseInt(kv[1]));
            } else if (s.startsWith("--pageLoadTimeout")) {
                trf.setPageLoadDriverTimeout(Integer.parseInt(kv[1]));
            } else if (s.startsWith("--stepTypePackage")) {
                stf.setPrimaryPackage(kv[1]);
            } else if (s.startsWith("--driver.")) {
                driverConfig.put(kv[0].substring("--driver.".length()), kv[1]);
            } else if (s.startsWith("--driver")) {
                try {
                    wdf = (WebDriverFactory) Class
                            .forName("com.sebuilder.interpreter.webdriverfactory." + kv[1]).newInstance();
                } catch (ClassNotFoundException e) {
                    log.fatal("Unknown WebDriverFactory: "
                            + "com.sebuilder.interpreter.webdriverfactory." + kv[1], e);
                } catch (InstantiationException e) {
                    log.fatal("Could not instantiate WebDriverFactory "
                            + "com.sebuilder.interpreter.webdriverfactory." + kv[1], e);
                } catch (IllegalAccessException e) {
                    log.fatal("Could not instantiate WebDriverFactory "
                            + "com.sebuilder.interpreter.webdriverfactory." + kv[1], e);
                }
            } else {
                paths.add(s);
            }
        } else {
            paths.add(s);
        }
    }
    if (paths.isEmpty()) {
        log.info("Configuration successful but no paths to scripts specified. Exiting.");
        System.exit(0);
    }

    HashMap<String, String> cfg = new HashMap<String, String>(driverConfig);
    for (String path : paths) {
        try {
            TestRun lastRun = null;
            for (Script script : sf.parse(new File(path))) {
                for (Map<String, String> data : script.dataRows) {
                    try {
                        lastRun = script.testRunFactory.createTestRun(script, log, wdf, driverConfig, data,
                                lastRun);
                        if (lastRun.finish()) {
                            log.info(script.name + " succeeded");
                        } else {
                            log.info(script.name + " failed");
                        }
                    } catch (Exception e) {
                        log.info(script.name + " failed", e);
                    }
                }
            }
        } catch (Exception e) {
            log.fatal("Run error.", e);
            System.exit(1);
        }
    }
}
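The ArrayList here acts as the default bucket during argument parsing: anything that is not a recognized --option is appended to paths, and isEmpty() later decides whether there is any work to do. Reduced sketch of the pattern:

ArrayList<String> paths = new ArrayList<String>();
for (String s : args) {
    if (s.startsWith("--")) {
        // ... handle the option ...
    } else {
        paths.add(s); // everything else is treated as a script path
    }
}
if (paths.isEmpty()) {
    System.exit(0); // nothing to run
}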
From source file:ServerStatus.java
/**
 * @param args the command line arguments
 */
public static void main(String[] args)
        throws InterruptedException, FileNotFoundException, IOException, ParseException {
    FileReader reader = null;
    ArrayList<BankInfo2> BankArray = new ArrayList<BankInfo2>();
    reader = new FileReader(args[0]);
    JSONParser jp = new JSONParser();
    JSONObject doc = (JSONObject) jp.parse(reader);
    JSONObject banks = (JSONObject) doc.get("banks");
    //Set bankKeys = banks.keySet();
    //Object [] bankNames = bankKeys.toArray();
    Object[] bankNames = banks.keySet().toArray();
    for (int i = 0; i < bankNames.length; i++) {
        //System.out.println(bankNames[i]);
        String bname = (String) bankNames[i];
        BankInfo2 binfo = new BankInfo2(bname);
        JSONObject banki = (JSONObject) banks.get(bname);
        JSONArray chain = (JSONArray) banki.get("chain");
        int chainLength = chain.size();
        //System.out.println(chainLength);
        for (Object chain1 : chain) {
            JSONObject serv = (JSONObject) chain1;
            ServerInfo sinfo = new ServerInfo((String) serv.get("ip"), serv.get("port").toString(),
                    serv.get("start_delay").toString(), serv.get("lifetime").toString(),
                    serv.get("receive").toString(), serv.get("send").toString());
            binfo.servers.add(sinfo);
            //System.out.println(serv.get("ip") + ":" + serv.get("port"));
        }
        BankArray.add(binfo);
    }
    //System.out.println("Done Processing Servers");
    JSONArray clients = (JSONArray) doc.get("clients");
    ArrayList<ClientInfo> clientsList = new ArrayList<ClientInfo>();
    for (int i = 0; i < clients.size(); i++) {
        JSONObject client_i = (JSONObject) clients.get(i);
        //This is for hard coded requests in the json file
        //System.out.println(client_i);
        //System.out.println(client_i.getClass());
        String typeOfClient = client_i.get("requests").getClass().toString();
        //This is for a client that has hardCoded requests
        if (typeOfClient.equals("class org.json.simple.JSONArray")) {
            //System.out.println("JSONArray");
            JSONArray requests = (JSONArray) client_i.get("requests");
            ClientInfo c = new ClientInfo(client_i.get("reply_timeout").toString(),
                    client_i.get("request_retries").toString(), client_i.get("resend_head").toString());
            c.prob_failure = client_i.get("prob_failure").toString();
            c.msg_send_delay = client_i.get("msg_delay").toString();
            System.out.println("Successfully added prob failure and msg_send " + c.prob_failure + ","
                    + c.msg_send_delay);
            ArrayList<RequestInfo> req_list = new ArrayList<RequestInfo>();
            for (int j = 0; j < requests.size(); j++) {
                JSONObject request_j = (JSONObject) requests.get(j);
                String req = request_j.get("request").toString();
                String bank = request_j.get("" + "bank").toString();
                String acc = request_j.get("account").toString();
                String seq = request_j.get("seq_num").toString();
                String amt = null;
                try {
                    amt = request_j.get("amount").toString();
                } catch (NullPointerException e) {
                    //System.out.println("Amount not specified.");
                }
                RequestInfo r;
                if (amt == null) {
                    r = new RequestInfo(req, bank, acc, seq);
                } else {
                    r = new RequestInfo(req, bank, acc, amt, seq);
                }
                //RequestInfo r = new RequestInfo(request_j.get("request").toString(), request_j.get("bank").toString(), request_j.get("account").toString(), request_j.get("amount").toString());
                req_list.add(r);
            }
            c.requests = req_list;
            c.PortNumber = 60000 + i;
            clientsList.add(c);
            //System.out.println(client_i);
        }
        //This is for Random client requests
        else if (typeOfClient.equals("class org.json.simple.JSONObject")) {
            JSONObject randomReq = (JSONObject) client_i.get("requests");
            String seed = randomReq.get("seed").toString();
            String num_requests = randomReq.get("num_requests").toString();
            String prob_balance = randomReq.get("prob_balance").toString();
            String prob_deposit = randomReq.get("prob_deposit").toString();
            String prob_withdraw = randomReq.get("prob_withdrawal").toString();
            String prob_transfer = randomReq.get("prob_transfer").toString();
            //ClientInfo c = new ClientInfo(true, seed, num_requests, prob_balance, prob_deposit, prob_withdraw, prob_transfer);
            ClientInfo c = new ClientInfo(client_i.get("reply_timeout").toString(),
                    client_i.get("request_retries").toString(), client_i.get("resend_head").toString(), seed,
                    num_requests, prob_balance, prob_deposit, prob_withdraw, prob_transfer);
            c.PortNumber = 60000 + i;
            clientsList.add(c);
        }
    }
    //System.out.println(clients.size());
    double lowerPercent = 0.0;
    double upperPercent = 1.0;
    double result;
    String bankChainInfoMaster = "";
    for (int x = 0; x < BankArray.size(); x++) {
        BankInfo2 analyze = BankArray.get(x);
        String chain = analyze.bank_name + "#";
        //analyze.servers
        for (int j = 0; j < analyze.servers.size(); j++) {
            if (analyze.servers.get(j).Start_delay.equals("0")) {
                if (j == 0) {
                    chain += analyze.servers.get(j).Port;
                } else {
                    chain += "#" + analyze.servers.get(j).Port;
                }
            }
        }
        if (x == 0) {
            bankChainInfoMaster += chain;
        } else {
            bankChainInfoMaster += "@" + chain;
        }
    }
    //System.out.println("CHAIN: " + bankChainInfoMaster);
    String clientInfoMaster = "";
    for (int x = 0; x < clientsList.size(); x++) {
        ClientInfo analyze = clientsList.get(x);
        if (x == 0) {
            clientInfoMaster += analyze.PortNumber;
        } else {
            clientInfoMaster += "#" + analyze.PortNumber;
        }
    }
    //System.out.println("Clients: " + clientInfoMaster);
    //RUN MASTER HERE
    String MasterPort = "49999";
    String masterExec = "java Master " + MasterPort + " " + clientInfoMaster + " " + bankChainInfoMaster;
    Process masterProcess = Runtime.getRuntime().exec(masterExec);
    System.out.println(masterExec);
    ArrayList<ServerInfoForClient> servInfoCli = new ArrayList<ServerInfoForClient>();
    // List of all servers is saved so that we can wait for them to exit.
    ArrayList<Process> serverPros = new ArrayList<Process>();
    //ArrayList<String> execServs = new ArrayList<String>();
    for (int i = 0; i < BankArray.size(); i++) {
        BankInfo2 analyze = BankArray.get(i);
        //System.out.println(analyze.bank_name);
        //One server in the chain
        String execCmd = "java Server ";
        String hIP = "", hPort = "", tIP = "", tPort = "", bn = "";
        bn = analyze.bank_name;
        boolean joinFlag = false;
        if (analyze.servers.size() == 2 && analyze.servers.get(1).Start_delay.equals("0")) {
            joinFlag = false;
        } else {
            joinFlag = true;
        }
        if (analyze.servers.size() == 1 && joinFlag == false) {
            //if(analyze.servers.size() == 1){
            ServerInfo si = analyze.servers.get(0);
            execCmd += "HEAD_TAIL " + si.IP + ":" + si.Port;
            execCmd += " localhost:0 localhost:0 localhost:" + MasterPort + " " + si.Start_delay + " "
                    + si.Lifetime + " " + si.Receive + " " + si.Send + " " + analyze.bank_name;
            hIP = si.IP;
            hPort = si.Port;
            tIP = si.IP;
            tPort = si.Port;
            System.out.println(execCmd);
            Thread.sleep(500);
            Process pro = Runtime.getRuntime().exec(execCmd);
            serverPros.add(pro);
            //}
        } else if (analyze.servers.size() == 2 && joinFlag == true) {
            ServerInfo si = analyze.servers.get(0);
            execCmd += "HEAD_TAIL " + si.IP + ":" + si.Port;
            execCmd += " localhost:0 localhost:0 localhost:" + MasterPort + " " + si.Start_delay + " "
                    + si.Lifetime + " " + si.Receive + " " + si.Send + " " + analyze.bank_name;
            hIP = si.IP;
            hPort = si.Port;
            tIP = si.IP;
            tPort = si.Port;
            System.out.println(execCmd);
            Thread.sleep(500);
            Process pro = Runtime.getRuntime().exec(execCmd);
            serverPros.add(pro);
            execCmd = "java Server ";
            ServerInfo si2 = analyze.servers.get(1);
            execCmd += "TAIL " + si2.IP + ":" + si2.Port;
            execCmd += " localhost:0 localhost:0 localhost:" + MasterPort + " " + si2.Start_delay + " "
                    + si2.Lifetime + " " + si2.Receive + " " + si2.Send + " " + analyze.bank_name;
            hIP = si.IP;
            hPort = si.Port;
            tIP = si.IP;
            tPort = si.Port;
            System.out.println(execCmd);
            Thread.sleep(500);
            Process pro2 = Runtime.getRuntime().exec(execCmd);
            serverPros.add(pro2);
        } else {
            int icount = 0;
            for (int x = 0; x < analyze.servers.size(); x++) {
                ServerInfo si = analyze.servers.get(x);
                if (si.Start_delay.equals("0")) {
                    icount++;
                }
            }
            System.out.println("icount:" + icount);
            for (int j = 0; j < icount; j++) {
                //for(int j = 0; j < analyze.servers.size(); j++){
                execCmd = "java Server ";
                ServerInfo si = analyze.servers.get(j);
                //Head server
                if (j == 0) {
                    ServerInfo siSucc = analyze.servers.get(j + 1);
                    execCmd += "HEAD " + si.IP + ":" + si.Port + " ";
                    execCmd += "localhost:0 " + siSucc.IP + ":" + siSucc.Port + " localhost:" + MasterPort;
                    execCmd += " " + si.Start_delay + " " + si.Lifetime + " " + si.Receive + " " + si.Send
                            + " " + analyze.bank_name;
                    System.out.println(execCmd);
                    hIP = si.IP;
                    hPort = si.Port;
                }
                //Tail Server
                else if (j == (icount - 1)) { //analyze.servers.size() - 1) ){
                    ServerInfo siPred = analyze.servers.get(j - 1);
                    execCmd += "TAIL " + si.IP + ":" + si.Port + " ";
                    execCmd += siPred.IP + ":" + siPred.Port + " localhost:0 localhost:" + MasterPort;
                    execCmd += " " + si.Start_delay + " " + si.Lifetime + " " + si.Receive + " " + si.Send
                            + " " + analyze.bank_name;
                    tIP = si.IP;
                    tPort = si.Port;
                    System.out.println(execCmd);
                }
                //Middle Server
                else {
                    ServerInfo siSucc = analyze.servers.get(j + 1);
                    ServerInfo siPred = analyze.servers.get(j - 1);
                    execCmd += "MIDDLE " + si.IP + ":" + si.Port + " ";
                    execCmd += siPred.IP + ":" + siPred.Port + " " + siSucc.IP + ":" + siSucc.Port
                            + " localhost:" + MasterPort;
                    execCmd += " " + si.Start_delay + " " + si.Lifetime + " " + si.Receive + " " + si.Send
                            + " " + analyze.bank_name;
                    System.out.println(execCmd);
                }
                Thread.sleep(500);
                Process pro = Runtime.getRuntime().exec(execCmd);
                serverPros.add(pro);
            }
            for (int j = icount; j < analyze.servers.size(); j++) {
                execCmd = "java Server ";
                ServerInfo si = analyze.servers.get(j);
                ServerInfo siPred = analyze.servers.get(j - 1);
                execCmd += "TAIL " + si.IP + ":" + si.Port + " ";
                execCmd += siPred.IP + ":" + siPred.Port + " localhost:0 localhost:" + MasterPort;
                execCmd += " " + si.Start_delay + " " + si.Lifetime + " " + si.Receive + " " + si.Send + " "
                        + analyze.bank_name;
                tIP = si.IP;
                tPort = si.Port;
                System.out.println(execCmd);
                Thread.sleep(500);
                Process pro = Runtime.getRuntime().exec(execCmd);
                serverPros.add(pro);
            }
        }
        ServerInfoForClient newServInfoForCli = new ServerInfoForClient(hPort, hIP, tPort, tIP, bn);
        servInfoCli.add(newServInfoForCli);
    }
    String banksCliParam = "";
    for (int i = 0; i < servInfoCli.size(); i++) {
        ServerInfoForClient temp = servInfoCli.get(i);
        String add = "@" + temp.bank_name + "#" + temp.HeadIP + ":" + temp.HeadPort + "#" + temp.TailIP + ":"
                + temp.TailPort;
        banksCliParam += add;
    }
    banksCliParam = banksCliParam.replaceFirst("@", "");
    //System.out.println(banksCliParam);
    // List of clients is saved so that we can wait for them to exit.
    ArrayList<Process> clientPros = new ArrayList<Process>();
    for (int i = 0; i < clientsList.size(); i++) {
        ClientInfo analyze = clientsList.get(i);
        String requestsString = "";
        if (analyze.isRandom) {
            double balance = Double.parseDouble(analyze.prob_balance);
            //System.out.println(analyze.prob_balance);
            double deposit = Double.parseDouble(analyze.prob_deposit);
            double withdraw = Double.parseDouble(analyze.prob_withdraw);
            int numRequests = Integer.parseInt(analyze.num_requests);
            for (int j = 0; j < numRequests; j++) {
                result = Math.random() * (1.0 - 0.0) + 0.0;
                int randAccount = (int) (Math.random() * (10001 - 0) + 0);
                double randAmount = Math.random() * (10001.00 - 0.0) + 0;
                int adjustMoney = (int) randAmount * 100;
                randAmount = (double) adjustMoney / 100.00;
                int randBank = (int) (Math.random() * (bankNames.length - 0) + 0);
                if (result < balance) {
                    //withdrawal#clientIPPORT%bank_name%accountnum%seq#amount
                    requestsString += "@balance#localhost:" + analyze.PortNumber + "%" + bankNames[randBank]
                            + "%" + randAccount + "%" + j;
                } else if (result < (deposit + balance)) {
                    requestsString += "@deposit#localhost:" + analyze.PortNumber + "%" + bankNames[randBank]
                            + "%" + randAccount + "%" + j + "#" + randAmount;
                } else {
                    requestsString += "@withdrawal#localhost:" + analyze.PortNumber + "%"
                            + bankNames[randBank] + "%" + randAccount + "%" + j + "#" + randAmount;
                }
            }
        } else {
            for (int j = 0; j < analyze.requests.size(); j++) {
                RequestInfo req = analyze.requests.get(j);
                //System.out.println("Sequence ###" + req.sequenceNum);
                if (req.request.equals("balance")) {
                    requestsString += "@" + req.request + "#localhost:" + analyze.PortNumber + "%"
                            + req.bankName + "%" + req.accountNum + "%" + req.sequenceNum;
                } else {
                    requestsString += "@" + req.request + "#localhost:" + analyze.PortNumber + "%"
                            + req.bankName + "%" + req.accountNum + "%" + req.sequenceNum + "#" + req.amount;
                }
            }
        }
        requestsString = requestsString.replaceFirst("@", "");
        String execCommand;
        int p = 60000 + i;
        if (analyze.isRandom) {
            execCommand = "java Client localhost:" + p + " " + banksCliParam + " " + requestsString + " "
                    + analyze.reply_timeout + " " + analyze.request_retries + " " + analyze.resend_head + " "
                    + analyze.prob_failure + " " + analyze.msg_send_delay + " " + analyze.prob_balance + ","
                    + analyze.prob_deposit + "," + analyze.prob_withdraw + "," + analyze.prob_transfer;
        } else {
            execCommand = "java Client localhost:" + p + " " + banksCliParam + " " + requestsString + " "
                    + analyze.reply_timeout + " " + analyze.request_retries + " " + analyze.resend_head + " "
                    + analyze.prob_failure + " " + analyze.msg_send_delay;
        }
        Thread.sleep(500);
        System.out.println(execCommand);
        System.out.println("Client " + (i + 1) + " started");
        Process cliPro = Runtime.getRuntime().exec(execCommand);
        clientPros.add(cliPro);
        //System.out.println(requestsString);
    }
    // Wait for all the clients to terminate
    for (Process clientPro : clientPros) {
        try {
            clientPro.waitFor();
            System.out.println("Client process finished.");
        } catch (InterruptedException e) {
            System.out.println("Interrupted while waiting for client.");
        }
    }
    // Sleep for two seconds
    Thread.sleep(2000);
    // Force termination of the servers
    for (Process serverPro : serverPros) {
        serverPro.destroy();
        System.out.println("Killed server.");
    }
    masterProcess.destroy();
    System.out.println("Killed Master");
}
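A pattern this example relies on throughout: every spawned Process is added to an ArrayList so the whole group can be managed later, with clients awaited via waitFor() and servers killed via destroy(). Minimal sketch (serverCommands is a hypothetical stand-in for the command strings built above):

ArrayList<Process> serverPros = new ArrayList<Process>();
for (String execCmd : serverCommands) { // serverCommands: hypothetical placeholder
    serverPros.add(Runtime.getRuntime().exec(execCmd)); // keep the handle for later cleanup
}
// ... run and wait for the clients ...
for (Process serverPro : serverPros) {
    serverPro.destroy(); // force termination once the clients are done
}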
From source file:TestBufferStreamGenomicsDBImporter.java
/**
 * Sample driver code for testing the Java VariantContext write API for GenomicsDB.
 * The code shows two ways of using the API:
 * (a) Iterator<VariantContext>
 * (b) Directly adding VariantContext objects
 * If "-iterators" is passed as the second argument, method (a) is used.
 */
public static void main(final String[] args) throws IOException, GenomicsDBException, ParseException {
    if (args.length < 2) {
        System.err.println("For loading: [-iterators] <loader.json> "
                + "<stream_name_to_file.json> [bufferCapacity rank lbRowIdx ubRowIdx useMultiChromosomeIterator]");
        System.exit(-1);
    }
    int argsLoaderFileIdx = 0;
    if (args[0].equals("-iterators"))
        argsLoaderFileIdx = 1;
    //Buffer capacity
    long bufferCapacity = (args.length >= argsLoaderFileIdx + 3) ? Integer.parseInt(args[argsLoaderFileIdx + 2])
            : 1024;
    //Specify rank (or partition idx) of this process
    int rank = (args.length >= argsLoaderFileIdx + 4) ? Integer.parseInt(args[argsLoaderFileIdx + 3]) : 0;
    //Specify smallest row idx from which to start loading.
    //This is useful for incremental loading into existing array
    long lbRowIdx = (args.length >= argsLoaderFileIdx + 5) ? Long.parseLong(args[argsLoaderFileIdx + 4]) : 0;
    //Specify largest row idx up to which loading should be performed - for completeness
    long ubRowIdx = (args.length >= argsLoaderFileIdx + 6) ? Long.parseLong(args[argsLoaderFileIdx + 5])
            : Long.MAX_VALUE - 1;
    //Boolean to use MultipleChromosomeIterator
    boolean useMultiChromosomeIterator = (args.length >= argsLoaderFileIdx + 7)
            ? Boolean.parseBoolean(args[argsLoaderFileIdx + 6])
            : false;
    //<loader.json> first arg
    String loaderJSONFile = args[argsLoaderFileIdx];
    GenomicsDBImporter loader = new GenomicsDBImporter(loaderJSONFile, rank, lbRowIdx, ubRowIdx);
    //<stream_name_to_file.json> - useful for the driver only
    //JSON file that contains "stream_name": "vcf_file_path" entries
    FileReader mappingReader = new FileReader(args[argsLoaderFileIdx + 1]);
    JSONParser parser = new JSONParser();
    LinkedHashMap streamNameToFileName = (LinkedHashMap) parser.parse(mappingReader, new LinkedHashFactory());
    ArrayList<VCFFileStreamInfo> streamInfoVec = new ArrayList<VCFFileStreamInfo>();
    long rowIdx = 0;
    for (Object currObj : streamNameToFileName.entrySet()) {
        Map.Entry<String, String> entry = (Map.Entry<String, String>) currObj;
        VCFFileStreamInfo currInfo = new VCFFileStreamInfo(entry.getValue(), loaderJSONFile, rank,
                useMultiChromosomeIterator);

        /** The following 2 lines are not mandatory - use initializeSampleInfoMapFromHeader()
         * iff you know for sure that sample names in the VCF header are globally unique
         * across all streams/files. If not, you have 2 options:
         * (a) specify your own mapping from sample index in the header to SampleInfo object
         *     (unique_name, rowIdx) OR
         * (b) specify the mapping in the callset_mapping_file (JSON) and pass null to
         *     addSortedVariantContextIterator()
         */
        LinkedHashMap<Integer, GenomicsDBImporter.SampleInfo> sampleIndexToInfo = new LinkedHashMap<Integer, GenomicsDBImporter.SampleInfo>();
        rowIdx = GenomicsDBImporter.initializeSampleInfoMapFromHeader(sampleIndexToInfo, currInfo.mVCFHeader,
                rowIdx);
        int streamIdx = -1;
        if (args[0].equals("-iterators"))
            //pass sorted VC iterators
            streamIdx = loader.addSortedVariantContextIterator(entry.getKey(), currInfo.mVCFHeader,
                    currInfo.mIterator, bufferCapacity, VariantContextWriterBuilder.OutputType.BCF_STREAM,
                    sampleIndexToInfo);
        else
            //use buffers - VCs will be provided by caller
            streamIdx = loader.addBufferStream(entry.getKey(), currInfo.mVCFHeader, bufferCapacity,
                    VariantContextWriterBuilder.OutputType.BCF_STREAM, sampleIndexToInfo);
        currInfo.mStreamIdx = streamIdx;
        streamInfoVec.add(currInfo);
    }
    if (args[0].equals("-iterators")) {
        //Much simpler interface if using Iterator<VariantContext>
        loader.importBatch();
        assert loader.isDone();
    } else {
        //Must be called after all iterators/streams added - no more iterators/streams
        //can be added once this function is called
        loader.setupGenomicsDBImporter();
        //Counts and tracks buffer streams for which new data must be supplied
        //Initialized to all the buffer streams
        int numExhaustedBufferStreams = streamInfoVec.size();
        int[] exhaustedBufferStreamIdxs = new int[numExhaustedBufferStreams];
        for (int i = 0; i < numExhaustedBufferStreams; ++i)
            exhaustedBufferStreamIdxs[i] = i;
        while (!loader.isDone()) {
            //Add data for streams that were exhausted in the previous round
            for (int i = 0; i < numExhaustedBufferStreams; ++i) {
                VCFFileStreamInfo currInfo = streamInfoVec.get(exhaustedBufferStreamIdxs[i]);
                boolean added = true;
                while (added && (currInfo.mIterator.hasNext() || currInfo.mNextVC != null)) {
                    if (currInfo.mNextVC != null)
                        added = loader.add(currInfo.mNextVC, currInfo.mStreamIdx);
                    if (added)
                        if (currInfo.mIterator.hasNext())
                            currInfo.mNextVC = currInfo.mIterator.next();
                        else
                            currInfo.mNextVC = null;
                }
            }
            loader.importBatch();
            numExhaustedBufferStreams = (int) loader.getNumExhaustedBufferStreams();
            for (int i = 0; i < numExhaustedBufferStreams; ++i)
                exhaustedBufferStreamIdxs[i] = loader.getExhaustedBufferStreamIndex(i);
        }
    }
}
From source file:edu.nyu.vida.data_polygamy.feature_identification.IndexCreation.java
/**
 * @param args
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the index and events " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option thresholdOption = new Option("t", "use-custom-thresholds", false,
            "use custom thresholds for regular and rare events, defined in HDFS_HOME/"
                    + FrameworkUtils.thresholdDir + " file");
    thresholdOption.setRequired(false);
    options.addOption(thresholdOption);

    Option gOption = new Option("g", "group", true,
            "set group of datasets for which the indices and events" + " will be computed");
    gOption.setRequired(true);
    gOption.setArgName("GROUP");
    gOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(gOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-id", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp("hadoop jar data-polygamy.jar "
                    + "edu.nyu.vida.data_polygamy.feature_identification.IndexCreation", options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    String datasetNames = "";
    String datasetIds = "";

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> shortDatasetIndex = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();
    HashMap<String, String> datasetId = new HashMap<String, String>();
    HashMap<String, HashMap<Integer, Double>> datasetRegThreshold = new HashMap<String, HashMap<Integer, Double>>();
    HashMap<String, HashMap<Integer, Double>> datasetRareThreshold = new HashMap<String, HashMap<Integer, Double>>();

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());
    BufferedReader br;

    boolean removeExistingFiles = cmd.hasOption("f");
    boolean isThresholdUserDefined = cmd.hasOption("t");

    for (String dataset : cmd.getOptionValues("g")) {

        // getting aggregates
        String[] aggregate = FrameworkUtils.searchAggregates(dataset, s3conf, s3);
        if (aggregate.length == 0) {
            System.out.println("No aggregates found for " + dataset + ".");
            continue;
        }

        // getting aggregates header
        String aggregatesHeaderFileName = FrameworkUtils.searchAggregatesHeader(dataset, s3conf, s3);
        if (aggregatesHeaderFileName == null) {
            System.out.println("No aggregate header for " + dataset);
            continue;
        }
        String aggregatesHeader = s3bucket + FrameworkUtils.preProcessingDir + "/" + aggregatesHeaderFileName;

        shortDataset.add(dataset);
        datasetId.put(dataset, null);

        if (s3) {
            path = new Path(aggregatesHeader);
            fs = FileSystem.get(path.toUri(), s3conf);
        } else {
            path = new Path(fs.getHomeDirectory() + "/" + aggregatesHeader);
        }

        br = new BufferedReader(new InputStreamReader(fs.open(path)));
        datasetAgg.put(dataset, br.readLine().split("\t")[1]);
        br.close();
        if (s3)
            fs.close();
    }

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    // getting dataset id
    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    // getting user defined thresholds
    if (isThresholdUserDefined) {
        if (s3) {
            path = new Path(s3bucket + FrameworkUtils.thresholdDir);
            fs = FileSystem.get(path.toUri(), s3conf);
        } else {
            path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.thresholdDir);
        }
        br = new BufferedReader(new InputStreamReader(fs.open(path)));
        line = br.readLine();
        while (line != null) {
            // getting dataset name
            String dataset = line.trim();
            HashMap<Integer, Double> regThresholds = new HashMap<Integer, Double>();
            HashMap<Integer, Double> rareThresholds = new HashMap<Integer, Double>();
            line = br.readLine();
            while ((line != null) && (line.split("\t").length > 1)) {
                // getting attribute ids and thresholds
                String[] keyVals = line.trim().split("\t");
                int att = Integer.parseInt(keyVals[0].trim());
                regThresholds.put(att, Double.parseDouble(keyVals[1].trim()));
                rareThresholds.put(att, Double.parseDouble(keyVals[2].trim()));
                line = br.readLine();
            }
            datasetRegThreshold.put(dataset, regThresholds);
            datasetRareThreshold.put(dataset, rareThresholds);
        }
        br.close();
    }
    if (s3)
        fs.close();

    // datasets that will use existing merge tree
    ArrayList<String> useMergeTree = new ArrayList<String>();

    // creating index for each spatio-temporal resolution
    FrameworkUtils.createDir(s3bucket + FrameworkUtils.indexDir, s3conf, s3);

    HashSet<String> input = new HashSet<String>();

    for (String dataset : shortDataset) {

        String indexCreationOutputFileName = s3bucket + FrameworkUtils.indexDir + "/" + dataset + "/";
        String mergeTreeFileName = s3bucket + FrameworkUtils.mergeTreeDir + "/" + dataset + "/";

        if (removeExistingFiles) {
            FrameworkUtils.removeFile(indexCreationOutputFileName, s3conf, s3);
            FrameworkUtils.removeFile(mergeTreeFileName, s3conf, s3);
            FrameworkUtils.createDir(mergeTreeFileName, s3conf, s3);
        } else if (datasetRegThreshold.containsKey(dataset)) {
            FrameworkUtils.removeFile(indexCreationOutputFileName, s3conf, s3);
            if (FrameworkUtils.fileExists(mergeTreeFileName, s3conf, s3)) {
                useMergeTree.add(dataset);
            }
        }

        if (!FrameworkUtils.fileExists(indexCreationOutputFileName, s3conf, s3)) {
            input.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset);
            shortDatasetIndex.add(dataset);
        }
    }

    if (input.isEmpty()) {
        System.out.println("All the input datasets have indices.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    String aggregateDatasets = "";
    it = input.iterator();
    while (it.hasNext()) {
        aggregateDatasets += it.next() + ",";
    }

    Job icJob = null;
    Configuration icConf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "index";
    String indexOutputDir = s3bucket + FrameworkUtils.indexDir + "/tmp/";

    FrameworkUtils.removeFile(indexOutputDir, s3conf, s3);

    icConf.set("dataset-name", datasetNames);
    icConf.set("dataset-id", datasetIds);

    if (!useMergeTree.isEmpty()) {
        String useMergeTreeStr = "";
        for (String dt : useMergeTree) {
            useMergeTreeStr += dt + ",";
        }
        icConf.set("use-merge-tree", useMergeTreeStr.substring(0, useMergeTreeStr.length() - 1));
    }

    for (int i = 0; i < shortDataset.size(); i++) {
        String dataset = shortDataset.get(i);
        String id = datasetId.get(dataset);
        icConf.set("dataset-" + id + "-aggregates", datasetAgg.get(dataset));
        if (datasetRegThreshold.containsKey(dataset)) {
            HashMap<Integer, Double> regThresholds = datasetRegThreshold.get(dataset);
            String thresholds = "";
            for (int att : regThresholds.keySet()) {
                thresholds += String.valueOf(att) + "-" + String.valueOf(regThresholds.get(att)) + ",";
            }
            icConf.set("regular-" + id, thresholds.substring(0, thresholds.length() - 1));
        }
        if (datasetRareThreshold.containsKey(dataset)) {
            HashMap<Integer, Double> rareThresholds = datasetRareThreshold.get(dataset);
            String thresholds = "";
            for (int att : rareThresholds.keySet()) {
                thresholds += String.valueOf(att) + "-" + String.valueOf(rareThresholds.get(att)) + ",";
            }
            icConf.set("rare-" + id, thresholds.substring(0, thresholds.length() - 1));
        }
    }

    icConf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    icConf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    icConf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    icConf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    icConf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    icConf.set("mapreduce.task.io.sort.mb", "200");
    icConf.set("mapreduce.task.io.sort.factor", "100");
    //icConf.set("mapreduce.task.timeout", "1800000");
    machineConf.setMachineConfiguration(icConf);

    if (s3) {
        machineConf.setMachineConfiguration(icConf);
        icConf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        icConf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        icConf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        icConf.set("mapreduce.map.output.compress", "true");
        icConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //icConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        icConf.set("mapreduce.map.output.compress", "true");
        icConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //icConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    icJob = new Job(icConf);
    icJob.setJobName(jobName);

    icJob.setMapOutputKeyClass(AttributeResolutionWritable.class);
    icJob.setMapOutputValueClass(SpatioTemporalFloatWritable.class);
    icJob.setOutputKeyClass(AttributeResolutionWritable.class);
    icJob.setOutputValueClass(TopologyTimeSeriesWritable.class);
    //icJob.setOutputKeyClass(Text.class);
    //icJob.setOutputValueClass(Text.class);

    icJob.setMapperClass(IndexCreationMapper.class);
    icJob.setReducerClass(IndexCreationReducer.class);
    icJob.setNumReduceTasks(machineConf.getNumberReduces());

    icJob.setInputFormatClass(SequenceFileInputFormat.class);
    //icJob.setOutputFormatClass(SequenceFileOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(icJob, SequenceFileOutputFormat.class);
    //LazyOutputFormat.setOutputFormatClass(icJob, TextOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(icJob, true);
    SequenceFileOutputFormat.setOutputCompressionType(icJob, CompressionType.BLOCK);

    FileInputFormat.setInputDirRecursive(icJob, true);
    FileInputFormat.setInputPaths(icJob, aggregateDatasets.substring(0, aggregateDatasets.length() - 1));
    FileOutputFormat.setOutputPath(icJob, new Path(indexOutputDir));

    icJob.setJarByClass(IndexCreation.class);

    long start = System.currentTimeMillis();
    icJob.submit();
    icJob.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (String dataset : shortDatasetIndex) {
        String from = s3bucket + FrameworkUtils.indexDir + "/tmp/" + dataset + "/";
        String to = s3bucket + FrameworkUtils.indexDir + "/" + dataset + "/";
        FrameworkUtils.renameFile(from, to, s3conf, s3);
    }
}
From source file:edu.oregonstate.eecs.mcplan.abstraction.EvaluateSimilarityFunction.java
/**
 * @param args
 * @throws IOException
 * @throws FileNotFoundException
 */
public static void main(final String[] args) throws FileNotFoundException, IOException {
    final String experiment_file = args[0];
    final File root_directory;
    if (args.length > 1) {
        root_directory = new File(args[1]);
    } else {
        root_directory = new File(".");
    }
    final CsvConfigurationParser csv_config = new CsvConfigurationParser(new FileReader(experiment_file));
    final String experiment_name = FilenameUtils.getBaseName(experiment_file);

    final File expr_directory = new File(root_directory, experiment_name);
    expr_directory.mkdirs();

    final Csv.Writer csv = new Csv.Writer(
            new PrintStream(new FileOutputStream(new File(expr_directory, "results.csv"))));
    final String[] parameter_headers = new String[] { "kpca.kernel", "kpca.rbf.sigma",
            "kpca.random_forest.Ntrees", "kpca.random_forest.max_depth", "kpca.Nbases", "multiclass.classifier",
            "multiclass.random_forest.Ntrees", "multiclass.random_forest.max_depth",
            "pairwise_classifier.max_branching", "training.label_noise" };
    csv.cell("domain").cell("abstraction");
    for (final String p : parameter_headers) {
        csv.cell(p);
    }
    csv.cell("Ntrain").cell("Ntest").cell("ami.mean").cell("ami.variance").cell("ami.confidence").newline();

    for (int expr = 0; expr < csv_config.size(); ++expr) {
        try {
            final KeyValueStore expr_config = csv_config.get(expr);
            final Configuration config = new Configuration(root_directory.getPath(), expr_directory.getName(),
                    expr_config);

            System.out.println("[Loading '" + config.training_data_single + "']");
            final Instances single = WekaUtil
                    .readLabeledDataset(new File(root_directory, config.training_data_single + ".arff"));

            final Instances train = new Instances(single, 0);
            final int[] idx = Fn.range(0, single.size());
            int instance_counter = 0;
            Fn.shuffle(config.rng, idx);
            final int Ntrain = config.getInt("Ntrain_games"); // TODO: Rename?
            final double label_noise = config.getDouble("training.label_noise");
            final int Nlabels = train.classAttribute().numValues();
            assert (Nlabels > 0);
            for (int i = 0; i < Ntrain; ++i) {
                final Instance inst = single.get(idx[instance_counter++]);
                if (label_noise > 0 && config.rng.nextDouble() < label_noise) {
                    int noisy_label = 0;
                    do {
                        noisy_label = config.rng.nextInt(Nlabels);
                    } while (noisy_label == (int) inst.classValue());
                    System.out.println("Noisy label (" + inst.classValue() + " -> " + noisy_label + ")");
                    inst.setClassValue(noisy_label);
                }
                train.add(inst);
                inst.setDataset(train);
            }

            final Fn.Function2<Boolean, Instance, Instance> plausible_p = createPlausiblePredicate(config);

            final int Ntest = config.Ntest_games;
            int Ntest_added = 0;
            final ArrayList<Instances> tests = new ArrayList<Instances>();
            while (instance_counter < single.size() && Ntest_added < Ntest) {
                final Instance inst = single.get(idx[instance_counter++]);
                boolean found = false;
                for (final Instances test : tests) {
                    // Note that 'plausible_p' should be transitive
                    if (plausible_p.apply(inst, test.get(0))) {
                        WekaUtil.addInstance(test, inst);
                        if (test.size() == 30) {
                            Ntest_added += test.size();
                        } else if (test.size() > 30) {
                            Ntest_added += 1;
                        }
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    final Instances test = new Instances(single, 0);
                    WekaUtil.addInstance(test, inst);
                    tests.add(test);
                }
            }
            final Iterator<Instances> test_itr = tests.iterator();
            while (test_itr.hasNext()) {
                if (test_itr.next().size() < 30) {
                    test_itr.remove();
                }
            }
            System.out.println("=== tests.size() = " + tests.size());
            System.out.println("=== Ntest_added = " + Ntest_added);

            System.out.println("[Training]");
            final Evaluator evaluator = createEvaluator(config, train);
            // final Instances transformed_test = evaluator.prepareInstances( test );

            System.out.println("[Evaluating]");
            final int Nxval = evaluator.isSensitiveToOrdering() ? 10 : 1;
            final MeanVarianceAccumulator ami = new MeanVarianceAccumulator();

            final MeanVarianceAccumulator errors = new MeanVarianceAccumulator();
            final MeanVarianceAccumulator relative_error = new MeanVarianceAccumulator();
            int c = 0;
            for (int xval = 0; xval < Nxval; ++xval) {
                for (final Instances test : tests) {
                    // TODO: Debugging
                    WekaUtil.writeDataset(new File(config.root_directory), "test_" + (c++), test);

                    // transformed_test.randomize( new RandomAdaptor( config.rng ) );
                    // final ClusterContingencyTable ct = evaluator.evaluate( transformed_test );
                    test.randomize(new RandomAdaptor(config.rng));
                    final ClusterContingencyTable ct = evaluator.evaluate(test);
                    System.out.println(ct);

                    int Nerrors = 0;
                    final MeanVarianceAccumulator mv = new MeanVarianceAccumulator();
                    for (int i = 0; i < ct.R; ++i) {
                        final int max = Fn.max(ct.n[i]);
                        Nerrors += (ct.a[i] - max);
                        mv.add(((double) ct.a[i]) / ct.N * Nerrors / ct.a[i]);
                    }
                    errors.add(Nerrors);
                    relative_error.add(mv.mean());
                    System.out.println("exemplar: " + test.get(0));
                    System.out.println("Nerrors = " + Nerrors);

                    final PrintStream ct_out = new PrintStream(
                            new FileOutputStream(new File(expr_directory, "ct_" + expr + "_" + xval + ".csv")));
                    ct.writeCsv(ct_out);
                    ct_out.close();

                    final double ct_ami = ct.adjustedMutualInformation_max();
                    if (Double.isNaN(ct_ami)) {
                        System.out.println("! ct_ami = NaN");
                    } else {
                        ami.add(ct_ami);
                    }
                    System.out.println();
                }
            }
            System.out.println("errors = " + errors.mean() + " (" + errors.confidence() + ")");
            System.out.println(
                    "relative_error = " + relative_error.mean() + " (" + relative_error.confidence() + ")");
            System.out.println("AMI_max = " + ami.mean() + " (" + ami.confidence() + ")");

            csv.cell(config.domain).cell(config.get("abstraction.discovery"));
            for (final String p : parameter_headers) {
                csv.cell(config.get(p));
            }
            csv.cell(Ntrain).cell(Ntest).cell(ami.mean()).cell(ami.variance()).cell(ami.confidence()).newline();
        } catch (final Exception ex) {
            ex.printStackTrace();
        }
    }
}
From source file:de.prozesskraft.pradar.parts.PradarPartUi3.java
/**
 * @param args
 */
public static void main(String[] args) {
    /*----------------------------
      get options from ini-file
    ----------------------------*/
    File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(PradarPartUi3.class) + "/" + "../etc/pradar-gui.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option help = new Option("help", "print this message");
    Option v = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option dbfile = OptionBuilder.withArgName("dbfile").hasArg().withDescription("[optional] dbfile")
            // .isRequired()
            .create("dbfile");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();
    options.addOption(help);
    options.addOption(v);
    options.addOption(dbfile);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        line = parser.parse(options, args);
    } catch (Exception exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
    }

    /*----------------------------
      usage/help
    ----------------------------*/
    if (line.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        // formatter.printHelp("checkin --version [% version %]", options);
        formatter.printHelp("pradar-gui", options);
        System.exit(0);
    }

    if (line.hasOption("v")) {
        System.out.println("author: alexander.vogel@caegroup.de");
        System.out.println("version: [% version %]");
        System.out.println("date: [% date %]");
        System.exit(0);
    }

    /*----------------------------
      check the license and abort if it is invalid
    ----------------------------*/
    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      other things
    ----------------------------*/
    // gui
    final Display display = new Display();
    Realm.runWithDefault(SWTObservables.getRealm(display), new Runnable() {
        public void run() {
            try {
                Shell shell = new Shell(display);
                shell.setText("pradar " + "[% version %]");

                // set an icon
                if (this.getClass().getResourceAsStream("/logoSymbol50Transp.png") != null) {
                    shell.setImage(new Image(display,
                            this.getClass().getResourceAsStream("/logoSymbol50Transp.png")));
                } else if ((new java.io.File("logoSymbol50Transp.png")).exists()) {
                    shell.setImage(new Image(display, "logoSymbol50Transp.png"));
                }

                shell.setLayout(new FillLayout());
                // shell.setSize(1500, 1000);
                shell.setMaximized(true);

                Composite composite = new Composite(shell, SWT.NO_FOCUS);
                GridLayout gl_composite = new GridLayout(2, false);
                gl_composite.marginWidth = 0;
                gl_composite.marginHeight = 0;
                new PradarPartUi3(composite);

                try {
                    shell.open();
                    while (!shell.isDisposed()) {
                        if (!display.readAndDispatch()) {
                            display.sleep();
                        }
                    }
                } finally {
                    if (!shell.isDisposed()) {
                        shell.dispose();
                    }
                }
            } finally {
                display.dispose();
            }
        }
    });
    System.exit(0);
}
From source file:it.univpm.deit.semedia.musicuri.core.MusicURISearch.java
/**
 * Identifies the MusicURIReference that most closely matches the given audio file
 * @param args the audio file to identify
 */
public static void main(String[] args) throws Exception {
    MusicURISearch engine = new MusicURISearch((Toolset.getCWD() + "db\\"), "MusicURIReferences.db");

    //*****************************************************************************
    //************************* F I L E   I N P U T ******************************
    //*****************************************************************************
    if ((args.length == 1) && (new File(args[0]).exists())) {
        // get the file's canonical path
        File givenHandle = new File(args[0]);
        boolean finalResortIsCombinedDistance = true;
        String queryAudioCanonicalPath = givenHandle.getCanonicalPath();
        System.out.println("Input: " + queryAudioCanonicalPath);
        PerformanceStatistic tempStat;

        if (givenHandle.isDirectory()) {
            File[] list = givenHandle.listFiles();
            if (list.length == 0) {
                System.out.println("Directory is empty");
                return;
            } else {
                ArrayList<PerformanceStatistic> allStats = new ArrayList<PerformanceStatistic>();
                File currentFile;
                int truePositives = 0;
                int falsePositives = 0;
                int trueNegatives = 0;
                int falseNegatives = 0;
                if (finalResortIsCombinedDistance)
                    System.out.println("Final resort is combined distance");
                else
                    System.out.println("Final resort is audio signature distance");
                for (int i = 0; i < list.length; i++) {
                    currentFile = list[i];
                    try {
                        if (Toolset.isSupportedAudioFile(currentFile)) {
                            System.out.println("\nIdentifying : " + currentFile.getName());
                            // Identify the current file (not the directory handle)
                            tempStat = engine.getIdentificationPerformance(new MusicURIQuery(currentFile),
                                    true, true, 0.09f, finalResortIsCombinedDistance);
                            if (tempStat != null) {
                                allStats.add(tempStat);
                                if (tempStat.isTruePositive())
                                    truePositives++;
                                if (tempStat.isFalsePositive())
                                    falsePositives++;
                                if (tempStat.isTrueNegative())
                                    trueNegatives++;
                                if (tempStat.isFalseNegative())
                                    falseNegatives++;
                            }
                            System.out.println("\nTrue Positives  : " + truePositives + "/" + allStats.size());
                            System.out.println("False Positives : " + falsePositives + "/" + allStats.size());
                            System.out.println("True Negatives  : " + trueNegatives + "/" + allStats.size());
                            System.out.println("False Negatives : " + falseNegatives + "/" + allStats.size());
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
                System.out.println("\n\nStatistics for Test Case: " + queryAudioCanonicalPath);
                engine.mergeStatistics(allStats);
            }
        } // end if givenHandle is a directory

        if (givenHandle.isFile()) {
            if (Toolset.isSupportedAudioFile(givenHandle)) {
                tempStat = engine.getIdentificationPerformance(new MusicURIQuery(givenHandle), false, false,
                        0.09f, false);
                if (tempStat != null) {
                    System.out.println("\nIdentification completed");
                    ArrayList<PerformanceStatistic> allStats = new ArrayList<PerformanceStatistic>();
                    allStats.add(tempStat);
                    engine.mergeStatistics(allStats);
                } else
                    System.out.println("Error in identification");
            }
        }
    } // end if args are valid
    else {
        System.err.println("MusicURISearch");
        System.err.println("Usage: java it.univpm.deit.semedia.musicuri.core.MusicURISearch {unknown.mp3}");
    }
}
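The ArrayList work in this example is a guard-then-add accumulation: each per-file PerformanceStatistic is appended only when identification produced a result, and the whole list is handed to mergeStatistics() at the end. A minimal, self-contained sketch of that pattern follows; the identify() helper and the String results are hypothetical stand-ins for the engine and PerformanceStatistic types above.

import java.util.ArrayList;

public class AccumulateThenMerge {
    public static void main(String[] args) {
        ArrayList<String> allStats = new ArrayList<String>();
        String[] inputs = { "a.mp3", "notes.txt", "b.mp3" };
        for (String input : inputs) {
            String stat = identify(input); // stand-in for engine.getIdentificationPerformance(...)
            if (stat != null) {
                allStats.add(stat); // append only when we actually got a result
            }
        }
        // Stand-in for engine.mergeStatistics(allStats)
        System.out.println("Merged " + allStats.size() + " results: " + allStats);
    }

    // Hypothetical helper: returns null for unsupported files, mirroring the null check above
    private static String identify(String input) {
        return input.endsWith(".mp3") ? "match:" + input : null;
    }
}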
From source file:com.adobe.aem.demo.communities.Loader.java
public static void main(String[] args) {
    String hostname = null;
    String port = null;
    String altport = null;
    String csvfile = null;
    String location = null;
    String language = "en";
    String analytics = null;
    String adminPassword = "admin";
    String[] url = new String[10]; // Handling 10 levels maximum for nested comments
    boolean reset = false;
    boolean configure = false;
    int urlLevel = 0;
    int row = 0;
    HashMap<String, ArrayList<String>> learningpaths = new HashMap<String, ArrayList<String>>();

    // Command line options for this tool
    Options options = new Options();
    options.addOption("h", true, "Hostname");
    options.addOption("p", true, "Port");
    options.addOption("a", true, "Alternate Port");
    options.addOption("f", true, "CSV file");
    options.addOption("r", false, "Reset");
    options.addOption("u", true, "Admin Password");
    options.addOption("c", false, "Configure");
    options.addOption("s", true, "Analytics Endpoint");
    options.addOption("t", false, "Analytics Tracking");
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("h")) {
            hostname = cmd.getOptionValue("h");
        }
        if (cmd.hasOption("p")) {
            port = cmd.getOptionValue("p");
        }
        if (cmd.hasOption("a")) {
            altport = cmd.getOptionValue("a");
        }
        if (cmd.hasOption("f")) {
            csvfile = cmd.getOptionValue("f");
        }
        if (cmd.hasOption("u")) {
            adminPassword = cmd.getOptionValue("u");
        }
        if (cmd.hasOption("t")) {
            if (cmd.hasOption("s")) {
                analytics = cmd.getOptionValue("s");
            }
        }
        if (cmd.hasOption("r")) {
            reset = true;
        }
        if (cmd.hasOption("c")) {
            configure = true;
        }
        if (csvfile == null || port == null || hostname == null) {
            System.out.println(
                    "Request parameters: -h hostname -p port -a alternateport -u adminPassword -f path_to_CSV_file -r (true|false, delete content before import) -c (true|false, post additional properties)");
            System.exit(-1);
        }
    } catch (ParseException ex) {
        logger.error(ex.getMessage());
    }

    String componentType = null;
    try {
        logger.debug("AEM Demo Loader: Processing file " + csvfile);

        // Reading the CSV file, line by line
        Reader in = new FileReader(csvfile);
        Iterable<CSVRecord> records = CSVFormat.EXCEL.parse(in);
        for (CSVRecord record : records) {
            row = row + 1;
            logger.info("Row: " + row + ", new record: " + record.get(0));

            // Let's see if we deal with a comment
            if (record.get(0).startsWith("#")) {
                // We can ignore the comment line and move on
                continue;
            }

            // Let's see if we need to terminate this process
            if (record.get(0).equals(KILL)) {
                System.exit(1);
            }

            // Let's see if we need to create a new Community site
            if (record.get(0).equals(SITE)) {
                // Building the form entity to be posted
                MultipartEntityBuilder builder = MultipartEntityBuilder.create();
                builder.setCharset(MIME.UTF8_CHARSET);
                builder.addTextBody(":operation", "social:createSite",
                        ContentType.create("text/plain", MIME.UTF8_CHARSET));
                builder.addTextBody("_charset_", "UTF-8", ContentType.create("text/plain", MIME.UTF8_CHARSET));
                String urlName = null;
                for (int i = 2; i < record.size() - 1; i = i + 2) {
                    if (record.get(i) != null && record.get(i + 1) != null && record.get(i).length() > 0) {
                        String name = record.get(i).trim();
                        String value = record.get(i + 1).trim();
                        if (value.equals("TRUE")) {
                            value = "true";
                        }
                        if (value.equals("FALSE")) {
                            value = "false";
                        }
                        if (name.equals("urlName")) {
                            urlName = value;
                        }
                        if (name.equals(LANGUAGE)) {
                            language = value;
                        }
                        if (name.equals(BANNER)) {
                            File attachment = new File(
                                    csvfile.substring(0, csvfile.indexOf(".csv")) + File.separator + value);
                            builder.addBinaryBody(BANNER, attachment, ContentType.MULTIPART_FORM_DATA,
                                    attachment.getName());
                        } else if (name.equals(THUMBNAIL)) {
                            File attachment = new File(
                                    csvfile.substring(0, csvfile.indexOf(".csv")) + File.separator + value);
                            builder.addBinaryBody(THUMBNAIL, attachment, ContentType.MULTIPART_FORM_DATA,
                                    attachment.getName());
                        } else {
                            builder.addTextBody(name, value,
                                    ContentType.create("text/plain", MIME.UTF8_CHARSET));
                        }
                    }
                }

                // Site creation
                String siteId = doPost(hostname, port, "/content.social.json", "admin", adminPassword,
                        builder.build(), "response/siteId");

                // Site publishing, if there's a publish instance to publish to
                if (!port.equals(altport)) {
                    List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
                    nameValuePairs.add(new BasicNameValuePair("id", "nobot"));
                    nameValuePairs.add(new BasicNameValuePair(":operation", "social:publishSite"));
                    nameValuePairs
                            .add(new BasicNameValuePair("path", "/content/sites/" + urlName + "/" + language));
                    doPost(hostname, port, "/communities/sites.html", "admin", adminPassword,
                            new UrlEncodedFormEntity(nameValuePairs), null);

                    // Wait for site to be available on Publish
                    doWait(hostname, altport, "admin", adminPassword,
                            (siteId != null ? siteId : urlName) + "-groupadministrators");
                }
                continue;
            }

            // Let's see if we need to create a new Tag
            if (record.get(0).equals(TAG)) {
                // Building the form entity to be posted
                MultipartEntityBuilder builder = MultipartEntityBuilder.create();
                builder.setCharset(MIME.UTF8_CHARSET);
                builder.addTextBody("_charset_", "UTF-8", ContentType.create("text/plain", MIME.UTF8_CHARSET));
                for (int i = 1; i < record.size() - 1; i = i + 2) {
                    if (record.get(i) != null && record.get(i + 1) != null && record.get(i).length() > 0
                            && record.get(i + 1).length() > 0) {
                        String name = record.get(i).trim();
                        String value = record.get(i + 1).trim();
                        builder.addTextBody(name, value, ContentType.create("text/plain", MIME.UTF8_CHARSET));
                    }
                }

                // Tag creation
                doPost(hostname, port, "/bin/tagcommand", "admin", adminPassword, builder.build(), null);
                continue;
            }

            // Let's see if we need to create a new Community site template,
            // and if we can do it (script run against author instance)
            if (record.get(0).equals(SITETEMPLATE)) {
                // Building the form entity to be posted
                MultipartEntityBuilder builder = MultipartEntityBuilder.create();
                builder.setCharset(MIME.UTF8_CHARSET);
                builder.addTextBody(":operation", "social:createSiteTemplate",
                        ContentType.create("text/plain", MIME.UTF8_CHARSET));
                builder.addTextBody("_charset_", "UTF-8", ContentType.create("text/plain", MIME.UTF8_CHARSET));
                for (int i = 2; i < record.size() - 1; i = i + 2) {
                    if (record.get(i) != null && record.get(i + 1) != null && record.get(i).length() > 0) {
                        String name = record.get(i).trim();
                        String value = record.get(i + 1).trim();
                        builder.addTextBody(name, value, ContentType.create("text/plain", MIME.UTF8_CHARSET));
                    }
                }

                // Site template creation
                doPost(hostname, port, "/content.social.json", "admin", adminPassword, builder.build(), null);
                continue;
            }

            // Let's see if we need to create a new Community group
            if (record.get(0).equals(GROUP)) {
                // Building the form entity to be posted
                MultipartEntityBuilder builder = MultipartEntityBuilder.create();
                builder.setCharset(MIME.UTF8_CHARSET);
                builder.addTextBody(":operation", "social:createCommunityGroup",
                        ContentType.create("text/plain", MIME.UTF8_CHARSET));
                builder.addTextBody("_charset_", "UTF-8", ContentType.create("text/plain", MIME.UTF8_CHARSET));
                for (int i = 3; i < record.size() - 1; i = i + 2) {
                    if (record.get(i) != null && record.get(i + 1) != null && record.get(i).length() > 0) {
                        String name = record.get(i).trim();
                        String value = record.get(i + 1).trim();
                        if (value.equals("TRUE")) {
                            value = "true";
                        }
                        if (value.equals("FALSE")) {
                            value = "false";
                        }
                        if (name.equals(IMAGE)) {
                            File attachment = new File(
                                    csvfile.substring(0, csvfile.indexOf(".csv")) + File.separator + value);
                            builder.addBinaryBody(IMAGE, attachment, ContentType.MULTIPART_FORM_DATA,
                                    attachment.getName());
                        } else {
                            builder.addTextBody(name, value,
                                    ContentType.create("text/plain", MIME.UTF8_CHARSET));
                        }
                    }
                }

                // Group creation
                String memberGroupId = doPost(hostname, port, record.get(1), getUserName(record.get(2)),
                        getPassword(record.get(2), adminPassword), builder.build(), "response/memberGroupId");

                // Wait for group to be available on Publish, if available
                logger.debug("Waiting for completion of Community Group creation");
                doWait(hostname, port, "admin", adminPassword, memberGroupId);
                continue;
            }

            // Let's see if it's a simple Sling Delete request
            if (record.get(0).equals(SLINGDELETE)) {
                doDelete(hostname, port, record.get(1), "admin", adminPassword);
                continue;
            }

            // Let's see if we need to add users to an AEM Group
            if ((record.get(0).equals(GROUPMEMBERS) || record.get(0).equals(SITEMEMBERS))
                    && record.get(GROUP_INDEX_NAME) != null) {
                // Checking if we have a member group for this site
                String groupName = record.get(GROUP_INDEX_NAME);
                if (record.get(0).equals(SITEMEMBERS)) {
                    // Let's fetch the siteId for this Community Site Url
                    String siteConfig = doGet(hostname, port, groupName, "admin", adminPassword, null);
                    try {
                        String siteId = new JSONObject(siteConfig).getString("siteId");
                        if (siteId != null)
                            groupName = "community-" + siteId + "-members";
                        logger.debug("Member group name is " + groupName);
                    } catch (Exception e) {
                        logger.error(e.getMessage());
                    }
                }

                // Pause until the group can be found
                doWait(hostname, port, "admin", adminPassword, groupName);
                List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
                nameValuePairs.add(new BasicNameValuePair("filter",
                        "[{\"operation\":\"like\",\"rep:principalName\":\"" + groupName + "\"}]"));
                nameValuePairs.add(new BasicNameValuePair("type", "groups"));
                String groupList = doGet(hostname, port,
                        "/libs/social/console/content/content/userlist.social.0.10.json", "admin",
                        adminPassword, nameValuePairs);
                logger.debug("List of groups: " + groupList);
                if (groupList.indexOf(groupName) > 0) {
                    logger.debug("Group was found on " + port);
                    try {
                        JSONArray jsonArray = new JSONObject(groupList).getJSONArray("items");
                        if (jsonArray.length() == 1) {
                            JSONObject jsonObject = jsonArray.getJSONObject(0);
                            String groupPath = jsonObject.getString("path");
                            logger.debug("Group path is " + groupPath);

                            // Constructing a multi-part POST for group membership
                            MultipartEntityBuilder builder = MultipartEntityBuilder.create();
                            builder.setCharset(MIME.UTF8_CHARSET);
                            builder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
                            List<NameValuePair> groupNameValuePairs = buildNVP(record, 2);
                            for (NameValuePair nameValuePair : groupNameValuePairs) {
                                builder.addTextBody(nameValuePair.getName(), nameValuePair.getValue(),
                                        ContentType.create("text/plain", MIME.UTF8_CHARSET));
                            }

                            // Adding the list of group members
                            doPost(hostname, port, groupPath + ".rw.userprops.html", "admin", adminPassword,
                                    builder.build(), null);
                        } else {
                            logger.info("We have more than one match for a group with this name!");
                        }
                    } catch (Exception e) {
                        logger.error(e.getMessage());
                    }
                }
                continue;
            }

            // Let's see if it's user related
            if (record.get(0).equals(USERS)) {
                // First we need to get the path to the user node
                String json = doGet(hostname, port, "/libs/granite/security/currentuser.json",
                        getUserName(record.get(1)), getPassword(record.get(1), adminPassword), null);
                if (json != null) {
                    try {
                        // Fetching the home property
                        String home = new JSONObject(json).getString("home");
                        if (record.get(2).equals(PREFERENCES)) {
                            home = home + "/preferences";
                        } else {
                            home = home + "/profile";
                        }
                        logger.debug(home);

                        // Now we can post all the preferences or the profile
                        List<NameValuePair> nameValuePairs = buildNVP(record, 3);
                        doPost(hostname, port, home, "admin", adminPassword,
                                new UrlEncodedFormEntity(nameValuePairs), null);
                    } catch (Exception e) {
                        logger.error(e.getMessage());
                    }
                }
                continue;
            }

            // Let's see if we deal with a new block of content or just a new entry
            if (record.get(0).equals(CALENDAR) || record.get(0).equals(SLINGPOST)
                    || record.get(0).equals(RATINGS) || record.get(0).equals(BLOG)
                    || record.get(0).equals(JOURNAL) || record.get(0).equals(COMMENTS)
                    || record.get(0).equals(REVIEWS) || record.get(0).equals(FILES)
                    || record.get(0).equals(SUMMARY) || record.get(0).equals(ACTIVITIES)
                    || record.get(0).equals(JOIN) || record.get(0).equals(FOLLOW)
                    || record.get(0).equals(MESSAGE) || record.get(0).equals(ASSET)
                    || record.get(0).equals(AVATAR) || record.get(0).equals(RESOURCE)
                    || record.get(0).equals(LEARNING) || record.get(0).equals(QNA)
                    || record.get(0).equals(FORUM)) {
                // New block of content, we need to reset the processing to the first level
                componentType = record.get(0);
                url[0] = record.get(1);
                urlLevel = 0;
                if (!componentType.equals(SLINGPOST) && reset) {
                    int pos = record.get(1).indexOf("/jcr:content");
                    if (pos > 0)
                        doDelete(hostname, port, "/content/usergenerated" + record.get(1).substring(0, pos),
                                "admin", adminPassword);
                }

                // If the Configure command line flag is set, we try to configure the component with all options enabled
                if (componentType.equals(SLINGPOST) || configure) {
                    String configurePath = getConfigurePath(record.get(1));
                    List<NameValuePair> nameValuePairs = buildNVP(record, 2);
                    if (nameValuePairs.size() > 2) // Only do this when we really have configuration settings
                        doPost(hostname, port, configurePath, "admin", adminPassword,
                                new UrlEncodedFormEntity(nameValuePairs), null);
                }

                // We're done with this line, moving on to the next line in the CSV file
                continue;
            }

            // We need a valid level indicator to proceed
            if (record.get(1).length() != 1)
                continue;

            // Let's see if we need to indent the list, if it's a reply or a reply to a reply
            if (Integer.parseInt(record.get(1)) > urlLevel) {
                url[++urlLevel] = location;
                logger.debug("Incremented urlLevel to: " + urlLevel + ", with a new location:" + location);
            } else if (Integer.parseInt(record.get(1)) < urlLevel) {
                urlLevel = Integer.parseInt(record.get(1));
                logger.debug("Decremented urlLevel to: " + urlLevel);
            }

            // Get the credentials or fall back to the admin password
            String password = getPassword(record.get(0), adminPassword);
            String userName = getUserName(record.get(0));

            // Adding the generic properties for all POST requests
            List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
            if (!componentType.equals(RESOURCE))
                nameValuePairs.add(new BasicNameValuePair("id", "nobot"));
            nameValuePairs.add(new BasicNameValuePair("_charset_", "UTF-8"));

            // Setting some specific fields depending on the content type
            if (componentType.equals(COMMENTS)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:createComment"));
                nameValuePairs.add(new BasicNameValuePair("message", record.get(2)));
            }

            // Creates a forum post (or reply)
            if (componentType.equals(FORUM)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:createForumPost"));
                nameValuePairs.add(new BasicNameValuePair("subject", record.get(2)));
                nameValuePairs.add(new BasicNameValuePair("message", record.get(3)));
            }

            // Follows a user (followedId) for the user posting the request
            if (componentType.equals(FOLLOW)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:follow"));
                nameValuePairs.add(new BasicNameValuePair("userId", "/social/authors/" + userName));
                nameValuePairs.add(new BasicNameValuePair("followedId", "/social/authors/" + record.get(2)));
            }

            // Uploading an Avatar picture
            if (componentType.equals(AVATAR)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:changeAvatar"));
            }

            // Joins a user (posting the request) to a Community Group (path)
            if (componentType.equals(JOIN)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:joinCommunityGroup"));
                int pos = url[0].indexOf("/configuration.social.json");
                if (pos > 0)
                    nameValuePairs.add(new BasicNameValuePair("path", url[0].substring(0, pos) + ".html"));
                else
                    continue; // Invalid record
            }

            // Creates a new private message
            if (componentType.equals(MESSAGE)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:createMessage"));
                nameValuePairs.add(new BasicNameValuePair("sendMail", "Sending..."));
                nameValuePairs.add(new BasicNameValuePair("content", record.get(4)));
                nameValuePairs.add(new BasicNameValuePair("subject", record.get(3)));
                nameValuePairs.add(new BasicNameValuePair("serviceSelector", "/bin/community"));
                nameValuePairs.add(new BasicNameValuePair("to", "/social/authors/" + record.get(2)));
                nameValuePairs.add(new BasicNameValuePair("userId", "/social/authors/" + record.get(2)));
                nameValuePairs.add(new BasicNameValuePair(":redirect", "//messaging.html"));
                nameValuePairs.add(new BasicNameValuePair(":formid", "generic_form"));
                nameValuePairs.add(new BasicNameValuePair(":formstart",
                        "/content/sites/communities/messaging/compose/jcr:content/content/primary/start"));
            }

            // Creates a file or a folder
            if (componentType.equals(FILES)) {
                // Top level is always assumed to be a folder, second level files,
                // and third and subsequent levels comments on files
                if (urlLevel == 0) {
                    nameValuePairs.add(new BasicNameValuePair(":operation", "social:createFileLibraryFolder"));
                    nameValuePairs.add(new BasicNameValuePair("name", record.get(2)));
                    nameValuePairs.add(new BasicNameValuePair("message", record.get(3)));
                } else if (urlLevel == 1) {
                    nameValuePairs.add(new BasicNameValuePair(":operation", "social:createComment"));
                }
            }

            // Creates a question, a reply, or marks a reply as the best answer
            if (componentType.equals(QNA)) {
                if (urlLevel == 0) {
                    nameValuePairs.add(new BasicNameValuePair(":operation", "social:createQnaPost"));
                    nameValuePairs.add(new BasicNameValuePair("subject", record.get(2)));
                    nameValuePairs.add(new BasicNameValuePair("message", record.get(3)));
                } else if (urlLevel == 1) {
                    nameValuePairs.add(new BasicNameValuePair(":operation", "social:createQnaPost"));
                    nameValuePairs.add(new BasicNameValuePair("message", record.get(3)));
                } else if (urlLevel == 2) {
                    nameValuePairs.add(new BasicNameValuePair(":operation", "social:selectAnswer"));
                }
            }

            // Creates an article or a comment
            if (componentType.equals(JOURNAL) || componentType.equals(BLOG)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:createJournalComment"));
                nameValuePairs.add(new BasicNameValuePair("subject", record.get(2)));
                StringBuffer message = new StringBuffer("<p>" + record.get(3) + "</p>");

                // We might have more paragraphs to add to the blog or journal article
                for (int i = 6; i < record.size(); i++) {
                    if (record.get(i).length() > 0) {
                        message.append("<p>" + record.get(i) + "</p>");
                    }
                }

                // We might have some tags to add to the blog or journal article
                if (record.get(5).length() > 0) {
                    nameValuePairs.add(new BasicNameValuePair("tags", record.get(5)));
                }
                nameValuePairs.add(new BasicNameValuePair("message", message.toString()));
            }

            // Creates a review or a comment
            if (componentType.equals(REVIEWS)) {
                nameValuePairs.add(new BasicNameValuePair("message", record.get(2)));

                // This might be a top level review, or a comment on a review or another comment
                if (urlLevel == 0) {
                    nameValuePairs.add(new BasicNameValuePair(":operation", "social:createReview"));
                    nameValuePairs.add(new BasicNameValuePair("ratings", record.get(3)));
                    if (record.size() > 4 && record.get(4).length() > 0) {
                        // If we are dealing with a non-existent resource, then the design drives the behavior
                        nameValuePairs.add(new BasicNameValuePair("scf:resourceType",
                                "social/reviews/components/hbs/reviews"));
                        nameValuePairs.add(new BasicNameValuePair("scf:included", record.get(4)));
                    }
                } else {
                    nameValuePairs.add(new BasicNameValuePair(":operation", "social:createComment"));
                }
            }

            // Creates a rating
            if (componentType.equals(RATINGS)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:postTallyResponse"));
                nameValuePairs.add(new BasicNameValuePair("tallyType", "Rating"));
                nameValuePairs.add(new BasicNameValuePair("response", record.get(2)));
            }

            // Creates a DAM asset
            if (componentType.equals(ASSET) && record.get(ASSET_INDEX_NAME).length() > 0) {
                nameValuePairs.add(new BasicNameValuePair("fileName", record.get(ASSET_INDEX_NAME)));
            }

            // Creates an enablement resource
            if (componentType.equals(RESOURCE)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "se:createResource"));
                List<NameValuePair> otherNameValuePairs = buildNVP(record, RESOURCE_INDEX_PROPERTIES);
                nameValuePairs.addAll(otherNameValuePairs);

                // Adding the site
                nameValuePairs.add(new BasicNameValuePair("site",
                        "/content/sites/" + record.get(RESOURCE_INDEX_SITE) + "/resources/en"));

                // Building the cover image fragment
                if (record.get(RESOURCE_INDEX_THUMBNAIL).length() > 0) {
                    nameValuePairs.add(new BasicNameValuePair("cover-image", doThumbnail(hostname, port,
                            adminPassword, csvfile, record.get(RESOURCE_INDEX_THUMBNAIL))));
                } else {
                    nameValuePairs.add(new BasicNameValuePair("cover-image", ""));
                }

                // Building the asset fragment
                String coverPath = "/content/dam/" + record.get(RESOURCE_INDEX_SITE) + "/resource-assets/"
                        + record.get(2) + "/jcr:content/renditions/cq5dam.thumbnail.319.319.png";
                String coverSource = "dam";
                String assets = "[{\"cover-img-path\":\"" + coverPath + "\",\"thumbnail-source\":\""
                        + coverSource
                        + "\",\"asset-category\":\"enablementAsset:dam\",\"resource-asset-name\":null,\"state\":\"A\",\"asset-path\":\"/content/dam/"
                        + record.get(RESOURCE_INDEX_SITE) + "/resource-assets/" + record.get(2) + "\"}]";
                nameValuePairs.add(new BasicNameValuePair("assets", assets));
                logger.debug("assets:" + assets);
            }

            // Creates a learning path
            if (componentType.equals(LEARNING)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "se:editLearningPath"));
                List<NameValuePair> otherNameValuePairs = buildNVP(record, RESOURCE_INDEX_PROPERTIES);
                nameValuePairs.addAll(otherNameValuePairs);

                // Adding the site
                nameValuePairs.add(new BasicNameValuePair("site",
                        "/content/sites/" + record.get(RESOURCE_INDEX_SITE) + "/resources/en"));

                // Building the cover image fragment
                if (record.get(RESOURCE_INDEX_THUMBNAIL).length() > 0) {
                    nameValuePairs.add(new BasicNameValuePair("card-image", doThumbnail(hostname, port,
                            adminPassword, csvfile, record.get(RESOURCE_INDEX_THUMBNAIL))));
                }

                // Building the learning path fragment
                StringBuffer assets = new StringBuffer("[\"");
                if (learningpaths.get(record.get(2)) != null) {
                    ArrayList<String> paths = learningpaths.get(record.get(2));
                    int i = 0;
                    for (String path : paths) {
                        assets.append("{\\\"type\\\":\\\"linked-resource\\\",\\\"path\\\":\\\"");
                        assets.append(path);
                        assets.append("\\\"}");
                        if (i++ < paths.size() - 1) {
                            assets.append("\",\"");
                        }
                    }
                } else {
                    logger.debug("No asset for this learning path");
                }
                assets.append("\"]");
                nameValuePairs.add(new BasicNameValuePair("learningpath-items", assets.toString()));
                logger.debug("Learning path:" + assets.toString());
            }

            // Creates a calendar event
            if (componentType.equals(CALENDAR)) {
                nameValuePairs.add(new BasicNameValuePair(":operation", "social:createEvent"));
                try {
                    JSONObject event = new JSONObject();

                    // Building the JSON fragment for a new calendar event
                    event.accumulate("subject", record.get(2));
                    event.accumulate("message", record.get(3));
                    event.accumulate("location", record.get(4));
                    event.accumulate("tags", "");
                    event.accumulate("undefined", "update");
                    String startDate = record.get(5);
                    startDate = startDate.replaceAll("YYYY",
                            Integer.toString(Calendar.getInstance().get(Calendar.YEAR)));
                    startDate = startDate.replaceAll("MM",
                            Integer.toString(1 + Calendar.getInstance().get(Calendar.MONTH)));
                    event.accumulate("start", startDate);
                    String endDate = record.get(6);
                    endDate = endDate.replaceAll("YYYY",
                            Integer.toString(Calendar.getInstance().get(Calendar.YEAR)));
                    endDate = endDate.replaceAll("MM",
                            Integer.toString(1 + Calendar.getInstance().get(Calendar.MONTH)));
                    event.accumulate("end", endDate);
                    nameValuePairs.add(new BasicNameValuePair("event", event.toString()));
                } catch (Exception ex) {
                    logger.error(ex.getMessage());
                }
            }

            // Constructing a multi-part POST request
            MultipartEntityBuilder builder = MultipartEntityBuilder.create();
            builder.setCharset(MIME.UTF8_CHARSET);
            builder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
            for (NameValuePair nameValuePair : nameValuePairs) {
                builder.addTextBody(nameValuePair.getName(), nameValuePair.getValue(),
                        ContentType.create("text/plain", MIME.UTF8_CHARSET));
            }

            // See if we have attachments for this new post - or some other actions require a form nonetheless
            if ((componentType.equals(ASSET) || componentType.equals(AVATAR) || componentType.equals(FORUM)
                    || componentType.equals(JOURNAL) || componentType.equals(BLOG)) && record.size() > 4
                    && record.get(ASSET_INDEX_NAME).length() > 0) {
                File attachment = new File(csvfile.substring(0, csvfile.indexOf(".csv")) + File.separator
                        + record.get(ASSET_INDEX_NAME));
                ContentType ct = ContentType.MULTIPART_FORM_DATA;
                if (record.get(ASSET_INDEX_NAME).indexOf(".mp4") > 0) {
                    ct = ContentType.create("video/mp4", MIME.UTF8_CHARSET);
                } else if (record.get(ASSET_INDEX_NAME).indexOf(".jpg") > 0
                        || record.get(ASSET_INDEX_NAME).indexOf(".jpeg") > 0) {
                    ct = ContentType.create("image/jpeg", MIME.UTF8_CHARSET);
                } else if (record.get(ASSET_INDEX_NAME).indexOf(".png") > 0) {
                    ct = ContentType.create("image/png", MIME.UTF8_CHARSET);
                } else if (record.get(ASSET_INDEX_NAME).indexOf(".pdf") > 0) {
                    ct = ContentType.create("application/pdf", MIME.UTF8_CHARSET);
                } else if (record.get(ASSET_INDEX_NAME).indexOf(".zip") > 0) {
                    ct = ContentType.create("application/zip", MIME.UTF8_CHARSET);
                }
                builder.addBinaryBody("file", attachment, ct, attachment.getName());
                logger.debug("Adding file to payload with name: " + attachment.getName() + " and type: "
                        + ct.getMimeType());
            }

            // If it's a resource or a learning path, we need the path to the resource for subsequent publishing
            String jsonElement = "location";
            if (componentType.equals(RESOURCE)) {
                jsonElement = "changes/argument";
            }
            if (componentType.equals(LEARNING)) {
                jsonElement = "path";
            }
            if (componentType.equals(ASSET)) {
                jsonElement = null;
            }

            // This call generally returns the path to the content fragment that was just created
            location = Loader.doPost(hostname, port, url[urlLevel], userName, password, builder.build(),
                    jsonElement);

            // If we are loading a DAM asset, we wait for all renditions to be generated before proceeding
            if (componentType.equals(ASSET)) {
                int pathIndex = url[urlLevel].lastIndexOf(".createasset.html");
                if (pathIndex > 0)
                    doWaitPath(hostname, port, adminPassword, url[urlLevel].substring(0, pathIndex) + "/"
                            + record.get(ASSET_INDEX_NAME) + "/jcr:content/renditions", "nt:file");
            }

            // Let's see if it needs to be added to a learning path
            if (componentType.equals(RESOURCE) && record.get(RESOURCE_INDEX_PATH).length() > 0
                    && location != null) {
                // Adding the location to a list of resources for this particular learning path
                if (learningpaths.get(record.get(RESOURCE_INDEX_PATH)) == null)
                    learningpaths.put(record.get(RESOURCE_INDEX_PATH), new ArrayList<String>());
                logger.debug("Adding resource to learning path: " + record.get(RESOURCE_INDEX_PATH));
                ArrayList<String> locations = learningpaths.get(record.get(RESOURCE_INDEX_PATH));
                locations.add(location);
                learningpaths.put(record.get(RESOURCE_INDEX_PATH), locations);
            }

            // If it's a learning path, we publish it when possible
            if (componentType.equals(LEARNING) && !port.equals(altport) && location != null) {
                // Publishing the learning path
                List<NameValuePair> publishNameValuePairs = new ArrayList<NameValuePair>();
                publishNameValuePairs.add(new BasicNameValuePair(":operation", "se:publishEnablementContent"));
                publishNameValuePairs.add(new BasicNameValuePair("replication-action", "activate"));
                logger.debug("Publishing a learning path from: " + location);
                Loader.doPost(hostname, port, location, userName, password,
                        new UrlEncodedFormEntity(publishNameValuePairs), null);

                // Waiting for the learning path to be published
                // (only search for groups with the learning path in it)
                Loader.doWait(hostname, altport, "admin", adminPassword,
                        location.substring(1 + location.lastIndexOf("/")));

                // Decorate the resources within the learning path with comments and ratings, randomly generated
                ArrayList<String> paths = learningpaths.get(record.get(2));
                for (String path : paths) {
                    doDecorate(hostname, altport, path, record, analytics);
                }
            }

            // If it's an Enablement Resource, a lot of things need to happen...
            // Step 1. If it's a SCORM resource, we wait for the SCORM metadata workflow to complete before proceeding
            // Step 2. We publish the resource
            // Step 3. We set a new first published date on the resource (3 weeks earlier) so that reporting data is more meaningful
            // Step 4. We wait for the resource to be available on publish (checking that associated groups are available)
            // Step 5. We retrieve the json for the resource on publish to retrieve the Social endpoints
            // Step 6. We post ratings and comments for each of the enrollees on publish
            if (componentType.equals(RESOURCE) && !port.equals(altport) && location != null) {
                // Wait for the data to be fully copied
                doWaitPath(hostname, port, adminPassword, location + "/assets/asset", "nt:file");

                // If we are dealing with a SCORM asset, we wait a little bit before publishing
                // the resource so that the SCORM workflow is completed
                if (record.get(2).indexOf(".zip") > 0) {
                    doSleep(10000, "SCORM Resource, waiting for workflow to complete");
                }

                // Publishing the resource
                List<NameValuePair> publishNameValuePairs = new ArrayList<NameValuePair>();
                publishNameValuePairs.add(new BasicNameValuePair(":operation", "se:publishEnablementContent"));
                publishNameValuePairs.add(new BasicNameValuePair("replication-action", "activate"));
                logger.debug("Publishing a resource from: " + location);
                Loader.doPost(hostname, port, location, userName, password,
                        new UrlEncodedFormEntity(publishNameValuePairs), null);

                // Waiting for the resource to be published
                // (only search for groups with the resource path in it)
                Loader.doWait(hostname, altport, "admin", adminPassword,
                        location.substring(1 + location.lastIndexOf("/")));

                // Setting the first published timestamp so that reporting always comes with
                // 3 weeks of data after building a new demo instance
                DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
                Calendar cal = Calendar.getInstance();
                cal.add(Calendar.DATE, REPORTINGDAYS);
                List<NameValuePair> publishDateNameValuePairs = new ArrayList<NameValuePair>();
                publishDateNameValuePairs
                        .add(new BasicNameValuePair("date-first-published", dateFormat.format(cal.getTime())));
                logger.debug("Setting the publish date for a resource from: " + location);
                doPost(hostname, port, location, userName, password,
                        new UrlEncodedFormEntity(publishDateNameValuePairs), null);

                // Adding comments and ratings for this resource
                doDecorate(hostname, altport, location, record, analytics);
            }
        }
    } catch (IOException e) {
        logger.error(e.getMessage());
    }
}
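The densest ArrayList.add usage in this loader is the learningpaths map: when an enablement resource names a learning path, the code looks up that path's list, creates it on first use, and appends the new resource location. A reduced, self-contained sketch of just that get-or-create pattern; the path names and locations are invented placeholders.

import java.util.ArrayList;
import java.util.HashMap;

public class LearningPathIndex {
    public static void main(String[] args) {
        HashMap<String, ArrayList<String>> learningpaths = new HashMap<String, ArrayList<String>>();

        // Each pair is (learning path name, resource location); values are illustrative only
        String[][] resources = { { "path-a", "/content/resources/1" },
                { "path-a", "/content/resources/2" }, { "path-b", "/content/resources/3" } };

        for (String[] resource : resources) {
            String path = resource[0];
            String location = resource[1];
            if (learningpaths.get(path) == null) {
                learningpaths.put(path, new ArrayList<String>()); // create the bucket on first use
            }
            ArrayList<String> locations = learningpaths.get(path);
            locations.add(location); // append to the same mutable list stored in the map
        }
        System.out.println(learningpaths);
    }
}

Note that the loader re-puts the list after appending (learningpaths.put(..., locations)); that is harmless but redundant, since get() already returned a reference to the list stored in the map.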
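The other recurring pattern above is assembling a List<NameValuePair> request body: generic properties are added first, component-specific ones afterwards, and the ordered list is finally wrapped in a UrlEncodedFormEntity (or copied pair-by-pair into a MultipartEntityBuilder). A sketch of that assembly order using the same Apache HttpClient 4.x types; the property values are illustrative and no request is actually sent.

import java.util.ArrayList;
import java.util.List;

import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.message.BasicNameValuePair;

public class FormBodySketch {
    public static void main(String[] args) throws Exception {
        List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();

        // Generic properties shared by every POST in the loader
        nameValuePairs.add(new BasicNameValuePair("id", "nobot"));
        nameValuePairs.add(new BasicNameValuePair("_charset_", "UTF-8"));

        // Component-specific properties appended afterwards (values illustrative)
        nameValuePairs.add(new BasicNameValuePair(":operation", "social:createComment"));
        nameValuePairs.add(new BasicNameValuePair("message", "Hello from the loader"));

        // The ordered list is then wrapped into a form entity for the HTTP POST
        UrlEncodedFormEntity entity = new UrlEncodedFormEntity(nameValuePairs);
        System.out.println("Form body length: " + entity.getContentLength() + " bytes");
    }
}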