List of usage examples for java.lang.StringBuilder.toString()
@Override
@HotSpotIntrinsicCandidate
public String toString()
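Before the full examples below, here is a minimal, self-contained sketch (not taken from any of the projects listed on this page) of the pattern they all share: append pieces into a single mutable buffer, then call toString() once to materialize the result.

import java.util.Arrays;
import java.util.List;

public class StringBuilderToStringExample {
    public static void main(String[] args) {
        List<String> lines = Arrays.asList("first line", "second line", "third line");

        // Accumulate all pieces in one mutable buffer instead of
        // repeated String concatenation inside the loop.
        StringBuilder sb = new StringBuilder();
        for (String line : lines) {
            sb.append(line).append('\n');
        }

        // toString() copies the buffer's current contents into a new,
        // immutable String; the builder can still be reused afterwards.
        String result = sb.toString();
        System.out.print(result);
    }
}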
From source file:com.era7.bioinfo.annotation.AutomaticQualityControl.java
public static void main(String[] args) { if (args.length != 4) { System.out.println("This program expects four parameters: \n" + "1. Gene annotation XML filename \n" + "2. Reference protein set (.fasta)\n" + "3. Output TXT filename\n" + "4. Initial Blast XML results filename (the one used at the very beginning of the semiautomatic annotation process)\n"); } else {//from w ww .j a v a 2s. com BufferedWriter outBuff = null; try { File inFile = new File(args[0]); File fastaFile = new File(args[1]); File outFile = new File(args[2]); File blastFile = new File(args[3]); //Primero cargo todos los datos del archivo xml del blast BufferedReader buffReader = new BufferedReader(new FileReader(blastFile)); StringBuilder stBuilder = new StringBuilder(); String line = null; while ((line = buffReader.readLine()) != null) { stBuilder.append(line); } buffReader.close(); System.out.println("Creating blastoutput..."); BlastOutput blastOutput = new BlastOutput(stBuilder.toString()); System.out.println("BlastOutput created! :)"); stBuilder.delete(0, stBuilder.length()); HashMap<String, String> blastProteinsMap = new HashMap<String, String>(); ArrayList<Iteration> iterations = blastOutput.getBlastOutputIterations(); for (Iteration iteration : iterations) { blastProteinsMap.put(iteration.getQueryDef().split("\\|")[1].trim(), iteration.toString()); } //freeing some memory blastOutput = null; //------------------------------------------------------------------------ //Initializing writer for output file outBuff = new BufferedWriter(new FileWriter(outFile)); //reading gene annotation xml file..... buffReader = new BufferedReader(new FileReader(inFile)); stBuilder = new StringBuilder(); line = null; while ((line = buffReader.readLine()) != null) { stBuilder.append(line); } buffReader.close(); XMLElement genesXML = new XMLElement(stBuilder.toString()); //freeing some memory I don't need anymore stBuilder.delete(0, stBuilder.length()); //reading file with the reference proteins set ArrayList<String> proteinsReferenceSet = new ArrayList<String>(); buffReader = new BufferedReader(new FileReader(fastaFile)); while ((line = buffReader.readLine()) != null) { if (line.charAt(0) == '>') { proteinsReferenceSet.add(line.split("\\|")[1]); } } buffReader.close(); Element pGenes = genesXML.asJDomElement().getChild(PredictedGenes.TAG_NAME); List<Element> contigs = pGenes.getChildren(ContigXML.TAG_NAME); System.out.println("There are " + contigs.size() + " contigs to be checked... "); outBuff.write("There are " + contigs.size() + " contigs to be checked... 
\n"); outBuff.write("Proteins reference set: \n"); for (String st : proteinsReferenceSet) { outBuff.write(st + ","); } outBuff.write("\n"); for (Element elem : contigs) { ContigXML contig = new ContigXML(elem); //escribo el id del contig en el que estoy outBuff.write("Checking contig: " + contig.getId() + "\n"); outBuff.flush(); List<XMLElement> geneList = contig.getChildrenWith(PredictedGene.TAG_NAME); System.out.println("geneList.size() = " + geneList.size()); int numeroDeGenesParaAnalizar = geneList.size() / FACTOR; if (numeroDeGenesParaAnalizar == 0) { numeroDeGenesParaAnalizar++; } ArrayList<Integer> indicesUtilizados = new ArrayList<Integer>(); outBuff.write("\nThe contig has " + geneList.size() + " predicted genes, let's analyze: " + numeroDeGenesParaAnalizar + "\n"); for (int j = 0; j < numeroDeGenesParaAnalizar; j++) { int geneIndex; boolean geneIsDismissed = false; do { geneIsDismissed = false; geneIndex = (int) Math.round(Math.floor(Math.random() * geneList.size())); PredictedGene tempGene = new PredictedGene(geneList.get(geneIndex).asJDomElement()); if (tempGene.getStatus().equals(PredictedGene.STATUS_DISMISSED)) { geneIsDismissed = true; } } while (indicesUtilizados.contains(new Integer(geneIndex)) && geneIsDismissed); indicesUtilizados.add(geneIndex); System.out.println("geneIndex = " + geneIndex); //Ahora hay que sacar el gen correspondiente al indice y hacer el control de calidad PredictedGene gene = new PredictedGene(geneList.get(geneIndex).asJDomElement()); outBuff.write("\nAnalyzing gene with id: " + gene.getId() + " , annotation uniprot id: " + gene.getAnnotationUniprotId() + "\n"); outBuff.write("eValue: " + gene.getEvalue() + "\n"); //--------------PETICION POST HTTP BLAST---------------------- PostMethod post = new PostMethod(BLAST_URL); post.addParameter("program", "blastx"); post.addParameter("sequence", gene.getSequence()); post.addParameter("database", "uniprotkb"); post.addParameter("email", "ppareja@era7.com"); post.addParameter("exp", "1e-10"); post.addParameter("stype", "dna"); // execute the POST HttpClient client = new HttpClient(); int status = client.executeMethod(post); System.out.println("status post = " + status); InputStream inStream = post.getResponseBodyAsStream(); String fileName = "jobid.txt"; FileOutputStream outStream = new FileOutputStream(new File(fileName)); byte[] buffer = new byte[1024]; int len; while ((len = inStream.read(buffer)) != -1) { outStream.write(buffer, 0, len); } outStream.close(); //Once the file is created I just have to read one line in order to extract the job id buffReader = new BufferedReader(new FileReader(new File(fileName))); String jobId = buffReader.readLine(); buffReader.close(); System.out.println("jobId = " + jobId); //--------------HTTP CHECK JOB STATUS REQUEST---------------------- GetMethod get = new GetMethod(CHECK_JOB_STATUS_URL + jobId); String jobStatus = ""; do { try { Thread.sleep(1000);//sleep for 1000 ms } catch (InterruptedException ie) { //If this thread was intrrupted by nother thread } status = client.executeMethod(get); //System.out.println("status get = " + status); inStream = get.getResponseBodyAsStream(); fileName = "jobStatus.txt"; outStream = new FileOutputStream(new File(fileName)); while ((len = inStream.read(buffer)) != -1) { outStream.write(buffer, 0, len); } outStream.close(); //Once the file is created I just have to read one line in order to extract the job id buffReader = new BufferedReader(new FileReader(new File(fileName))); jobStatus = buffReader.readLine(); 
//System.out.println("jobStatus = " + jobStatus); buffReader.close(); } while (!jobStatus.equals(FINISHED_JOB_STATUS)); //Once I'm here the blast should've already finished //--------------JOB RESULTS HTTP REQUEST---------------------- get = new GetMethod(JOB_RESULT_URL + jobId + "/out"); status = client.executeMethod(get); System.out.println("status get = " + status); inStream = get.getResponseBodyAsStream(); fileName = "jobResults.txt"; outStream = new FileOutputStream(new File(fileName)); while ((len = inStream.read(buffer)) != -1) { outStream.write(buffer, 0, len); } outStream.close(); //--------parsing the blast results file----- TreeSet<GeneEValuePair> featuresBlast = new TreeSet<GeneEValuePair>(); buffReader = new BufferedReader(new FileReader(new File(fileName))); while ((line = buffReader.readLine()) != null) { if (line.length() > 3) { String prefix = line.substring(0, 3); if (prefix.equals("TR:") || prefix.equals("SP:")) { String[] columns = line.split(" "); String id = columns[1]; //System.out.println("id = " + id); String e = ""; String[] arraySt = line.split("\\.\\.\\."); if (arraySt.length > 1) { arraySt = arraySt[1].trim().split(" "); int contador = 0; for (int k = 0; k < arraySt.length && contador <= 2; k++) { String string = arraySt[k]; if (!string.equals("")) { contador++; if (contador == 2) { e = string; } } } } else { //Number before e- String[] arr = arraySt[0].split("e-")[0].split(" "); String numeroAntesE = arr[arr.length - 1]; String numeroDespuesE = arraySt[0].split("e-")[1].split(" ")[0]; e = numeroAntesE + "e-" + numeroDespuesE; } double eValue = Double.parseDouble(e); //System.out.println("eValue = " + eValue); GeneEValuePair g = new GeneEValuePair(id, eValue); featuresBlast.add(g); } } } GeneEValuePair currentGeneEValuePair = new GeneEValuePair(gene.getAnnotationUniprotId(), gene.getEvalue()); System.out.println("currentGeneEValuePair.id = " + currentGeneEValuePair.id); System.out.println("currentGeneEValuePair.eValue = " + currentGeneEValuePair.eValue); boolean blastContainsGene = false; for (GeneEValuePair geneEValuePair : featuresBlast) { if (geneEValuePair.id.equals(currentGeneEValuePair.id)) { blastContainsGene = true; //le pongo la e que tiene en el wu-blast para poder comparar currentGeneEValuePair.eValue = geneEValuePair.eValue; break; } } if (blastContainsGene) { outBuff.write("The protein was found in the WU-BLAST result.. 
\n"); //Una vez que se que esta en el blast tengo que ver que sea la mejor GeneEValuePair first = featuresBlast.first(); outBuff.write("Protein with best eValue according to the WU-BLAST result: " + first.id + " , " + first.eValue + "\n"); if (first.id.equals(currentGeneEValuePair.id)) { outBuff.write("Proteins with best eValue match up \n"); } else { if (first.eValue == currentGeneEValuePair.eValue) { outBuff.write( "The one with best eValue is not the same protein but has the same eValue \n"); } else if (first.eValue > currentGeneEValuePair.eValue) { outBuff.write( "The one with best eValue is not the same protein but has a worse eValue :) \n"); } else { outBuff.write( "The best protein from BLAST has an eValue smaller than ours, checking if it's part of the reference set...\n"); //System.exit(-1); if (proteinsReferenceSet.contains(first.id)) { //The protein is in the reference set and that shouldn't happen outBuff.write( "The protein was found on the reference set, checking if it belongs to the same contig...\n"); String iterationSt = blastProteinsMap.get(gene.getAnnotationUniprotId()); if (iterationSt != null) { outBuff.write( "The protein was found in the BLAST used at the beginning of the annotation process.\n"); Iteration iteration = new Iteration(iterationSt); ArrayList<Hit> hits = iteration.getIterationHits(); boolean contigFound = false; Hit errorHit = null; for (Hit hit : hits) { if (hit.getHitDef().indexOf(contig.getId()) >= 0) { contigFound = true; errorHit = hit; break; } } if (contigFound) { outBuff.write( "ERROR: A hit from the same contig was find in the Blast file: \n" + errorHit.toString() + "\n"); } else { outBuff.write("There is no hit with the same contig! :)\n"); } } else { outBuff.write( "The protein is NOT in the BLAST used at the beginning of the annotation process.\n"); } } else { //The protein was not found on the reference set so everything's ok outBuff.write( "The protein was not found on the reference, everything's ok :)\n"); } } } } else { outBuff.write("The protein was NOT found on the WU-BLAST !! :( \n"); //System.exit(-1); } } } } catch (Exception ex) { ex.printStackTrace(); } finally { try { //closing outputfile outBuff.close(); } catch (IOException ex) { Logger.getLogger(AutomaticQualityControl.class.getName()).log(Level.SEVERE, null, ex); } } } }
From source file:com.twitter.hraven.rest.client.HRavenRestClient.java
public static void main(String[] args) throws IOException {
    String apiHostname = null;
    String cluster = null;
    String username = null;
    String batchDesc = null;
    String signature = null;
    int limit = 2;
    boolean useHBaseAPI = false;
    boolean dumpJson = false;
    boolean hydrateTasks = false;
    List<String> taskResponseFilters = new ArrayList<String>();
    List<String> jobResponseFilters = new ArrayList<String>();
    List<String> flowResponseFilters = new ArrayList<String>();
    List<String> configFields = new ArrayList<String>();

    StringBuffer usage = new StringBuffer("Usage: java ");
    usage.append(HRavenRestClient.class.getName()).append(" [-options]\n");
    usage.append("Returns data from recent flows and their associated jobs\n");
    usage.append("where options include: \n");
    usage.append(" -a <API hostname> [required]\n");
    usage.append(" -c <cluster> [required]\n");
    usage.append(" -u <username> [required]\n");
    usage.append(" -f <flowName> [required]\n");
    usage.append(" -s <signature>\n");
    usage.append(" -l <limit>\n");
    usage.append(" -h - print this message and return\n");
    usage.append(" -H - use HBase API, not the REST API\n");
    usage.append(" -j - output json\n");
    usage.append(" -t - retrieve task information as well");
    usage.append(" -w - config field to be included in job response");
    usage.append(" -z - field to be included in task response");
    usage.append(" -y - field to be included in job response");
    usage.append(" -x - field to be included in flow response");

    for (int i = 0; i < args.length; i++) {
        if ("-a".equals(args[i])) {
            apiHostname = args[++i];
            continue;
        } else if ("-c".equals(args[i])) {
            cluster = args[++i];
            continue;
        } else if ("-u".equals(args[i])) {
            username = args[++i];
            continue;
        } else if ("-f".equals(args[i])) {
            batchDesc = args[++i];
            continue;
        } else if ("-s".equals(args[i])) {
            signature = args[++i];
            continue;
        } else if ("-l".equals(args[i])) {
            limit = Integer.parseInt(args[++i]);
            continue;
        } else if ("-H".equals(args[i])) {
            useHBaseAPI = true;
            continue;
        } else if ("-j".equals(args[i])) {
            dumpJson = true;
            continue;
        } else if ("-t".equals(args[i])) {
            hydrateTasks = true;
            continue;
        } else if ("-z".equals(args[i])) {
            String taskFilters = args[++i];
            taskResponseFilters = Arrays.asList(taskFilters.split(","));
            continue;
        } else if ("-y".equals(args[i])) {
            String jobFilters = args[++i];
            jobResponseFilters = Arrays.asList(jobFilters.split(","));
            continue;
        } else if ("-x".equals(args[i])) {
            String flowFilters = args[++i];
            flowResponseFilters = Arrays.asList(flowFilters.split(","));
            continue;
        } else if ("-w".equals(args[i])) {
            String configFilters = args[++i];
            configFields = Arrays.asList(configFilters.split(","));
            continue;
        } else if ("-h".equals(args[i])) {
            System.err.println(usage.toString());
            System.exit(1);
        } else {
        }
    }

    if (apiHostname == null || cluster == null || username == null || batchDesc == null) {
        System.err.println(usage.toString());
        System.exit(1);
    }

    List<Flow> flows;
    if (useHBaseAPI) {
        JobHistoryService jobHistoryService = new JobHistoryService(HBaseConfiguration.create());
        flows = jobHistoryService.getFlowSeries(cluster, username, batchDesc, signature, hydrateTasks, limit);
    } else {
        HRavenRestClient client = new HRavenRestClient(apiHostname, 100000, 100000);

        // use this call to call flows without configs
        flows = client.fetchFlows(cluster, username, batchDesc, signature, flowResponseFilters,
                jobResponseFilters, limit);
        // use this call to call flows with configs
        flows = client.fetchFlowsWithConfig(cluster, username, batchDesc, signature, limit,
                flowResponseFilters, jobResponseFilters, configFields);
        // use this call to call flows with config patterns
        flows = client.fetchFlowsWithConfig(cluster, username, batchDesc, signature, limit,
                flowResponseFilters, jobResponseFilters, configFields);

        if (hydrateTasks) {
            for (Flow flow : flows) {
                for (JobDetails jd : flow.getJobs()) {
                    String jobId = jd.getJobId();
                    List<TaskDetails> td = client.fetchTaskDetails(cluster, jobId, taskResponseFilters);
                    jd.addTasks(td);
                }
            }
        }
    }

    if (dumpJson) {
        ObjectMapper om = ObjectMapperProvider.createCustomMapper();
        SimpleModule module = new SimpleModule("hRavenModule", new Version(0, 4, 0, null));
        module.addSerializer(Flow.class, new FlowSerializer());
        module.addSerializer(JobDetails.class, new JobDetailsSerializer());
        om.registerModule(module);
        if (flows.size() > 0) {
            System.out.println(om.writeValueAsString(flows.get(0)));
        }
        return;
    }

    System.out.println("Found " + flows.size() + " flows");
    StringBuilder sb = new StringBuilder();
    sb.append("\t\t").append("jobId");
    sb.append("\t\t").append("version");
    sb.append("\t\t").append("status");
    sb.append("\t").append("maps");
    sb.append("\t").append("reduces");
    sb.append("\t").append("rBytesRead");
    sb.append("\t").append("feature");
    sb.append("\t\t\t").append("alias");
    System.out.println(sb.toString());

    int i = 0;
    for (Flow flow : flows) {
        long minSubmitTime = -1, maxFinishTime = -1;
        for (JobDetails job : flow.getJobs()) {
            if (minSubmitTime == -1 && job.getSubmitTime() > 0) {
                minSubmitTime = job.getSubmitTime();
            }
            minSubmitTime = Math.min(minSubmitTime, job.getSubmitTime());
            maxFinishTime = Math.max(maxFinishTime, job.getFinishTime());
        }
        if (minSubmitTime > 0 && maxFinishTime > 0) {
            System.out.println(String.format("Flow #%d: %s - %s", i++, DATE_FORMAT.format(minSubmitTime),
                    DATE_FORMAT.format(maxFinishTime)));
        } else {
            System.out.println(String.format("Flow #%d:", i++));
        }
        for (JobDetails job : flow.getJobs()) {
            sb = new StringBuilder();
            sb.append(" - ").append(job.getJobId());
            sb.append("\t").append(job.getVersion());
            sb.append("\t").append(job.getStatus());
            sb.append("\t").append(job.getTotalMaps());
            sb.append("\t").append(job.getTotalReduces());
            long reduceBytesRead = job.getReduceCounters().getCounter("FileSystemCounters",
                    "FILE_BYTES_READ") != null
                            ? job.getReduceCounters().getCounter("FileSystemCounters", "FILE_BYTES_READ").getValue()
                            : -1;
            sb.append("\t").append(reduceBytesRead);
            sb.append("\t").append(job.getConfiguration().get("pig.job.feature"));
            sb.append("\t").append(job.getConfiguration().get("pig.alias"));
            System.out.println(sb.toString());
        }
    }
}
From source file:fi.iki.elonen.SimpleWebServer.java
/**
 * Starts as a standalone file server and waits for Enter.
 */
public static void main(String[] args) {
    // Defaults
    int port = 8080;

    String host = null; // bind to all interfaces by default
    List<File> rootDirs = new ArrayList<File>();
    boolean quiet = false;
    String cors = null;
    Map<String, String> options = new HashMap<String, String>();

    // Parse command-line, with short and long versions of the options.
    for (int i = 0; i < args.length; ++i) {
        if ("-h".equalsIgnoreCase(args[i]) || "--host".equalsIgnoreCase(args[i])) {
            host = args[i + 1];
        } else if ("-p".equalsIgnoreCase(args[i]) || "--port".equalsIgnoreCase(args[i])) {
            if (args[i + 1].equals("public")) {
                port = PUBLIC;
            } else if (args[i + 1].equals("private")) {
                port = PRIVATE;
            } else {
                port = Integer.parseInt(args[i + 1]);
            }
        } else if ("-q".equalsIgnoreCase(args[i]) || "--quiet".equalsIgnoreCase(args[i])) {
            quiet = true;
        } else if ("-d".equalsIgnoreCase(args[i]) || "--dir".equalsIgnoreCase(args[i])) {
            rootDirs.add(new File(args[i + 1]).getAbsoluteFile());
        } else if (args[i].startsWith("--cors")) {
            cors = "*";
            int equalIdx = args[i].indexOf('=');
            if (equalIdx > 0) {
                cors = args[i].substring(equalIdx + 1);
            }
        } else if ("--licence".equalsIgnoreCase(args[i])) {
            System.out.println(SimpleWebServer.LICENCE + "\n");
        } else if (args[i].startsWith("-X:")) {
            int dot = args[i].indexOf('=');
            if (dot > 0) {
                String name = args[i].substring(0, dot);
                String value = args[i].substring(dot + 1, args[i].length());
                options.put(name, value);
            }
        }
    }

    if (rootDirs.isEmpty()) {
        rootDirs.add(new File(".").getAbsoluteFile());
    }
    options.put("host", host);
    options.put("port", "" + port);
    options.put("quiet", String.valueOf(quiet));

    StringBuilder sb = new StringBuilder();
    for (File dir : rootDirs) {
        if (sb.length() > 0) {
            sb.append(":");
        }
        try {
            sb.append(dir.getCanonicalPath());
        } catch (IOException ignored) {
        }
    }
    options.put("home", sb.toString());

    ServiceLoader<WebServerPluginInfo> serviceLoader = ServiceLoader.load(WebServerPluginInfo.class);
    for (WebServerPluginInfo info : serviceLoader) {
        String[] mimeTypes = info.getMimeTypes();
        for (String mime : mimeTypes) {
            String[] indexFiles = info.getIndexFilesForMimeType(mime);
            if (!quiet) {
                System.out.print("# Found plugin for Mime type: \"" + mime + "\"");
                if (indexFiles != null) {
                    System.out.print(" (serving index files: ");
                    for (String indexFile : indexFiles) {
                        System.out.print(indexFile + " ");
                    }
                }
                System.out.println(").");
            }
            registerPluginForMimeType(indexFiles, mime, info.getWebServerPlugin(mime), options);
        }
    }

    ServerRunner.executeInstance(new SimpleWebServer(host, port, rootDirs, quiet, cors));
}
From source file:org.overlord.sramp.governance.shell.commands.Pkg2SrampCommand.java
/**
 * Main entry point - for use outside the interactive shell.
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    String brmsPackageName = "SRAMPPackage"; //$NON-NLS-1$
    String tag = "LATEST"; //$NON-NLS-1$
    String brmsBaseUrl = "http://localhost:8080/drools-guvnor"; //$NON-NLS-1$
    String brmsUserId = "admin"; //$NON-NLS-1$
    String brmsPassword = "admin"; //$NON-NLS-1$
    if (args.length > 0)
        brmsPackageName = args[0];
    if (args.length > 1)
        tag = args[1];
    if (args.length > 2)
        brmsBaseUrl = args[2];
    if (args.length > 3)
        brmsUserId = args[3];
    if (args.length > 4)
        brmsPassword = args[4];

    StringBuilder argLine = new StringBuilder();
    argLine.append(brmsPackageName).append(" ").append(tag) //$NON-NLS-1$
            .append(" ").append(brmsBaseUrl) //$NON-NLS-1$
            .append(" ").append(brmsUserId) //$NON-NLS-1$
            .append(" ").append(brmsPassword); //$NON-NLS-1$

    SrampAtomApiClient client = new SrampAtomApiClient("http://localhost:8080/s-ramp-server"); //$NON-NLS-1$
    QName clientVarName = new QName("s-ramp", "client"); //$NON-NLS-1$ //$NON-NLS-2$
    Pkg2SrampCommand cmd = new Pkg2SrampCommand();
    ShellContext context = new SimpleShellContext();
    context.setVariable(clientVarName, client);
    cmd.setArguments(new Arguments(argLine.toString()));
    cmd.setContext(context);
    cmd.execute();
}
From source file:com.appeligo.responsetest.ServerResponseChecker.java
/**
 * @param args
 */
public static void main(String[] args) {
    PatternLayout pattern = new PatternLayout("%d{ISO8601} %-5p [%-c{1} - %t] - %m%n");
    ConsoleAppender consoleAppender = new ConsoleAppender(pattern);
    LevelRangeFilter infoFilter = new LevelRangeFilter();
    infoFilter.setLevelMin(Level.INFO);
    consoleAppender.addFilter(infoFilter);
    BasicConfigurator.configure(consoleAppender);

    String configFile = "/etc/flip.tv/responsetest.xml";

    if (args.length > 0) {
        if (args.length == 2 && args[0].equals("-config")) {
            configFile = args[1];
        } else {
            log.error("Usage: java " + ServerResponseChecker.class.getName() + " [-config <xmlfile>]");
            System.exit(1);
        }
    }

    try {
        XMLConfiguration config = new XMLConfiguration(configFile);
        logFile = config.getString("logFile", logFile);
        servlet = config.getString("servlet", servlet);
        timeoutSeconds = config.getLong("timeoutSeconds", timeoutSeconds);
        responseTimeThresholdSeconds = config.getLong("responseTimeThresholdSeconds",
                responseTimeThresholdSeconds);
        reporter = config.getString("reporter", reporter);
        smtpServer = config.getString("smtpServer", smtpServer);
        smtpUsername = config.getString("smtpUsername", smtpUsername);
        smtpPassword = config.getString("smtpPassword", smtpPassword);
        smtpDebug = config.getBoolean("smtpDebug", smtpDebug);
        mailTo = config.getString("mailTo", mailTo);
    } catch (ConfigurationException e) {
        e.printStackTrace();
    }

    marker = logFile + ".mailed";

    try {
        BasicConfigurator.configure(new RollingFileAppender(pattern, logFile, true));
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    // Add email appender
    SMTPAppender mailme = new SMTPAppender();
    LevelRangeFilter warnFilter = new LevelRangeFilter();
    warnFilter.setLevelMin(Level.WARN);
    mailme.addFilter(warnFilter);
    mailme.setSMTPDebug(smtpDebug);
    mailme.setSMTPHost(smtpServer);
    mailme.setTo(mailTo);
    mailme.setFrom(reporter + " <" + smtpUsername + ">");
    mailme.setBufferSize(1);
    mailme.setSubject(servlet + " Not Responding!");
    mailme.setSMTPUsername(smtpUsername);
    mailme.setSMTPPassword(smtpPassword);
    mailme.setLayout(new SimpleLayout());
    mailme.activateOptions();
    mailme.setLayout(pattern);
    BasicConfigurator.configure(mailme);

    long before;
    ConnectionThread connectionThread = new ConnectionThread();
    connectionThread.start();
    synchronized (connectionThread) {
        connectionThread.setOkToGo(true);
        connectionThread.notifyAll();

        before = System.currentTimeMillis();
        long delay = timeoutSeconds * 1000;
        while (!done && delay > 0) {
            try {
                connectionThread.wait(delay);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            delay -= (System.currentTimeMillis() - before);
        }
    }
    long after = System.currentTimeMillis();
    responseMillis = after - before;

    String reportStatus = "Could not report";
    try {
        StringBuilder sb = new StringBuilder();
        sb.append(servlet + "/responsetest/report.action");
        sb.append("?reporter=" + URLEncoder.encode(reporter));
        sb.append("&status=" + URLEncoder.encode(status));
        sb.append("&bytesRead=" + bytesRead);
        sb.append("&timedOut=" + (!done));
        if (throwable == null) {
            sb.append("&exception=none");
        } else {
            sb.append("&exception="
                    + URLEncoder.encode(throwable.getClass().getName() + "-" + throwable.getMessage()));
        }
        sb.append("&responseMillis=" + responseMillis);
        URL reportURL = new URL(sb.toString());
        connection = (HttpURLConnection) reportURL.openConnection();
        connection.connect();
        reportStatus = connection.getResponseCode() + " - " + connection.getResponseMessage();
    } catch (Throwable t) {
        reportStatus = t.getClass().getName() + "-" + t.getMessage();
    }

    StringBuilder sb = new StringBuilder();
    sb.append(servlet + ": ");
    sb.append(status + ", " + bytesRead + " bytes, ");
    if (done) {
        sb.append("DONE, ");
    } else {
        sb.append("TIMED OUT, ");
    }
    sb.append(responseMillis + " millisecond response, ");
    sb.append(" report status=" + reportStatus);

    File markerFile = new File(marker);

    if (done && status.startsWith("200") && (throwable == null)) {
        if ((responseMillis / 1000) < responseTimeThresholdSeconds) {
            if (markerFile.exists()) {
                markerFile.delete();
            }
            log.debug(sb.toString());
        } else {
            if (markerFile.exists()) {
                log.info(sb.toString());
            } else {
                try {
                    new FileOutputStream(marker).close();
                    log.warn(sb.toString());
                } catch (IOException e) {
                    log.info(sb.toString());
                    log.info("Can't send email alert because could not write marker file: " + marker + ". "
                            + e.getMessage());
                }
            }
        }
    } else {
        if (throwable != null) {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            throwable.printStackTrace(pw);
            sb.append(sw.toString());
        }
        if (markerFile.exists()) {
            log.info(sb.toString());
        } else {
            try {
                new FileOutputStream(marker).close();
                log.fatal(sb.toString()); // chosen appender layout ignoresThrowable()
            } catch (IOException e) {
                log.info(sb.toString());
                log.info("Can't send email alert because could not write marker file: " + marker + ". "
                        + e.getMessage());
            }
        }
    }
}
From source file:ca.uqac.dim.net.verify.NetworkChecker.java
/**
 * Main loop. Processes command line arguments, loads the network, builds and
 * sends a file to NuSMV and handles its response.
 * @param args Command-line arguments
 */
public static void main(String[] args) {
    // Configuration variables
    boolean opt_show_file = false, opt_show_messages = true, opt_show_stats = false;

    // Create and parse command line options
    Option opt = null;
    Options options = new Options();
    options.addOption("f", "file", false, "Output NuSMV file to stdout");
    options.addOption("s", "time", false, "Display statistics");
    options.addOption("h", "help", false, "Show usage information");
    options.addOption("q", "quiet", false, "Do not display any message or explanation");
    opt = new Option("i", "file", true, "Input directory");
    opt.setRequired(true);
    options.addOption(opt);
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println("Error parsing command line arguments");
        showUsage(options);
        System.exit(1);
    }
    if (cmd == null) {
        System.err.println("Error parsing command line arguments");
        showUsage(options);
        System.exit(1);
    }

    // Get options
    assert cmd != null;
    opt_show_file = cmd.hasOption("f");
    opt_show_messages = !cmd.hasOption("q");
    opt_show_stats = cmd.hasOption("s");
    if (opt_show_messages)
        System.err.println(
                "Distributed anomaly verifier\n(C) 2012 Sylvain Hallé, Université du Québec à Chicoutimi\n");

    // Get input directory and blob
    assert cmd.hasOption("i");
    String slash = System.getProperty("file.separator");
    String input_dir = cmd.getOptionValue("i");
    int loc = input_dir.lastIndexOf(slash);
    String dir_name = "", blob = "";
    if (loc == -1) {
        dir_name = "";
        blob = input_dir;
    } else {
        dir_name = input_dir.substring(0, loc);
        blob = input_dir.substring(loc + 1);
    }

    // Load network
    Network net = null;
    try {
        net = loadNetworkFromDirectory(dir_name, blob, slash);
    } catch (FileNotFoundException e) {
        System.err.print("Error reading ");
        System.err.println(e.getMessage());
        System.exit(1);
    } catch (IOException e) {
        System.err.println(e);
        System.exit(1);
    }

    // Build NuSMV file
    assert net != null;
    StringBuilder sb = new StringBuilder();
    if (opt_show_file) {
        sb.append("-- File auto-generated by DistributedChecker\n");
        sb.append("-- (C) 2012 Sylvain Hallé, Université du Québec à Chicoutimi\n\n");
    }
    sb.append(net.toSmv(opt_show_file));
    // Simple shadowing
    String f_shadowing = "G (frozen -> !(interval_l <= rule_interval_l & interval_r >= rule_interval_r & decision != rule_decision))";
    sb.append("\n\nLTLSPEC\n").append(f_shadowing);

    // Run NuSMV and parse its return value
    if (opt_show_file) {
        System.out.println(sb);
        System.exit(0);
    }
    RuntimeInfos ri = null;
    try {
        ri = runNuSMV(sb.toString());
    } catch (IOException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    } catch (InterruptedException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    }
    assert ri != null;

    // If requested, compute and show an explanation from NuSMV's returned contents
    if (opt_show_messages) {
        ExplanationTrace t = null;
        try {
            t = net.explain(ri.m_return_contents);
        } catch (NuSmvParseException e) {
            System.err.println("Error reading NuSMV's answer");
            System.exit(1);
        }
        if (t == null) {
            // No explanation => no anomaly found
            System.out.println("No anomaly found");
        } else
            System.out.println(t);
    }

    // If requested, show runtime statistics
    if (opt_show_stats) {
        StringBuilder out = new StringBuilder();
        out.append(net.getNodeSize()).append(",");
        out.append(net.getFirewallRuleSize()).append(",");
        out.append(net.getRoutingTableSize()).append(",");
        out.append(ri.m_runtime);
        System.out.println(out);
    }
    System.exit(0);
}
From source file:com.genentech.chemistry.tool.mm.SDFMMMinimize.java
/** * Main function for running on the command line * @param args//ww w .j a v a 2 s . c o m */ public static void main(String... args) throws IOException { // Get the available options from the programs Map<String, List<String>> allowedProgramsAndForceFields = getAllowedProgramsAndForceFields(); Map<String, List<String>> allowedProgramsAndSolvents = getAllowedProgramsAndSolvents(); // create command line Options object Options options = new Options(); Option opt = new Option(OPT_INFILE, true, "input file oe-supported Use .sdf|.smi to specify the file type."); opt.setRequired(true); options.addOption(opt); opt = new Option(OPT_OUTFILE, true, "output file oe-supported. Use .sdf|.smi to specify the file type."); opt.setRequired(true); options.addOption(opt); StringBuilder programOptions = new StringBuilder("Program to use for minimization. Choices are\n"); for (String program : allowedProgramsAndForceFields.keySet()) { programOptions.append("\n*** -program " + program + " ***\n"); String forcefields = ""; for (String option : allowedProgramsAndForceFields.get(program)) { forcefields += option + " "; } programOptions.append("-forcefield " + forcefields + "\n"); String solvents = ""; for (String option : allowedProgramsAndSolvents.get(program)) { solvents += option + " "; } programOptions.append("-solvent " + solvents + "\n"); } opt = new Option(OPT_PROGRAM, true, programOptions.toString()); opt.setRequired(true); options.addOption(opt); opt = new Option(OPT_FORCEFIELD, true, "Forcefield options. See -program for choices"); opt.setRequired(false); options.addOption(opt); opt = new Option(OPT_SOLVENT, true, "Solvent options. See -program for choices"); opt.setRequired(false); options.addOption(opt); opt = new Option(OPT_FIXED_ATOM_TAG, true, "SD tag name which contains the atom numbers to be held fixed."); opt.setRequired(false); options.addOption(opt); opt = new Option(OPT_FIX_TORSION, true, "true/false. if true, the atoms in fixedAtomTag contains 4 indices of atoms defining a torsion angle to be held fixed"); opt.setRequired(false); options.addOption(opt); opt = new Option(OPT_WORKING_DIR, true, "Working directory to put files."); opt.setRequired(false); options.addOption(opt); CommandLineParser parser = new PosixParser(); CommandLine cmd = null; try { cmd = parser.parse(options, args); } catch (Exception e) { System.err.println(e.getMessage()); exitWithHelp(options); } args = cmd.getArgs(); if (args.length != 0) { System.err.println("Unknown arguments" + args); exitWithHelp(options); } if (cmd.hasOption("d")) { System.err.println("Start debugger and press return:"); new BufferedReader(new InputStreamReader(System.in)).readLine(); } String inFile = cmd.getOptionValue(OPT_INFILE); String outFile = cmd.getOptionValue(OPT_OUTFILE); String fixedAtomTag = cmd.getOptionValue(OPT_FIXED_ATOM_TAG); boolean fixTorsion = (cmd.getOptionValue(OPT_FIX_TORSION) != null && cmd.getOptionValue(OPT_FIX_TORSION).equalsIgnoreCase("true")); String programName = cmd.getOptionValue(OPT_PROGRAM); String forcefield = cmd.getOptionValue(OPT_FORCEFIELD); String solvent = cmd.getOptionValue(OPT_SOLVENT); String workDir = cmd.getOptionValue(OPT_WORKING_DIR); if (workDir == null || workDir.trim().length() == 0) workDir = "."; // Create a minimizer SDFMMMinimize minimizer = new SDFMMMinimize(); minimizer.setMethod(programName, forcefield, solvent); minimizer.run(inFile, outFile, fixedAtomTag, fixTorsion, workDir); minimizer.close(); System.err.println("Minimization complete."); }
From source file:baldrickv.s3streamingtool.S3StreamingTool.java
public static void main(String args[]) throws Exception {
    BasicParser p = new BasicParser();
    Options o = getOptions();
    CommandLine cl = p.parse(o, args);

    if (cl.hasOption('h')) {
        HelpFormatter hf = new HelpFormatter();
        hf.setWidth(80);

        StringBuilder sb = new StringBuilder();
        sb.append("\n");
        sb.append("Upload:\n");
        sb.append(" -u -r creds -s 50M -b my_bucket -f hda1.dump -t 10\n");
        sb.append("Download:\n");
        sb.append(" -d -r creds -s 50M -b my_bucket -f hda1.dump -t 10\n");
        sb.append("Upload encrypted:\n");
        sb.append(" -u -r creds -z -k secret_key -s 50M -b my_bucket -f hda1.dump -t 10\n");
        sb.append("Download encrypted:\n");
        sb.append(" -d -r creds -z -k secret_key -s 50M -b my_bucket -f hda1.dump -t 10\n");
        sb.append("Cleanup in-progress multipart uploads\n");
        sb.append(" -c -r creds -b my_bucket\n");
        System.out.println(sb.toString());

        hf.printHelp("See above", o);
        return;
    }

    int n = 0;
    if (cl.hasOption('d'))
        n++;
    if (cl.hasOption('u'))
        n++;
    if (cl.hasOption('c'))
        n++;
    if (cl.hasOption('m'))
        n++;
    if (n != 1) {
        System.err.println("Must specify at exactly one of -d, -u, -c or -m");
        System.exit(-1);
    }

    if (cl.hasOption('m')) {
        //InputStream in = new java.io.BufferedInputStream(System.in,1024*1024*2);
        InputStream in = System.in;
        System.out.println(TreeHashGenerator.calculateTreeHash(in));
        return;
    }

    require(cl, 'b');
    if (cl.hasOption('d') || cl.hasOption('u')) {
        require(cl, 'f');
    }
    if (cl.hasOption('z')) {
        require(cl, 'k');
    }

    AWSCredentials creds = null;
    if (cl.hasOption('r')) {
        creds = Utils.loadAWSCredentails(cl.getOptionValue('r'));
    } else {
        if (cl.hasOption('i') && cl.hasOption('e')) {
            creds = new BasicAWSCredentials(cl.getOptionValue('i'), cl.getOptionValue('e'));
        } else {
            System.out.println("Must specify either credential file (-r) or AWS key ID and secret (-i and -e)");
            System.exit(-1);
        }
    }

    S3StreamConfig config = new S3StreamConfig();
    config.setEncryption(false);
    if (cl.hasOption('z')) {
        config.setEncryption(true);
        config.setSecretKey(Utils.loadSecretKey(cl.getOptionValue("k")));
    }

    if (cl.hasOption("encryption-mode")) {
        config.setEncryptionMode(cl.getOptionValue("encryption-mode"));
    }

    config.setS3Bucket(cl.getOptionValue("bucket"));
    if (cl.hasOption("file")) {
        config.setS3File(cl.getOptionValue("file"));
    }
    if (cl.hasOption("threads")) {
        config.setIOThreads(Integer.parseInt(cl.getOptionValue("threads")));
    }

    if (cl.hasOption("blocksize")) {
        String s = cl.getOptionValue("blocksize");
        s = s.toUpperCase();
        int multi = 1;
        int end = 0;
        while ((end < s.length()) && (s.charAt(end) >= '0') && (s.charAt(end) <= '9')) {
            end++;
        }
        int size = Integer.parseInt(s.substring(0, end));
        if (end < s.length()) {
            String m = s.substring(end);
            if (m.equals("K"))
                multi = 1024;
            else if (m.equals("M"))
                multi = 1048576;
            else if (m.equals("G"))
                multi = 1024 * 1024 * 1024;
            else if (m.equals("KB"))
                multi = 1024;
            else if (m.equals("MB"))
                multi = 1048576;
            else if (m.equals("GB"))
                multi = 1024 * 1024 * 1024;
            else {
                System.out.println("Unknown suffix on block size. Only K,M and G understood.");
                System.exit(-1);
            }
        }
        size *= multi;
        config.setBlockSize(size);
    }

    Logger.getLogger("").setLevel(Level.FINE);
    S3StreamingDownload.log.setLevel(Level.FINE);
    S3StreamingUpload.log.setLevel(Level.FINE);

    config.setS3Client(new AmazonS3Client(creds));
    config.setGlacierClient(new AmazonGlacierClient(creds));
    config.getGlacierClient().setEndpoint("glacier.us-west-2.amazonaws.com");

    if (cl.hasOption("glacier")) {
        config.setGlacier(true);
        config.setStorageInterface(new StorageGlacier(config.getGlacierClient()));
    } else {
        config.setStorageInterface(new StorageS3(config.getS3Client()));
    }

    if (cl.hasOption("bwlimit")) {
        config.setMaxBytesPerSecond(Double.parseDouble(cl.getOptionValue("bwlimit")));
    }

    if (cl.hasOption('c')) {
        if (config.getGlacier()) {
            GlacierCleanupMultipart.cleanup(config);
        } else {
            S3CleanupMultipart.cleanup(config);
        }
        return;
    }
    if (cl.hasOption('d')) {
        config.setOutputStream(System.out);
        S3StreamingDownload.download(config);
        return;
    }
    if (cl.hasOption('u')) {
        config.setInputStream(System.in);
        S3StreamingUpload.upload(config);
        return;
    }
}
From source file:de.tudarmstadt.ukp.experiments.dip.wp1.documents.Step8GoldDataAggregator.java
public static void main(String[] args) throws Exception { String inputDir = args[0] + "/"; // output dir File outputDir = new File(args[1]); File turkersConfidence = new File(args[2]); if (outputDir.exists()) { outputDir.delete();//from w w w. j av a 2s.c o m } outputDir.mkdir(); List<String> annotatorsIDs = new ArrayList<>(); // for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) { // QueryResultContainer queryResultContainer = QueryResultContainer // .fromXML(FileUtils.readFileToString(f, "utf-8")); // for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { // for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) { // if (!annotatorsIDs.contains(relevanceVote.turkID)) // annotatorsIDs.add(relevanceVote.turkID); // } // } // } HashMap<String, Integer> countVotesForATurker = new HashMap<>(); // creates temporary file with format for mace // Hashmap annotations: key is the id of a document and a sentence // Value is an array votes[] of turkers decisions: true or false (relevant or not) // the length of this array equals the number of annotators in List<String> annotatorsIDs. // If an annotator worked on the task his decision is written in the array otherwise the value is NULL // key: queryID + clueWebID + sentenceID // value: true and false annotations TreeMap<String, Annotations> annotations = new TreeMap<>(); for (File f : FileUtils.listFiles(new File(inputDir), new String[] { "xml" }, false)) { QueryResultContainer queryResultContainer = QueryResultContainer .fromXML(FileUtils.readFileToString(f, "utf-8")); System.out.println("Reading " + f.getName()); for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { String documentID = rankedResults.clueWebID; for (QueryResultContainer.MTurkRelevanceVote relevanceVote : rankedResults.mTurkRelevanceVotes) { Integer turkerID; if (!annotatorsIDs.contains(relevanceVote.turkID)) { annotatorsIDs.add(relevanceVote.turkID); turkerID = annotatorsIDs.size() - 1; } else { turkerID = annotatorsIDs.indexOf(relevanceVote.turkID); } Integer count = countVotesForATurker.get(relevanceVote.turkID); if (count == null) { count = 0; } count++; countVotesForATurker.put(relevanceVote.turkID, count); String id; List<Integer> trueVotes; List<Integer> falseVotes; for (QueryResultContainer.SingleSentenceRelevanceVote singleSentenceRelevanceVote : relevanceVote.singleSentenceRelevanceVotes) if (!"".equals(singleSentenceRelevanceVote.sentenceID)) { id = f.getName() + "_" + documentID + "_" + singleSentenceRelevanceVote.sentenceID; Annotations turkerVotes = annotations.get(id); if (turkerVotes == null) { trueVotes = new ArrayList<>(); falseVotes = new ArrayList<>(); turkerVotes = new Annotations(trueVotes, falseVotes); } trueVotes = turkerVotes.trueAnnotations; falseVotes = turkerVotes.falseAnnotations; if ("true".equals(singleSentenceRelevanceVote.relevant)) { // votes[turkerID] = true; trueVotes.add(turkerID); } else if ("false".equals(singleSentenceRelevanceVote.relevant)) { // votes[turkerID] = false; falseVotes.add(turkerID); } else { throw new IllegalStateException("Annotation value of sentence " + singleSentenceRelevanceVote.sentenceID + " in " + rankedResults.clueWebID + " equals " + singleSentenceRelevanceVote.relevant); } try { int allVotesCount = trueVotes.size() + falseVotes.size(); if (allVotesCount > 5) { System.err.println(id + " doesn't have 5 annotators: true: " + trueVotes.size() + " false: " + 
falseVotes.size()); // nasty hack, we're gonna strip some data; true votes first /* we can't do that, it breaks something down the line int toRemove = allVotesCount - 5; if (trueVotes.size() >= toRemove) { trueVotes = trueVotes .subList(0, trueVotes.size() - toRemove); } else if ( falseVotes.size() >= toRemove) { falseVotes = falseVotes .subList(0, trueVotes.size() - toRemove); } */ System.err.println("Adjusted: " + id + " doesn't have 5 annotators: true: " + trueVotes.size() + " false: " + falseVotes.size()); } } catch (IllegalStateException e) { e.printStackTrace(); } turkerVotes.trueAnnotations = trueVotes; turkerVotes.falseAnnotations = falseVotes; annotations.put(id, turkerVotes); } else { throw new IllegalStateException( "Empty Sentence ID in " + f.getName() + " for turker " + turkerID); } } } } File tmp = printHashMap(annotations, annotatorsIDs.size()); String file = TEMP_DIR + "/" + tmp.getName(); MACE.main(new String[] { "--prefix", file }); //gets the keys of the documents and sentences ArrayList<String> lines = (ArrayList<String>) FileUtils.readLines(new File(file + ".prediction")); int i = 0; TreeMap<String, TreeMap<String, ArrayList<HashMap<String, String>>>> ids = new TreeMap<>(); ArrayList<HashMap<String, String>> sentences; if (lines.size() != annotations.size()) { throw new IllegalStateException( "The size of prediction file is " + lines.size() + "but expected " + annotations.size()); } for (Map.Entry entry : annotations.entrySet()) { //1001.xml_clueweb12-1905wb-13-07360_8783 String key = (String) entry.getKey(); String[] IDs = key.split("_"); if (IDs.length > 2) { String queryID = IDs[0]; String clueWebID = IDs[1]; String sentenceID = IDs[2]; TreeMap<String, ArrayList<HashMap<String, String>>> clueWebIDs = ids.get(queryID); if (clueWebIDs == null) { clueWebIDs = new TreeMap<>(); } sentences = clueWebIDs.get(clueWebID); if (sentences == null) { sentences = new ArrayList<>(); } HashMap<String, String> sentence = new HashMap<>(); sentence.put(sentenceID, lines.get(i)); sentences.add(sentence); clueWebIDs.put(clueWebID, sentences); ids.put(queryID, clueWebIDs); } else { throw new IllegalStateException("Wrong ID " + key); } i++; } for (Map.Entry entry : ids.entrySet()) { TreeMap<Integer, String> value = (TreeMap<Integer, String>) entry.getValue(); String queryID = (String) entry.getKey(); QueryResultContainer queryResultContainer = QueryResultContainer .fromXML(FileUtils.readFileToString(new File(inputDir, queryID), "utf-8")); for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) { for (Map.Entry val : value.entrySet()) { String clueWebID = (String) val.getKey(); if (clueWebID.equals(rankedResults.clueWebID)) { List<QueryResultContainer.SingleSentenceRelevanceVote> goldEstimatedLabels = new ArrayList<>(); List<QueryResultContainer.SingleSentenceRelevanceVote> turkersVotes = new ArrayList<>(); int size = 0; int hitSize = 0; String hitID = ""; for (QueryResultContainer.MTurkRelevanceVote vote : rankedResults.mTurkRelevanceVotes) { if (!hitID.equals(vote.hitID)) { hitID = vote.hitID; hitSize = vote.singleSentenceRelevanceVotes.size(); size = size + hitSize; turkersVotes.addAll(vote.singleSentenceRelevanceVotes); } else { if (vote.singleSentenceRelevanceVotes.size() != hitSize) { hitSize = vote.singleSentenceRelevanceVotes.size(); size = size + hitSize; turkersVotes.addAll(vote.singleSentenceRelevanceVotes); } } } ArrayList<HashMap<String, String>> sentenceList = (ArrayList<HashMap<String, String>>) val .getValue(); if 
(sentenceList.size() != turkersVotes.size()) { try { throw new IllegalStateException("Expected size of annotations is " + turkersVotes.size() + "but found " + sentenceList.size() + " for document " + rankedResults.clueWebID + " in " + queryID); } catch (IllegalStateException ex) { ex.printStackTrace(); } } for (QueryResultContainer.SingleSentenceRelevanceVote s : turkersVotes) { String valSentence = null; for (HashMap<String, String> anno : sentenceList) { if (anno.keySet().contains(s.sentenceID)) { valSentence = anno.get(s.sentenceID); } } QueryResultContainer.SingleSentenceRelevanceVote singleSentenceVote = new QueryResultContainer.SingleSentenceRelevanceVote(); singleSentenceVote.sentenceID = s.sentenceID; if (("false").equals(valSentence)) { singleSentenceVote.relevant = "false"; } else if (("true").equals(valSentence)) { singleSentenceVote.relevant = "true"; } else { throw new IllegalStateException("Annotation value of sentence " + singleSentenceVote.sentenceID + " equals " + val.getValue()); } goldEstimatedLabels.add(singleSentenceVote); } rankedResults.goldEstimatedLabels = goldEstimatedLabels; } } } File outputFile = new File(outputDir, queryID); FileUtils.writeStringToFile(outputFile, queryResultContainer.toXML(), "utf-8"); System.out.println("Finished " + outputFile); } ArrayList<String> annotators = (ArrayList<String>) FileUtils.readLines(new File(file + ".competence")); FileWriter fileWriter; StringBuilder sb = new StringBuilder(); for (int j = 0; j < annotatorsIDs.size(); j++) { String[] s = annotators.get(0).split("\t"); Float score = Float.parseFloat(s[j]); String turkerID = annotatorsIDs.get(j); System.out.println(turkerID + " " + score + " " + countVotesForATurker.get(turkerID)); sb.append(turkerID).append(" ").append(score).append(" ").append(countVotesForATurker.get(turkerID)) .append("\n"); } fileWriter = new FileWriter(turkersConfidence); fileWriter.append(sb.toString()); fileWriter.close(); }
From source file:com.cloudhopper.httpclient.util.HttpPostMain.java
static public void main(String[] args) throws Exception { ///*from ww w .j ava 2s .c o m*/ // target urls // String strURL = "http://209.226.31.233:9009/SendSmsService/b98183b99a1f473839ce569c78b84dbd"; // Username: Twitter // Password: Twitter123 TrustManager easyTrustManager = new X509TrustManager() { public void checkClientTrusted(java.security.cert.X509Certificate[] arg0, String arg1) throws CertificateException { // allow all } public void checkServerTrusted(java.security.cert.X509Certificate[] arg0, String arg1) throws CertificateException { // allow all } public java.security.cert.X509Certificate[] getAcceptedIssuers() { return null; } }; Scheme http = new Scheme("http", PlainSocketFactory.getSocketFactory(), 80); SSLContext sslcontext = SSLContext.getInstance("TLS"); sslcontext.init(null, new TrustManager[] { easyTrustManager }, null); SSLSocketFactory sf = new SSLSocketFactory(sslcontext); sf.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); Scheme https = new Scheme("https", sf, 443); //SchemeRegistry sr = new SchemeRegistry(); //sr.register(http); //sr.register(https); // create and initialize scheme registry //SchemeRegistry schemeRegistry = new SchemeRegistry(); //schemeRegistry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80)); // create an HttpClient with the ThreadSafeClientConnManager. // This connection manager must be used if more than one thread will // be using the HttpClient. //ThreadSafeClientConnManager cm = new ThreadSafeClientConnManager(schemeRegistry); //cm.setMaxTotalConnections(1); DefaultHttpClient client = new DefaultHttpClient(); client.getConnectionManager().getSchemeRegistry().register(https); // for (int i = 0; i < 1; i++) { // // create a new ticket id // //String ticketId = TicketUtil.generate(1, System.currentTimeMillis()); /** StringBuilder string0 = new StringBuilder(200) .append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n") .append("<S:Envelope xmlns:S=\"http://schemas.xmlsoap.org/soap/envelope/\">\n") .append(" <S:Header>\n") .append(" <ns3:TransactionID xmlns:ns4=\"http://vmp.vzw.com/schema\"\n") .append("xmlns:ns3=\"http://www.3gpp.org/ftp/Specs/archive/23_series/23.140/schema/REL-6-MM7-1-4\">" + ticketId + "</ns3:TransactionID>\n") .append(" </S:Header>\n") .append(" <S:Body>\n") .append(" <ns2:OptinReq xmlns:ns4=\"http://schemas.xmlsoap.org/soap/envelope/\"\n") .append("xmlns:ns3=\"http://www.3gpp.org/ftp/Specs/archive/23_series/23.140/schema/REL-6-MM7-1-4\"\n") .append("xmlns:ns2=\"http://vmp.vzw.com/schema\">\n") .append(" <ns2:VASPID>twitter</ns2:VASPID>\n") .append(" <ns2:VASID>tm33t!</ns2:VASID>\n") .append(" <ns2:ShortCode>800080008001</ns2:ShortCode>\n") .append(" <ns2:Number>9257089093</ns2:Number>\n") .append(" <ns2:Source>provider</ns2:Source>\n") .append(" <ns2:Message/>\n") .append(" </ns2:OptinReq>\n") .append(" </S:Body>\n") .append("</S:Envelope>"); */ // simple send sms StringBuilder string1 = new StringBuilder(200).append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n") .append("<soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" xmlns:loc=\"http://www.csapi.org/schema/parlayx/sms/send/v2_3/local\">\n") .append(" <soapenv:Header/>\n").append(" <soapenv:Body>\n").append(" <loc:sendSms>\n") .append(" <loc:addresses>tel:+16472260233</loc:addresses>\n") .append(" <loc:senderName>6388</loc:senderName>\n") .append(" <loc:message>Test Message &</loc:message>\n").append(" </loc:sendSms>\n") .append(" </soapenv:Body>\n").append("</soapenv:Envelope>\n"); // 
startSmsNotification - place to deliver SMS to String req = string1.toString(); logger.debug("Request XML -> \n" + req); HttpPost post = new HttpPost(strURL); StringEntity postEntity = new StringEntity(req, "ISO-8859-1"); postEntity.setContentType("text/xml; charset=\"ISO-8859-1\""); post.addHeader("SOAPAction", "\"\""); post.setEntity(postEntity); long start = System.currentTimeMillis(); client.getCredentialsProvider().setCredentials(new AuthScope("209.226.31.233", AuthScope.ANY_PORT), new UsernamePasswordCredentials("Twitter", "Twitter123")); BasicHttpContext localcontext = new BasicHttpContext(); // Generate BASIC scheme object and stick it to the local // execution context BasicScheme basicAuth = new BasicScheme(); localcontext.setAttribute("preemptive-auth", basicAuth); // Add as the first request interceptor client.addRequestInterceptor(new PreemptiveAuth(), 0); HttpResponse httpResponse = client.execute(post, localcontext); HttpEntity responseEntity = httpResponse.getEntity(); // // was the request OK? // if (httpResponse.getStatusLine().getStatusCode() != 200) { logger.error("Request failed with StatusCode=" + httpResponse.getStatusLine().getStatusCode()); } // get an input stream String responseBody = EntityUtils.toString(responseEntity); long stop = System.currentTimeMillis(); logger.debug("----------------------------------------"); logger.debug("Response took " + (stop - start) + " ms"); logger.debug(responseBody); logger.debug("----------------------------------------"); // } // When HttpClient instance is no longer needed, // shut down the connection manager to ensure // immediate deallocation of all system resources client.getConnectionManager().shutdown(); }