List of usage examples for java.io.IOException printStackTrace()
public void printStackTrace()
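The project examples below all follow the same basic pattern: catch the IOException at some boundary, call printStackTrace() to write the exception class, message, and stack frames to System.err, then recover, rethrow, or exit. As a minimal, self-contained sketch of that pattern (the file name example.txt is only a placeholder; any missing file will trigger the catch block):

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class PrintStackTraceExample {
    public static void main(String[] args) {
        // "example.txt" is a placeholder path used only to provoke an IOException.
        try (BufferedReader reader = new BufferedReader(new FileReader("example.txt"))) {
            System.out.println(reader.readLine());
        } catch (IOException e) {
            // Writes the exception type, message, and stack trace to System.err.
            e.printStackTrace();
        }
    }
}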
From source file:AdminExample.java
public static void main(String[] args) {
    HttpURLConnection connection = null;
    StringBuilder response = new StringBuilder();
    // We are using the Jackson JSON parser to serialize and deserialize the JSON.
    // See http://wiki.fasterxml.com/JacksonHome
    // Feel free to use whichever library you prefer.
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

    String accessToken = ""; // Insert your access token here. Note this must be from an account that is an Admin of an account.
    String user1Email = "";  // You need access to these two email accounts.
    String user2Email = "";  // Note Gmail and Hotmail allow email aliasing.
                             // joe@gmail.com will get email sent to joe+user1@gmail.com

    try {
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        System.out.println("Adding user " + user1Email);

        // Add the users:
        User user = new User();
        user.setEmail(user1Email);
        user.setAdmin(false);
        user.setLicensedSheetCreator(true);
        connection = (HttpURLConnection) new URL(USERS_URL).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        connection.addRequestProperty("Content-Type", "application/json");
        connection.setDoOutput(true);
        mapper.writeValue(connection.getOutputStream(), user);
        Result<User> newUser1Result = mapper.readValue(connection.getInputStream(),
                new TypeReference<Result<User>>() {});
        System.out.println("User " + newUser1Result.result.email + " added with userId "
                + newUser1Result.result.getId());

        user = new User();
        user.setEmail(user2Email);
        user.setAdmin(true);
        user.setLicensedSheetCreator(true);
        connection = (HttpURLConnection) new URL(USERS_URL).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        connection.addRequestProperty("Content-Type", "application/json");
        connection.setDoOutput(true);
        mapper.writeValue(connection.getOutputStream(), user);
        Result<User> newUser2Result = mapper.readValue(connection.getInputStream(),
                new TypeReference<Result<User>>() {});
        System.out.println("User " + newUser2Result.result.email + " added with userId "
                + newUser2Result.result.getId());

        System.out.println("Please visit the email inbox for the users " + user1Email + " and " + user2Email
                + " and confirm membership to the account.");
        System.out.print("Press Enter to continue");
        in.readLine();

        // List all the users of the org
        connection = (HttpURLConnection) new URL(USERS_URL).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        connection.addRequestProperty("Content-Type", "application/json");
        List<User> users = mapper.readValue(connection.getInputStream(), new TypeReference<List<User>>() {});
        System.out.println("The following are members of your account: ");
        for (User orgUser : users) {
            System.out.println("\t" + orgUser.getEmail());
        }

        // Create a sheet as the admin
        Sheet newSheet = new Sheet();
        newSheet.setName("Admin's Sheet");
        newSheet.setColumns(Arrays.asList(new Column("Column 1", "TEXT_NUMBER", null, true, null),
                new Column("Column 2", "TEXT_NUMBER", null, null, null),
                new Column("Column 3", "TEXT_NUMBER", null, null, null)));
        connection = (HttpURLConnection) new URL(SHEETS_URL).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        connection.addRequestProperty("Content-Type", "application/json");
        connection.setDoOutput(true);
        mapper.writeValue(connection.getOutputStream(), newSheet);
        mapper.readValue(connection.getInputStream(), new TypeReference<Result<Sheet>>() {});

        // Create a sheet as user1
        newSheet = new Sheet();
        newSheet.setName("User 1's Sheet");
        newSheet.setColumns(Arrays.asList(new Column("Column 1", "TEXT_NUMBER", null, true, null),
                new Column("Column 2", "TEXT_NUMBER", null, null, null),
                new Column("Column 3", "TEXT_NUMBER", null, null, null)));
        connection = (HttpURLConnection) new URL(SHEETS_URL).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        // Here is where the magic happens - any action performed in this call will be on behalf of the
        // user provided. Note that this person must be a confirmed member of your org.
        // Also note that the email address is url-encoded.
        connection.addRequestProperty("Assume-User", URLEncoder.encode(user1Email, "UTF-8"));
        connection.addRequestProperty("Content-Type", "application/json");
        connection.setDoOutput(true);
        mapper.writeValue(connection.getOutputStream(), newSheet);
        mapper.readValue(connection.getInputStream(), new TypeReference<Result<Sheet>>() {});

        // Create a sheet as user2
        newSheet = new Sheet();
        newSheet.setName("User 2's Sheet");
        newSheet.setColumns(Arrays.asList(new Column("Column 1", "TEXT_NUMBER", null, true, null),
                new Column("Column 2", "TEXT_NUMBER", null, null, null),
                new Column("Column 3", "TEXT_NUMBER", null, null, null)));
        connection = (HttpURLConnection) new URL(SHEETS_URL).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        connection.addRequestProperty("Assume-User", URLEncoder.encode(user2Email, "UTF-8"));
        connection.addRequestProperty("Content-Type", "application/json");
        connection.setDoOutput(true);
        mapper.writeValue(connection.getOutputStream(), newSheet);
        mapper.readValue(connection.getInputStream(), new TypeReference<Result<Sheet>>() {});

        // List all the sheets in the org:
        System.out.println("The following sheets are owned by members of your account: ");
        connection = (HttpURLConnection) new URL(USERS_SHEETS_URL).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        connection.addRequestProperty("Content-Type", "application/json");
        List<Sheet> allSheets = mapper.readValue(connection.getInputStream(),
                new TypeReference<List<Sheet>>() {});
        for (Sheet orgSheet : allSheets) {
            System.out.println("\t" + orgSheet.getName() + " - " + orgSheet.getOwner());
        }

        // Now delete user1 and transfer their sheets to user2
        connection = (HttpURLConnection) new URL(USER_URL.replace(ID, newUser1Result.getResult().getId() + "")
                + "?transferTo=" + newUser2Result.getResult().getId()).openConnection();
        connection.addRequestProperty("Authorization", "Bearer " + accessToken);
        connection.addRequestProperty("Assume-User", URLEncoder.encode(user2Email, "UTF-8"));
        connection.addRequestProperty("Content-Type", "application/json");
        connection.setRequestMethod("DELETE");
        Result<Object> resultObject = mapper.readValue(connection.getInputStream(),
                new TypeReference<Result<Object>>() {});
        System.out.println("Sheets transferred : " + resultObject.getSheetsTransferred());
    } catch (IOException e) {
        InputStream is = connection == null ? null : ((HttpURLConnection) connection).getErrorStream();
        if (is != null) {
            BufferedReader reader = new BufferedReader(new InputStreamReader(is));
            String line;
            try {
                response = new StringBuilder();
                while ((line = reader.readLine()) != null) {
                    response.append(line);
                }
                reader.close();
                Result<?> result = mapper.readValue(response.toString(), Result.class);
                System.err.println(result.message);
            } catch (IOException e1) {
                e1.printStackTrace();
            }
        }
        e.printStackTrace();
    } catch (Exception e) {
        System.out.println("Something broke: " + e.getMessage());
        e.printStackTrace();
    }
}
From source file:com.github.errantlinguist.latticevisualiser.LatticeVisualiser.java
/**
 * Parses command-line arguments for input file, window width and height,
 * minimum state (i.e. vertex) size and the state/vertex size multiplier.
 * The input file is then read and visualised.
 *
 * @param args
 *            The command-line arguments.
 */
public static void main(final String[] args) {
    final ArgParser argParser = ArgParser.getInstance();
    argParser.parseArgs(args);

    System.out.print("Reading lattice from path: ");
    final File latticeInfile = argParser.getLatticeInfile();
    System.out.println(latticeInfile.getPath());

    DirectedSparseGraph<Integer, Edge> latticeGraph = null;
    try {
        latticeGraph = readLattice(latticeInfile);
    } catch (final IOException e) {
        System.err.println("I/O exception while reading lattice file.");
        e.printStackTrace();
        System.exit(EX_IOERR);
    } catch (final ParseException e) {
        System.err.println("Parse exception while reading lattice file.");
        e.printStackTrace();
        System.exit(EX_DATAERR);
    }

    final File nonwordInfile = argParser.getNonwordsInfile();
    ImmutableSet<String> nonwords = null;
    try {
        nonwords = readNonwordLabelSet(nonwordInfile);
    } catch (final IOException e) {
        System.err.println("I/O exception while reading non-word label file.");
        e.printStackTrace();
        System.exit(EX_IOERR);
    } catch (final ParseException e) {
        System.err.println("Parse exception while reading non-word label file.");
        e.printStackTrace();
        System.exit(EX_DATAERR);
    }

    // Set the set of symbols which represent non-word labels. NOTE:
    // This has to be set in order for most of the visualisation to work properly.
    StateType.setNonwords(nonwords);

    final Dimension windowDimension = argParser.getWindowDimension();
    final double stateSizeMultiplier = argParser.getStateSizeMultiplier();
    final int minStateSize = argParser.getMinStateSize();
    final LatticeVisualiser visualizer = new LatticeVisualiser(latticeGraph, nonwords, windowDimension,
            stateSizeMultiplier, minStateSize);
    visualizer.visualise();
    visualizer.show();

    printInfo(latticeGraph);
}
From source file:hms.hwestra.interactionrebuttal2.InteractionRebuttal2.java
public static void main(String[] args) {
    InteractionRebuttal2 b = new InteractionRebuttal2();
    try {
        // String axisdir = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/0.1.PreinigerEtAlAxesOfVariation/";
        // String axisdir = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/0.1.PreinigerEtAlAxesOfVariationFORCENORMAL/";
        // String annot = "/Data/ProbeAnnotation/2015-03-23-HT12v3ILMNIdToArrayAddress.txt";
        //// b.rewritePleinigerAxisToArrayAddress(axisdir, annot);
        // b.createProxysFromPleinigerAxis(axisdir);

        // String pcFile = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/3.3.AdditionalPCsInModelTop58Probes/EGCUTData/CellTypeSpecificProbePCA.PCAOverSamplesPrincipalComponents.txt";
        // String cellcountFile = "/Sync/AeroFS/cellTypeeQTL/2014-10-28-Rebuttal/EGCUTData/EGCUTValidSamplesNeutrosOnly.txt";
        // String pheno = "Neutrophils";
        // b.iterativelyIncreaseNumberOfPCsInCellCountPredictionModel(pcFile, cellcountFile, pheno);

        // InteractionRebuttal a = new InteractionRebuttal();
        //
        // String normal1 = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/3.4.2.RobustSEPValues/EGCUT/Normal/InteractionResults.txt";
        // String robust1 = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/3.4.2.RobustSEPValues/EGCUT/Robust/InteractionResults.txt";
        // String out = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/3.4.2.RobustSEPValues/EGCUT.txt";
        // b.determineInteractionPvalueAndMerge(normal1, robust1, out);
        //
        // String normal2 = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/3.4.2.RobustSEPValues/GRNG/Normal/InteractionResults.txt";
        // String robust2 = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/3.4.2.RobustSEPValues/GRNG/Robust/InteractionResults.txt";
        // out = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/3.4.2.RobustSEPValues/GRNG.txt";
        // b.determineInteractionPvalueAndMerge(normal2, robust2, out);

        // String fileIn = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/GeneExpressionData/InChianti/GSE48152_RAW.txt";
        // String fileOut = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/GeneExpressionData/InChianti/InChiantiData.txt";
        //
        // b.rewriteExpressionMatrix(fileIn, fileOut, annot);

        // String[] filesIn = new String[]{"/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/GeneExpressionData/Kora/E-MTAB-1708.raw.1/",
        //         "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/GeneExpressionData/Kora/E-MTAB-1708.raw.2/",
        //         "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/GeneExpressionData/Kora/E-MTAB-1708.raw.3/"};
        // fileOut = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/GeneExpressionData/Kora/KoraData.txt";
        //
        // b.rewriteMTabToMatrix(filesIn, fileOut, annot);

        String[] files = new String[10];
        files[0] = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/3.4.2.RobustSEPValues/Meta-forceNormal/output.txt";
        String[] fileNames = new String[10];
        fileNames[0] = "NeutrophilProxy";
        for (int i = 1; i < 10; i++) {
            files[i] = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/0.1.PreinigerEtAlAxesOfVariationFORCENORMAL/meta/Axis" + i + ".txt";
            fileNames[i] = "Axis-" + i;
        }
        String out = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/0.1.PreinigerEtAlAxesOfVariationFORCENORMAL/meta/merged-noforcednormalneutro.txt";
        String annot = "/Sync/AeroFS/cellTypeeQTL/2015-01-31-Rebuttal2/ilmnToGene.txt";
        double threshold = 0.05 / 13037;
        System.out.println("P < " + threshold);
        b.mergeMetaFiles(files, fileNames, out, annot, threshold);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:com.zimbra.cs.db.SQLite.java
public static void main(String args[]) {
    // command line argument parsing
    Options options = new Options();
    CommandLine cl = Versions.parseCmdlineArgs(args, options);
    String outputDir = cl.getOptionValue("o");
    File outFile = new File(outputDir, "versions-init.sql");
    outFile.delete();

    try {
        String redoVer = com.zimbra.cs.redolog.Version.latest().toString();
        String outStr = "-- AUTO-GENERATED .SQL FILE - Generated by the SQLite versions tool\n"
                + "INSERT INTO config(name, value, description) VALUES\n"
                + "\t('db.version', '" + Versions.DB_VERSION + "', 'db schema version');\n"
                + "INSERT INTO config(name, value, description) VALUES\n"
                + "\t('index.version', '" + Versions.INDEX_VERSION + "', 'index version');\n"
                + "INSERT INTO config(name, value, description) VALUES\n"
                + "\t('redolog.version', '" + redoVer + "', 'redolog version');\n";

        Writer output = new BufferedWriter(new FileWriter(outFile));
        output.write(outStr);
        output.close();
    } catch (IOException e) {
        System.out.println("ERROR - caught exception at\n");
        e.printStackTrace();
        System.exit(-1);
    }
}
From source file:net.fenyo.mail4hotspot.service.MailManager.java
public static void main(String[] args) throws NoSuchProviderException, MessagingException {
    System.out.println("Salut");
    // trustSSL();

    /* final Properties props = new Properties();
    props.put("mail.smtp.host", "my-mail-server");
    props.put("mail.from", "me@example.com");
    javax.mail.Session session = javax.mail.Session.getInstance(props, null);
    try {
        MimeMessage msg = new MimeMessage(session);
        msg.setFrom();
        msg.setRecipients(Message.RecipientType.TO, "you@example.com");
        msg.setSubject("JavaMail hello world example");
        msg.setSentDate(new Date());
        msg.setText("Hello, world!\n");
        Transport.send(msg);
    } catch (MessagingException mex) {
        System.out.println("send failed, exception: " + mex);
    }*/

    final Properties props = new Properties();
    //props.put("mail.host", "10.69.60.6");
    //props.put("mail.user", "fenyo");
    //props.put("mail.from", "fenyo@fenyo.net");
    //props.put("mail.transport.protocol", "smtps");
    //props.put("mail.store.protocol", "pop3s");

    // [javax.mail.Provider[STORE,imap,com.sun.mail.imap.IMAPStore,Sun Microsystems, Inc],
    // javax.mail.Provider[STORE,imaps,com.sun.mail.imap.IMAPSSLStore,Sun Microsystems, Inc],
    // javax.mail.Provider[TRANSPORT,smtp,com.sun.mail.smtp.SMTPTransport,Sun Microsystems, Inc],
    // javax.mail.Provider[TRANSPORT,smtps,com.sun.mail.smtp.SMTPSSLTransport,Sun Microsystems, Inc],
    // javax.mail.Provider[STORE,pop3,com.sun.mail.pop3.POP3Store,Sun Microsystems, Inc],
    // javax.mail.Provider[STORE,pop3s,com.sun.mail.pop3.POP3SSLStore,Sun Microsystems, Inc]]
    // final Provider[] providers = session.getProviders();

    javax.mail.Session session = javax.mail.Session.getInstance(props, null);
    session.setDebug(true);
    //session.setDebug(false);

    // final Store store = session.getStore("pop3s");
    // store.connect("10.69.60.6", 995, "fenyo", "PASSWORD");
    // final Store store = session.getStore("imaps");
    // store.connect("10.69.60.6", 993, "fenyo", "PASSWORD");
    // System.out.println(store.getDefaultFolder().getMessageCount());

    //final Store store = session.getStore("pop3");
    final Store store = session.getStore("pop3s");
    //final Store store = session.getStore("imaps");

    // store.addStoreListener(new StoreListener() {
    //     public void notification(StoreEvent e) {
    //         String s;
    //         if (e.getMessageType() == StoreEvent.ALERT)
    //             s = "ALERT: ";
    //         else
    //             s = "NOTICE: ";
    //         System.out.println("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX: " + s + e.getMessage());
    //     }
    // });

    //store.connect("10.69.60.6", 110, "fenyo", "PASSWORD");
    store.connect("pop.gmail.com", 995, "alexandre.fenyo@gmail.com", "PASSWORD");
    //store.connect("localhost", 110, "alexandre.fenyo@yahoo.com", "PASSWORD");
    //store.connect("localhost", 995, "fenyo@live.fr", "PASSWORD");
    //store.connect("localhost", 995, "thisisatestforalex@aol.fr", "PASSWORD");

    // final Folder[] folders = store.getPersonalNamespaces();
    // for (Folder f : folders) {
    //     System.out.println("Folder: " + f.getMessageCount());
    //     final Folder g = f.getFolder("INBOX");
    //     g.open(Folder.READ_ONLY);
    //     System.out.println(" g:" + g.getMessageCount());
    // }

    final Folder inbox = store.getDefaultFolder().getFolder("INBOX");
    inbox.open(Folder.READ_ONLY);
    System.out.println("nmessages: " + inbox.getMessageCount());

    final Message[] messages = inbox.getMessages();
    for (Message message : messages) {
        System.out.println("message:");
        System.out.println(" size: " + message.getSize());
        try {
            if (message.getFrom() != null)
                System.out.println(" From: " + message.getFrom()[0]);
        } catch (final Exception ex) {
            System.out.println(ex.toString());
        }
        System.out.println(" content-type: " + message.getContentType());
        System.out.println(" disposition: " + message.getDisposition());
        System.out.println(" description: " + message.getDescription());
        System.out.println(" filename: " + message.getFileName());
        System.out.println(" line count: " + message.getLineCount());
        System.out.println(" message number: " + message.getMessageNumber());
        System.out.println(" subject: " + message.getSubject());
        try {
            if (message.getAllRecipients() != null)
                for (Address address : message.getAllRecipients())
                    System.out.println(" address: " + address);
        } catch (final Exception ex) {
            System.out.println(ex.toString());
        }
    }

    for (Message message : messages) {
        System.out.println("-----------------------------------------------------");
        Object content;
        try {
            content = message.getContent();
            if (javax.mail.Multipart.class.isInstance(content)) {
                System.out.println("CONTENT OBJECT CLASS: MULTIPART");
                final javax.mail.Multipart multipart = (javax.mail.Multipart) content;
                System.out.println("multipart content type: " + multipart.getContentType());
                System.out.println("multipart count: " + multipart.getCount());
                for (int i = 0; i < multipart.getCount(); i++) {
                    System.out.println(" multipart body[" + i + "]: " + multipart.getBodyPart(i));
                    BodyPart part = multipart.getBodyPart(i);
                    System.out.println(" content-type: " + part.getContentType());
                }
            } else if (String.class.isInstance(content)) {
                System.out.println("CONTENT IS A STRING: {" + content + "}");
            } else {
                System.out.println("CONTENT OBJECT CLASS: " + content.getClass().toString());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    store.close();
}
From source file:de.uni_koblenz.jgralab.utilities.tg2dot.Tg2Dot.java
/**
 * @param args
 * @throws IOException
 * @throws GraphIOException
 */
public static void main(String[] args) {
    Tg2Dot converter = new Tg2Dot();
    converter.getOptions(args);
    System.out.print("Starting processing of graph...");
    try {
        converter.convert();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    System.out.println("Finished Processing.");
}
From source file:edu.umn.cs.sthadoop.operations.STRangeQuery.java
public static void main(String[] args) throws Exception {
    // args = new String[7];
    // args[0] = "/home/louai/nyc-taxi/yellowIndex";
    // args[1] = "/home/louai/nyc-taxi/resultSTRQ";
    // args[2] = "shape:edu.umn.cs.sthadoop.core.STPoint";
    // args[3] = "rect:-74.98451232910156,35.04014587402344,-73.97936248779295,41.49399566650391";
    // args[4] = "interval:2015-01-01,2015-01-02";
    // args[5] = "-overwrite";
    // args[6] = "-no-local";

    // Query for test with output
    // args = new String[6];
    // args[0] = "/home/louai/nyc-taxi/yellowIndex";
    // args[1] = "shape:edu.umn.cs.sthadoop.core.STPoint";
    // args[2] = "rect:-74.98451232910156,35.04014587402344,-73.97936248779295,41.49399566650391";
    // args[3] = "interval:2015-01-01,2015-01-03";
    // args[4] = "-overwrite";
    // args[5] = "-no-local";

    final OperationsParams params = new OperationsParams(new GenericOptionsParser(args));
    final Path[] paths = params.getPaths();
    if (paths.length <= 1 && !params.checkInput()) {
        printUsage();
        System.exit(1);
    }
    if (paths.length >= 2 && !params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }
    if (params.get("rect") == null) {
        String x1 = "-" + Double.toString(Double.MAX_VALUE);
        String y1 = "-" + Double.toString(Double.MAX_VALUE);
        String x2 = Double.toString(Double.MAX_VALUE);
        String y2 = Double.toString(Double.MAX_VALUE);
        System.out.println(x1 + "," + y1 + "," + x2 + "," + y2);
        params.set("rect", x1 + "," + y1 + "," + x2 + "," + y2);
        // System.err.println("You must provide a query range");
        // printUsage();
        // System.exit(1);
    }
    if (params.get("interval") == null) {
        System.err.println("Temporal range missing");
        printUsage();
        System.exit(1);
    }
    TextSerializable inObj = params.getShape("shape");
    if (!(inObj instanceof STPoint) && !(inObj instanceof STRectangle)) {
        LOG.error("Shape is not instance of STPoint or STRectangle");
        printUsage();
        System.exit(1);
    }

    // Get spatio-temporal slices.
    List<Path> STPaths = getIndexedSlices(params);
    final Path outPath = params.getOutputPath();
    final Rectangle[] queryRanges = params.getShapes("rect", new Rectangle());

    // All running jobs
    final Vector<Long> resultsCounts = new Vector<Long>();
    Vector<Job> jobs = new Vector<Job>();
    Vector<Thread> threads = new Vector<Thread>();

    long t1 = System.currentTimeMillis();
    for (Path stPath : STPaths) {
        final Path inPath = stPath;
        for (int i = 0; i < queryRanges.length; i++) {
            final OperationsParams queryParams = new OperationsParams(params);
            OperationsParams.setShape(queryParams, "rect", queryRanges[i]);
            if (OperationsParams.isLocal(new JobConf(queryParams), inPath)) {
                // Run in local mode
                final Rectangle queryRange = queryRanges[i];
                final Shape shape = queryParams.getShape("shape");
                final Path output = outPath == null ? null
                        : (queryRanges.length == 1 ? outPath : new Path(outPath, String.format("%05d", i)));
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        FSDataOutputStream outFile = null;
                        final byte[] newLine = System.getProperty("line.separator", "\n").getBytes();
                        try {
                            ResultCollector<Shape> collector = null;
                            if (output != null) {
                                FileSystem outFS = output.getFileSystem(queryParams);
                                final FSDataOutputStream foutFile = outFile = outFS.create(output);
                                collector = new ResultCollector<Shape>() {
                                    final Text tempText = new Text2();

                                    @Override
                                    public synchronized void collect(Shape r) {
                                        try {
                                            tempText.clear();
                                            r.toText(tempText);
                                            foutFile.write(tempText.getBytes(), 0, tempText.getLength());
                                            foutFile.write(newLine);
                                        } catch (IOException e) {
                                            e.printStackTrace();
                                        }
                                    }
                                };
                            } else {
                                outFile = null;
                            }
                            long resultCount = rangeQueryLocal(inPath, queryRange, shape, queryParams, collector);
                            resultsCounts.add(resultCount);
                        } catch (IOException e) {
                            e.printStackTrace();
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        } finally {
                            try {
                                if (outFile != null)
                                    outFile.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                };
                thread.start();
                threads.add(thread);
            } else {
                // Run in MapReduce mode
                Path outTempPath = outPath == null ? null
                        : new Path(outPath, String.format("%05d", i) + "-" + inPath.getName());
                queryParams.setBoolean("background", true);
                Job job = rangeQueryMapReduce(inPath, outTempPath, queryParams);
                jobs.add(job);
            }
        }
    }

    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob);
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            System.exit(1);
        }
        Counters counters = firstJob.getCounters();
        Counter outputRecordCounter = counters.findCounter(Task.Counter.MAP_OUTPUT_RECORDS);
        resultsCounts.add(outputRecordCounter.getValue());
        jobs.remove(0);
    }
    while (!threads.isEmpty()) {
        try {
            Thread thread = threads.firstElement();
            thread.join();
            threads.remove(0);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long t2 = System.currentTimeMillis();

    System.out.println("QueryPlan:");
    for (Path stPath : STPaths) {
        System.out.println(stPath.getName());
    }
    System.out.println("Time for " + queryRanges.length + " jobs is " + (t2 - t1) + " millis");
    System.out.println("Results counts: " + resultsCounts);
}
From source file:chatbot.Chatbot.java
/*************************************************************************************************
 *
 * @param args
 */
public static void main(String[] args) {
    try {
        if (args != null && args.length > 0 && args[0].equals("-h")) {
            System.out.println("Usage: ");
            System.out.println("TFIDF -h % show this help info");
            System.out.println(" -f fname % run program using a particular input file");
            System.out.println(" -d fname % development mode using a particular input file");
            System.out.println(" -d -s % development mode using s3 to load input files");
            System.out.println("adding -snn % filters responses by non-negative sentiment");
            System.out.println("adding -sm % filters responses by matching sentiment");
        } else if (args != null && args.length > 1 && args[0].equals("-f")) {
            asResource = false;
            isDevelopment = false;
            if (ArrayUtils.contains(args, "-snn"))
                isExcludingNegativeSentiment = true;
            if (ArrayUtils.contains(args, "-sm"))
                isMatchingSentiment = true;
            run(args[1]);
        } else if (args != null && args.length > 1 && args[0].equals("-d")) {
            asResource = false;
            isDevelopment = true;
            if (ArrayUtils.contains(args, "-snn"))
                isExcludingNegativeSentiment = true;
            if (ArrayUtils.contains(args, "-sm"))
                isMatchingSentiment = true;
            if (args[1].equals("-s")) {
                String newFileName = "/home/vish/Documents/chatbot/cornell_movie_dialgos_corpus_parsed/"
                        + "movie_lines_parsed.txt";
                run(newFileName);
            } else {
                run(args[1]);
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:net.java.sen.tools.MkSenDic.java
/**
 * Build sen dictionary.
 *
 * @param args
 *            custom dictionary files. see dic/build.xml.
 */
public static void main(String args[]) {
    ResourceBundle rb = ResourceBundle.getBundle("dictionary");
    DictionaryMaker dm1 = new DictionaryMaker();
    DictionaryMaker dm2 = new DictionaryMaker();
    DictionaryMaker dm3 = new DictionaryMaker();

    // 1st field information of connect file.
    Vector rule1 = new Vector();
    // 2nd field information of connect file.
    Vector rule2 = new Vector();
    // 3rd field information of connect file.
    Vector rule3 = new Vector();
    // 4th field information of connect file.
    // this field shows cost of morpheme connection
    // [size3*(x3*size2+x2)+x1]
    // [size3*(Attr1*size2+Attr2)+Attl]
    short score[] = new short[20131];

    long start = System.currentTimeMillis();

    // /////////////////////////////////////////
    //
    // Step1. Loading connection file.
    //
    log.info("(1/7): reading connection matrix ... ");
    try {
        log.info("connection file = " + rb.getString("text_connection_file"));
        log.info("charset = " + rb.getString("dic.charset"));
        CSVParser csvparser = new CSVParser(new FileInputStream(rb.getString("text_connection_file")),
                rb.getString("dic.charset"));
        String t[];
        int line = 0;
        while ((t = csvparser.nextTokens()) != null) {
            if (t.length < 4) {
                log.warn("invalid line in " + rb.getString("text_connection_file") + ":" + line);
                log.warn(rb.getString("text_connection_file") + "may be broken.");
                break;
            }
            dm1.add(t[0]);
            rule1.add(t[0]);
            dm2.add(t[1]);
            rule2.add(t[1]);
            dm3.add(t[2]);
            rule3.add(t[2]);
            if (line == score.length) {
                score = resize(score);
            }
            score[line++] = (short) Integer.parseInt(t[3]);
        }

        // /////////////////////////////////////////
        //
        // Step2. Building internal dictionary
        //
        log.info("(2/7): building type dictionary ... ");
        dm1.build();
        dm2.build();
        dm3.build();

        // if you want to check a specified morpheme, uncomment and modify the
        // following lines:
        /*
         * System.out.print("22="); dm3.getById(22);
         * System.out.print("368="); dm3.getById(368);
         *
         * System.out.println(dm3.getDicId("?????*,*,*,*,?"));
         * DictionaryMaker.debug = true;
         * System.out.println(dm3.getDicId("?????*,*,*,*,?"));
         * System.out.println(dm3.getDicIdNoCache("?????*,*,*,*,?"));
         */
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(0);
    }

    // -------------------------------------------------

    int size1 = dm1.size();
    int size2 = dm2.size();
    int size3 = dm3.size();
    int ruleSize = rule1.size();

    short matrix[] = new short[size1 * size2 * size3];
    short default_cost = (short) Integer.parseInt(rb.getString("default_connection_cost"));

    // /////////////////////////////////////////
    //
    // Step3. Writing Connection Matrix
    //
    log.info("(3/7): writing conection matrix (" + size1 + " x " + size2 + " x " + size3 + " = "
            + size1 * size2 * size3 + ") ...");

    for (int i = 0; i < (int) (size1 * size2 * size3); i++)
        matrix[i] = default_cost;

    for (int i = 0; i < ruleSize; i++) {
        Vector r1 = dm1.getRuleIdList((String) rule1.get(i));
        Vector r2 = dm2.getRuleIdList((String) rule2.get(i));
        Vector r3 = dm3.getRuleIdList((String) rule3.get(i));
        for (Iterator i1 = r1.iterator(); i1.hasNext();) {
            int ii1 = ((Integer) i1.next()).intValue();
            for (Iterator i2 = r2.iterator(); i2.hasNext();) {
                int ii2 = ((Integer) i2.next()).intValue();
                for (Iterator i3 = r3.iterator(); i3.hasNext();) {
                    int ii3 = ((Integer) i3.next()).intValue();
                    int pos = size3 * (size2 * ii1 + ii2) + ii3;
                    matrix[pos] = score[i];
                }
            }
        }
    }

    try {
        DataOutputStream out = new DataOutputStream(
                new BufferedOutputStream(new FileOutputStream(rb.getString("matrix_file"))));
        out.writeShort(size1);
        out.writeShort(size2);
        out.writeShort(size3);
        for (int i1 = 0; i1 < size1; i1++)
            for (int i2 = 0; i2 < size2; i2++)
                for (int i3 = 0; i3 < size3; i3++) {
                    out.writeShort(matrix[size3 * (size2 * i1 + i2) + i3]);
                    // if (matrix[size3 * (size2 * i1 + i2) + i3] != default_cost) {
                    // }
                }
        out.close();
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(0);
    }
    matrix = null;
    score = null;

    // -------------------------------------------------

    int pos_start = Integer.parseInt(rb.getString("pos_start"));
    int pos_size = Integer.parseInt(rb.getString("pos_size"));

    int di = 0;
    int offset = 0;
    ArrayList dicList = new ArrayList();

    // /////////////////////////////////////////
    //
    // Step4. Reading Morpheme Information
    //
    log.info("(4/7): reading morpheme information ... ");
    String t = null;
    String[] csv = null;
    try {
        // writer for feature file.
        BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
                new FileOutputStream(rb.getString("pos_file")), rb.getString("sen.charset")));
        log.info("load dic: " + rb.getString("text_dic_file"));
        BufferedReader dicStream = null;
        int custom_dic = -1;
        if (args.length == 0) {
            dicStream = new BufferedReader(new InputStreamReader(
                    new FileInputStream(rb.getString("text_dic_file")), rb.getString("dic.charset")));
        } else {
            custom_dic = 0;
            dicStream = new BufferedReader(
                    new InputStreamReader(new FileInputStream(args[custom_dic]), rb.getString("dic.charset")));
        }
        int line = 0;

        CSVData key_b = new CSVData();
        CSVData pos_b = new CSVData();

        while (true) {
            t = dicStream.readLine();
            if (t == null) {
                dicStream.close();
                custom_dic++;
                if (args.length == custom_dic) {
                    break;
                } else {
                    // read custom dictionary
                    log.info("load dic: " + "args[custum_dic]");
                    dicStream = new BufferedReader(new InputStreamReader(new FileInputStream(args[custom_dic]),
                            rb.getString("dic.charset")));
                }
                continue;
            }

            CSVParser parser = new CSVParser(t);
            csv = parser.nextTokens();
            if (csv.length < (pos_size + pos_start)) {
                throw new RuntimeException("format error:" + t);
            }

            key_b.clear();
            pos_b.clear();
            for (int i = pos_start; i < (pos_start + pos_size - 1); i++) {
                key_b.append(csv[i]);
                pos_b.append(csv[i]);
            }
            key_b.append(csv[pos_start + pos_size - 1]);
            pos_b.append(csv[pos_start + pos_size - 1]);

            for (int i = pos_start + pos_size; i < (csv.length - 1); i++) {
                pos_b.append(csv[i]);
            }
            pos_b.append(csv[csv.length - 1]);

            CToken token = new CToken();
            token.rcAttr2 = (short) dm1.getDicId(key_b.toString());
            token.rcAttr1 = (short) dm2.getDicId(key_b.toString());
            token.lcAttr = (short) dm3.getDicId(key_b.toString());
            token.posid = 0;
            token.posID = offset;
            token.length = (short) csv[0].length();
            token.cost = (short) Integer.parseInt(csv[1]);

            dicList.add(new PairObject(csv[0], token));

            byte b[] = pos_b.toString().getBytes(rb.getString("sen.charset"));
            offset += (b.length + 1);
            String pos_b_str = pos_b.toString();
            bw.write(pos_b_str, 0, pos_b_str.length());
            // bw.write(b, 0, b.length);
            bw.write(0);

            if (++di % 50000 == 0)
                log.info("" + di + "... ");
        }
        bw.close();
        // ----end of writing feature.cha ----
    } catch (Exception e) {
        log.error("Error: " + t);
        e.printStackTrace();
        System.exit(1);
    }

    rule1 = null;
    rule2 = null;
    rule3 = null;

    // /////////////////////////////////////////
    //
    // Step5. Sort lexs and write to file
    //
    log.info("(5/7): sorting lex... ");

    int value[] = new int[dicList.size()];
    char key[][] = new char[dicList.size()][];
    int spos = 0;
    int dsize = 0;
    int bsize = 0;
    String prev = "";
    Collections.sort(dicList);

    // /////////////////////////////////////////
    //
    // Step6. Writing Token Information
    //
    log.info("(6/7): writing token... ");
    try {
        // writer for token file.
        DataOutputStream out = new DataOutputStream(
                new BufferedOutputStream(new FileOutputStream(rb.getString("token_file"))));

        // writing 'bos' and 'eos' and 'unknown' token.
        CToken token = new CToken();
        token.rcAttr2 = (short) dm1.getDicId(rb.getString("bos_pos"));
        token.rcAttr1 = (short) dm2.getDicId(rb.getString("bos_pos"));
        token.lcAttr = (short) dm3.getDicId(rb.getString("bos_pos"));
        token.write(out);

        token.rcAttr2 = (short) dm1.getDicId(rb.getString("eos_pos"));
        token.rcAttr1 = (short) dm2.getDicId(rb.getString("eos_pos"));
        token.lcAttr = (short) dm3.getDicId(rb.getString("eos_pos"));
        token.write(out);

        token.rcAttr2 = (short) dm1.getDicId(rb.getString("unknown_pos"));
        token.rcAttr1 = (short) dm2.getDicId(rb.getString("unknown_pos"));
        token.lcAttr = (short) dm3.getDicId(rb.getString("unknown_pos"));
        token.posID = -1;
        token.write(out);

        log.info("key size = " + key.length);
        for (int i = 0; i < key.length; i++) {
            String k = (String) ((PairObject) dicList.get(i)).key;
            if (!prev.equals(k) && i != 0) {
                key[dsize] = ((String) ((PairObject) dicList.get(spos)).key).toCharArray();
                value[dsize] = bsize + (spos << 8);
                dsize++;
                bsize = 1;
                spos = i;
            } else {
                bsize++;
            }
            prev = (String) ((PairObject) dicList.get(i)).key;
            ((CToken) (((PairObject) dicList.get(i)).value)).write(out);
        }
        out.flush();
        out.close();
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    key[dsize] = ((String) ((PairObject) dicList.get(spos)).key).toCharArray();
    value[dsize] = bsize + (spos << 8);
    dsize++;

    dm1 = null;
    dm2 = null;
    dm3 = null;
    dicList = null;

    // /////////////////////////////////////////
    //
    // Step7. Build Double Array
    //
    log.info("(7/7): building Double-Array (size = " + dsize + ") ...");
    DoubleArrayTrie da = new DoubleArrayTrie();
    da.build(key, null, value, dsize);
    try {
        da.save(rb.getString("double_array_file"));
    } catch (Exception e) {
        e.printStackTrace();
    }

    log.info("total time = " + (System.currentTimeMillis() - start) / 1000 + "[ms]");
}
From source file:HLA.java
public static void main(String[] args) throws IOException {
    if (!isVersionOrHigher()) {
        System.err.println("JRE of 1.8+ is required to run Kourami. Exiting.");
        System.exit(1);
    }

    CommandLineParser parser = new DefaultParser();
    Options options = HLA.createOption();
    Options helponlyOpts = HLA.createHelpOption();
    String[] bams = null;
    CommandLine line = null;
    boolean exitRun = false;
    try {
        CommandLine helpcheck = new DefaultParser().parse(helponlyOpts, args, true);
        if (helpcheck.getOptions().length > 0)
            HLA.help(options);
        else {
            line = parser.parse(options, args);
            if (line.hasOption("h")) //help"))
                HLA.help(options);
            else {
                if (line.hasOption("a"))
                    HLA.TYPEADDITIONAL = true;

                HLA.OUTPREFIX = line.getOptionValue("o"); //outfilePrefix");
                String tmploc = line.getOptionValue("d"); //msaDirectory");
                HLA.MSAFILELOC = tmploc;
                if (tmploc.endsWith(File.separator))
                    HLA.MSAFILELOC = tmploc.substring(0, tmploc.length() - 1);
                if (!new File(HLA.MSAFILELOC).exists() || !new File(HLA.MSAFILELOC).isDirectory()) {
                    System.err.println("Given msaDirectory: " + HLA.MSAFILELOC
                            + "\t does NOT exist or is NOT a directory.");
                    exitRun = true;
                } else if (!new File(HLA.MSAFILELOC + File.separator + "hla_nom_g.txt").exists()) {
                    System.err.println("hla_nom_g.txt NOT FOUND in " + HLA.MSAFILELOC);
                    System.err.println("Please download hla_nom_g.txt from the same IMGT Release as msa files.");
                    exitRun = true;
                }
            }
            bams = line.getArgs();

            if (bams.length < 1 || (bams.length == 1 && bams[bams.length - 1].equals("DEBUG1228")))
                throw new ParseException("At least 1 bam file is required. See Usage:");
            else {
                if (bams.length > 1 && bams[bams.length - 1].equals("DEBUG1228")) {
                    String[] tmpbams = new String[bams.length - 1];
                    for (int i = 0; i < bams.length - 1; i++)
                        tmpbams[i] = bams[i];
                    bams = tmpbams;
                    HLA.DEBUG = true;
                }

                for (String b : bams)
                    if (!new File(b).exists()) {
                        System.err.println("Input bam : " + b + " DOES NOT exist. Please check the bam exists.");
                        exitRun = true;
                    }
            }
        }
        if (exitRun)
            throw new ParseException("Exitting . . .");
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        //System.err.println("Failed to parse command line args. Check usage.");
        HLA.help(options);
    }

    String[] list = { "A", "B", "C", "DQA1", "DQB1", "DRB1" };
    String[] extList = { "A", "B", "C", "DQA1", "DQB1", "DRB1", "DOA", "DMA", "DMB", "DPA1", "DPB1", "DRA",
            "DRB3", "DRB5", "F", "G", "H", "J", "L" };
    //,"DPA1", "DPB1", "DRA", "DRB4", "F", "G" , "H", "J" ,"K", "L", "V"};
    //,"DPA1", "DPB1", "DRA", "DRB3", "DRB4", "F", "G" , "H", "J" ,"K", "L", "V"};

    if (HLA.TYPEADDITIONAL)
        list = extList;

    File[] bamfiles = new File[bams.length];
    for (int i = 0; i < bams.length; i++)
        bamfiles[i] = new File(bams[i]);

    // check if <HLA.OUTPREFIX>.result is writable
    // if not exit.
    BufferedWriter resultWriter = null;
    try {
        resultWriter = new BufferedWriter(new FileWriter(HLA.OUTPREFIX + ".result"));
    } catch (IOException ioe) {
        ioe.printStackTrace();
        System.err.println("\n\n>>> CANNOT open output file: " + HLA.OUTPREFIX + ".result <<<\n\n");
        HLA.help(options);
    }

    HLA.log = new LogHandler();
    for (int i = 0; i < args.length; i++)
        HLA.log.append(" " + args[i]);
    HLA.log.appendln();

    try {
        System.err.println("----------------REF GRAPH CONSTRUCTION--------------");
        HLA.log.appendln("----------------REF GRAPH CONSTRUCTION--------------");
        HLA hla = new HLA(list, HLA.MSAFILELOC + File.separator + "hla_nom_g.txt");

        // 1. bubble counting before loading reads.
        //System.err.println("----------------BUBBLE COUNTING: REF GRAPH--------------");
        //HLA.log.appendln("----------------BUBBLE COUNTING: REF GRAPH--------------");
        //hla.countStems();

        System.err.println("---------------- READ LOADING --------------");
        HLA.log.appendln("---------------- READ LOADING --------------");
        hla.loadReads(bamfiles);

        System.err.println("---------------- GRAPH CLEANING --------------");
        HLA.log.appendln("---------------- GRAPH CLEANING --------------");
        hla.flattenInsertionNodes(list);
        hla.removeUnused(list);
        hla.removeStems(list);

        /* updating error prob */
        hla.updateErrorProb();

        hla.log.flush();

        StringBuffer resultBuffer = new StringBuffer();
        HLA.DEBUG3 = HLA.DEBUG;
        hla.countBubblesAndMerge(list, resultBuffer);
        hla.writeResults(resultBuffer, resultWriter);
    } catch (Exception e) {
        e.printStackTrace();
        HLA.log.outToFile();
        System.exit(-1);
    }

    /* printingWeights */
    //hla.printWeights();

    HLA.log.outToFile();
    HLA.log.appendln("NEW_NODE_ADDED:\t" + HLA.NEW_NODE_ADDED);
    HLA.log.appendln("HOPPPING:\t" + HLA.HOPPING);
    HLA.log.appendln("INSERTION_NODE_ADDED:\t" + HLA.INSERTION_NODE_ADDED);
    HLA.log.appendln("INSERTION_WITH_NO_NEW_NODE:\t" + HLA.INSERTION_WITH_NO_NEW_NODE);
    HLA.log.appendln("INSERTION_COUNTS:\t" + HLA.INSERTION);
}