List of usage examples for java.io.PrintStream.close()
public void close()
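Note: PrintStream implements Closeable, so since Java 7 the explicit close() calls shown in the examples below can usually be replaced by try-with-resources, which closes the stream even when an exception interrupts the writes. A minimal sketch (the file name is illustrative, not taken from any example):

    import java.io.IOException;
    import java.io.PrintStream;

    public class PrintStreamCloseSketch {
        public static void main(String[] args) throws IOException {
            // The stream is closed automatically at the end of the block,
            // even if an exception is thrown mid-write.
            try (PrintStream out = new PrintStream("report.txt", "UTF-8")) {
                out.println("hello");
                // PrintStream swallows IOExceptions; checkError() reports them.
                if (out.checkError()) {
                    System.err.println("write failed");
                }
            }
        }
    }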
From source file:ctd.services.getCleanData2.java
public String cleanData() {
    String message = "";
    String timestamp = new java.util.Date().getTime() + "";
    try {
        CleanDataResult result = new CleanDataResult();
        String error_message = "";
        // get parameters
        ResourceBundle res = ResourceBundle.getBundle("settings");
        ResourceBundle cdf_list = ResourceBundle.getBundle("cdf");
        // Base directory ftp folder: here the temporary subfolders are found for each set of
        // CEL-files, and the final assaytoken-based folder.
        String ftp_folder = res.getString("ws.upload_folder");
        String rscript_cleandata = res.getString("ws.rscript_cleandata");
        String rscript = res.getString("ws.rscript");
        // db
        String db_username = res.getString("db.username");
        String db_password = res.getString("db.password");
        String db_database = res.getString("db.database");
        // retrieve the information on the assignment from the database
        SessionFactory sessionFactory = new Configuration().configure().buildSessionFactory();
        Session session = sessionFactory.openSession();
        Transaction tr = session.beginTransaction();
        Query q = session.createQuery(
                "from Ticket where password='" + getPassword() + "' AND ctd_REF='" + getCTD_REF() + "'");
        Ticket ticket = null;
        String closed = "";
        if (q.list().size() != 0) {
            ticket = (Ticket) q.list().get(0);
            closed = ticket.getClosed();
        }
        if (ticket == null) {
            error_message = "Ticket password and CTD_REF don't match.";
        }
        if (closed.equals("yes")) {
            error_message = "Ticket is already used for normalization of these CEL-files.";
            ticket = null;
        }
        if (ticket != null) {
            // get the folder
            String folder = ticket.getFolder();
            String zip_folder = ftp_folder + folder;
            // get contents
            File dir = new File(zip_folder);
            // find the zip file
            File[] files = dir.listFiles(new FileFilter() {
                public boolean accept(File pathname) {
                    return pathname.isFile();
                }
            });
            String cel_zip_file = "";
            String zip_file = "";
            String gct_file = "";
            for (int i = 0; i < files.length; i++) {
                String file = files[i].getName();
                if (file.contains("zip")) {
                    // add the timestamp to the zip
                    files[i].renameTo(new File(zip_folder + "/" + timestamp + "_zip.zip"));
                    file = timestamp + "_zip.zip";
                    cel_zip_file = file;
                    zip_file = zip_folder + "/" + cel_zip_file;
                    gct_file = zip_folder + "/" + timestamp + "_gctfile";
                }
            }
            Process p3 = Runtime.getRuntime().exec("chmod 777 " + zip_file);
            //////////////////////////////////////////////////////////////////
            // Do a system call to normalize: R (zip_folder zip_file gct_file rscript)
            String args = rscript + " --verbose --vanilla " + rscript_cleandata + " -i" + zip_file + " -o"
                    + gct_file + " -w" + zip_folder;
            Logger.getLogger(getTicket.class.getName()).log(Level.INFO, timestamp + ": Running: " + args);
            Process p = Runtime.getRuntime().exec(args);
            // Check if CEL files are unzipped already.
            // This is done by checking every 5 seconds for the existence of a .chip file.
            // This is a bad way of doing this; in future versions of CTD
            // the output of the R scripts should be parsed.
            boolean do_loop = true;
            while (do_loop) {
                File dir2 = new File(zip_folder);
                String[] files2 = dir2.list();
                // check if CEL files are already there
                for (int i = 0; i < files2.length; i++) {
                    String file = files2[i];
                    if (file.endsWith("chip")) {
                        do_loop = false;
                        try {
                            Thread.sleep(5000);
                        } catch (InterruptedException ex) {
                            Logger.getLogger(getCleanData.class.getName()).log(Level.SEVERE, null,
                                    timestamp + ": " + ex);
                        }
                    }
                }
            }
            Logger.getLogger(getTicket.class.getName()).log(Level.INFO,
                    timestamp + ": rscript has finished.");
            File dir2 = new File(zip_folder);
            String[] files2 = dir2.list();
            String chip_file = "";
            String chip_file_db = "";
            ArrayList<String> unziped_files = new ArrayList<String>();
            for (int i = 0; i < files2.length; i++) {
                String file = files2[i];
                if (file.endsWith("CEL")) {
                    unziped_files.add(file);
                }
                if (file.endsWith("chip")) {
                    chip_file = file;
                    chip_file_db = chip_file.split("_CDF_")[1];
                    File fileFile = new File(chip_file);
                    // Make the file correspond to the database entry.
                    // Duplicates can be safely overwritten, and will be.
                    fileFile.renameTo(new File(zip_folder + "/" + chip_file_db));
                }
            }
            // Check if all CEL files are derived from the same chip.
            // This is essential for normalization.
            // Initiate check hashmap. This map contains all the unique chip definition file
            // names. There should be only one per analysis.
            ArrayList<StudySampleAssay> map = new ArrayList<StudySampleAssay>();
            for (int i = 0; i < unziped_files.size(); i++) {
                String cel_file = unziped_files.get(i);
                StudySampleAssay ssa = new StudySampleAssay();
                // String cel_file_path = zip_folder + "/" + cel_file;
                String name = cel_file;
                ssa.setNameRawfile(name);
                ssa.setXREF(getCTD_REF());
                map.add(ssa);
            }
            ticket.getStudySampleAssaies().addAll(map);
            session.saveOrUpdate(ticket);
            session.persist(ticket);
            tr.commit();
            session.close();
            // Storage of chip definition file (CDF), creation of gct file and database storage.
            SessionFactory sessionFactory1 = new Configuration().configure().buildSessionFactory();
            Session session1 = sessionFactory1.openSession();
            // check if cdf (chip definition file) is already stored; if not, store it
            List<ChipAnnotation> chip_annotation = null;
            Query q2 = session1.createQuery("from Chip Where Name='" + chip_file_db + "'");
            if (q2.uniqueResult() != null) {
                Chip chip = (Chip) q2.list().get(0);
                chip_annotation = chip.getChipAnnotation();
            }
            if (q2.uniqueResult() == null) {
                // add this chip and its annotation
                Chip chip_new = new Chip();
                chip_new.setName(chip_file_db);
                // read chip file
                String chip_file_path = zip_folder + "/" + chip_file;
                chip_annotation = readChip(chip_file_path);
                // store the whole
                chip_new.getChipAnnotation().addAll(chip_annotation);
                Transaction tr1 = session1.beginTransaction();
                session1.save(chip_new);
                session1.persist(chip_new);
                tr1.commit();
                session1.close();
            }
            // create the temp file for storage of the data_insert file
            String data_file = zip_folder + "/expression.txt";
            FileOutputStream out = null;
            PrintStream pr = null;
            out = new FileOutputStream(data_file);
            pr = new PrintStream(out);
            // Create array data input file for the database table, find correct foreign keys:
            // get the study_sample_assay id and the probeset ids.
            SessionFactory sessionFactory2 = new Configuration().configure().buildSessionFactory();
            Session session2 = sessionFactory2.openSession();
            // get the chip_annotation_id
            Query q3 = session2.createQuery("from Chip Where Name='" + chip_file_db + "'");
            Chip chip = (Chip) q3.list().get(0);
            chip_annotation = chip.getChipAnnotation();
            Iterator it2 = chip_annotation.iterator();
            // for speed, put the chip annotation id in a hashmap
            HashMap<String, String> chip_annotation_ids = new HashMap<String, String>();
            while (it2.hasNext()) {
                ChipAnnotation ca = (ChipAnnotation) it2.next();
                String id = ca.getId().toString();
                String ps = ca.getProbeset();
                chip_annotation_ids.put(ps, id);
            }
            // create the .gct files
            try {
                Query qt = session2.createQuery("from Ticket where password='" + getPassword()
                        + "' AND ctd_REF='" + getCTD_REF() + "'");
                ticket = null;
                if (qt.list().size() != 0) {
                    ticket = (Ticket) qt.list().get(0);
                }
                Iterator it3 = ticket.getStudySampleAssaies().iterator();
                while (it3.hasNext()) {
                    StudySampleAssay ssa = (StudySampleAssay) it3.next();
                    String name_raw_file = ssa.getNameRawfile();
                    String sampleToken = getSampletokens().get(name_raw_file);
                    String ssa_id = ssa.getId().toString();
                    error_message = error_message + name_raw_file;
                    String gct_file_generated = gct_file + ".gct";
                    ArrayList<Double> values = writeFile(pr, chip_annotation_ids, ssa_id, gct_file_generated,
                            name_raw_file.replaceAll(".CEL", ""));
                    Statistics stat = new Statistics();
                    stat.setData(values);
                    Double average = stat.getAverage();
                    Double std = stat.getSTD();
                    ssa.setXREF(getCTD_REF());
                    ssa.setAverage(average);
                    ssa.setStudyToken(getStudytoken());
                    ssa.setSampleToken(sampleToken);
                    ssa.setStd(std);
                }
            } catch (IOException e) {
                Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE, timestamp
                        + ": ERROR IN getCleanData2: " + e.getMessage() + " " + e.getLocalizedMessage());
            }
            pr.close();
            out.close();
            // update ticket
            Transaction tr2 = session2.beginTransaction();
            session2.update(ticket);
            session2.persist(ticket);
            tr2.commit();
            session2.close();
            // import the data into the database
            String u = "--user=" + db_username;
            String passw = "--password=" + db_password;
            String[] commands = new String[] { "mysqlimport", u, passw, "--local", db_database, data_file };
            Process p4 = Runtime.getRuntime().exec(commands);
            message = message + " RMA and GRSN on the CEL-files is done, data is stored.";
            // close the ticket when finished; normalization can only be performed once by the client
            CloseTicket();
            // remove zip and data file (expression.txt)
            File fileFolderOld = new File(zip_folder);
            File fileFolderDest = new File(res.getString("ws.upload_folder") + getCTD_REF());
            File[] listOfFiles = fileFolderOld.listFiles();
            for (int i = 0; i < listOfFiles.length; i++) {
                if (listOfFiles[i].getPath().toLowerCase().endsWith(".zip")
                        || listOfFiles[i].getPath().toLowerCase().endsWith("expression.txt")) {
                    try {
                        listOfFiles[i].delete();
                    } catch (Exception e) {
                        Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE,
                                timestamp + ": ERROR IN getCleanData2 (try to delete): " + e.toString());
                    }
                } else {
                    try {
                        FileUtils.copyFileToDirectory(listOfFiles[i], fileFolderDest, false);
                        listOfFiles[i].delete();
                    } catch (Exception e) {
                        Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE,
                                timestamp + ": ERROR IN getCleanData2 (try to copy): " + e.toString());
                    }
                }
            }
            // remove temporary folder
            try {
                fileFolderOld.delete();
            } catch (Exception e) {
                Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE,
                        timestamp + ": ERROR IN getCleanData2: " + e.toString());
            }
            // --------------------------------------------
            // This piece of code is added in order to clean up all the files of aborted upload
            // procedures. It checks for old folders (more than a day old and with a temporary
            // name, which is just a number from 1 upwards; it is assumed that a temporary folder
            // has a name shorter than 10 chars) and removes these files and folders.
            File folderData = new File(res.getString("ws.upload_folder"));
            long lngTimestamp = new java.util.Date().getTime();
            listOfFiles = folderData.listFiles();
            for (int i = 0; i < listOfFiles.length; i++) {
                if (listOfFiles[i].lastModified() < (lngTimestamp - 10000)
                        && listOfFiles[i].getName().length() < 10) {
                    // This folder is more than a day old.
                    // We know it is a temporary folder because the name is less than 10 chars long.
                    File[] lstDelete = listOfFiles[i].listFiles();
                    for (int j = 0; j < lstDelete.length; j++) {
                        // delete all content of the old folder
                        lstDelete[j].delete();
                    }
                    // delete the old folder
                    if (!listOfFiles[i].delete()) {
                        Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE,
                                "delSample(): Folder deletion failed: " + listOfFiles[i].getName());
                    }
                }
            }
            // --------------------------------------------
        }
        // set the messages of the response
        result.setErrorMessage(error_message);
        result.setMessage(message);
        // use SKARINGA in order to create the JSON response
        ObjectTransformer trans = null;
        try {
            trans = ObjectTransformerFactory.getInstance().getImplementation();
            message = trans.serializeToString(result);
        } catch (NoImplementationException ex) {
            Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE,
                    "SKARINGA ERROR IN getCleanData2: " + ex.getLocalizedMessage());
        }
    } catch (Exception e) {
        Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE,
                timestamp + ": ERROR IN getCleanData2: " + e.toString());
    }
    return message;
}
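In the example above, pr.close() followed by out.close() is safe but redundant: PrintStream.close() also closes the stream it wraps, and closing an already-closed FileOutputStream is a no-op. A minimal sketch of the same pairing (file name and payload are illustrative):

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;

    public class CloseWrappedStream {
        public static void main(String[] args) throws IOException {
            FileOutputStream out = new FileOutputStream("expression.txt");
            PrintStream pr = new PrintStream(out);
            pr.println("probeset\tvalue");
            pr.close();  // flushes, then closes the wrapped FileOutputStream too
            out.close(); // redundant: the stream is already closed
        }
    }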
From source file:org.apache.asterix.external.parser.test.RecordWithMetaTest.java
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void runTest() throws Exception {
    File file = new File("target/beer.adm");
    File expected = new File(getClass().getResource("/results/beer.txt").toURI().getPath());
    try {
        FileUtils.deleteQuietly(file);
        PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
        // create key type
        IAType[] keyTypes = { BuiltinType.ASTRING };
        String keyName = "id";
        List<String> keyNameAsList = new ArrayList<>(1);
        keyNameAsList.add(keyName);
        // create record type
        String[] recordFieldNames = {};
        IAType[] recordFieldTypes = {};
        recordType = new ARecordType("value", recordFieldNames, recordFieldTypes, true);
        // create the meta type
        String[] metaFieldNames = { keyName, "flags", "expiration", "cas", "rev", "vbid", "dtype" };
        IAType[] metaFieldTypes = { BuiltinType.ASTRING, BuiltinType.AINT32, BuiltinType.AINT64,
                BuiltinType.AINT64, BuiltinType.AINT32, BuiltinType.AINT32, BuiltinType.AINT32 };
        ARecordType metaType = new ARecordType("meta", metaFieldNames, metaFieldTypes, true);
        int valueIndex = 4;
        char delimiter = ',';
        int numOfTupleFields = 3;
        int[] pkIndexes = { 0 };
        int[] pkIndicators = { 1 };
        List<Path> paths = new ArrayList<>();
        paths.add(Paths.get(getClass().getResource("/beer.csv").toURI()));
        FileSystemWatcher watcher = new FileSystemWatcher(paths, null, false);
        // create input stream
        LocalFSInputStream inputStream = new LocalFSInputStream(watcher);
        // create line record reader
        QuotedLineRecordReader lineReader = new QuotedLineRecordReader(true, inputStream,
                ExternalDataConstants.DEFAULT_QUOTE);
        // create csv with json record reader
        CSVToRecordWithMetadataAndPKConverter recordConverter = new CSVToRecordWithMetadataAndPKConverter(
                valueIndex, delimiter, metaType, recordType, pkIndicators, pkIndexes, keyTypes);
        // create the value parser (ADM in this case)
        ADMDataParser valueParser = new ADMDataParser(recordType, false);
        // create parser
        RecordWithMetadataParser parser = new RecordWithMetadataParser(metaType, valueParser, recordConverter);
        // create serializer/deserializer and printer factories
        ISerializerDeserializer[] serdes = new ISerializerDeserializer[keyTypes.length + 2];
        IPrinterFactory[] printerFactories = new IPrinterFactory[keyTypes.length + 2];
        for (int i = 0; i < keyTypes.length; i++) {
            serdes[i + 2] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(keyTypes[i]);
            printerFactories[i + 2] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(keyTypes[i]);
        }
        serdes[0] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
        serdes[1] = AqlSerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(metaType);
        printerFactories[0] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(recordType);
        printerFactories[1] = AqlADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(metaType);
        // create output descriptor
        IPrinter[] printers = new IPrinter[printerFactories.length];
        for (int i = 0; i < printerFactories.length; i++) {
            printers[i] = printerFactories[i].createPrinter();
        }
        ArrayTupleBuilder tb = new ArrayTupleBuilder(numOfTupleFields);
        while (lineReader.hasNext()) {
            IRawRecord<char[]> record = lineReader.next();
            tb.reset();
            parser.parse(record, tb.getDataOutput());
            tb.addFieldEndOffset();
            parser.parseMeta(tb.getDataOutput());
            tb.addFieldEndOffset();
            parser.appendLastParsedPrimaryKeyToTuple(tb);
            // print tuple
            printTuple(tb, printers, printStream);
        }
        lineReader.close();
        printStream.close();
        Assert.assertTrue(FileUtils.contentEquals(file, expected));
    } catch (Throwable th) {
        System.err.println("TEST FAILED");
        th.printStackTrace();
        throw th;
    } finally {
        FileUtils.deleteQuietly(file);
    }
    System.err.println("TEST PASSED.");
}
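The test above reaches printStream.close() only on the success path; if parsing throws, the stream stays open until the process exits. A sketch of the same open/write/close pattern with try-with-resources instead (the path is taken from the test, the payload is illustrative):

    import java.io.PrintStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    public class TestOutputSketch {
        public static void main(String[] args) throws Exception {
            try (PrintStream printStream = new PrintStream(
                    Files.newOutputStream(Paths.get("target/beer.adm")))) {
                printStream.println("tuple");
            } // closed here on both the success and the failure path
        }
    }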
From source file:com.netscape.cms.servlet.csadmin.ConfigurationUtils.java
public static void importLDIFS(String param, LDAPConnection conn, boolean suppressErrors)
        throws IOException, EPropertyNotFound, EBaseException {
    IConfigStore cs = CMS.getConfigStore();
    logger.debug("importLDIFS: param=" + param);
    String v = cs.getString(param);
    String baseDN = cs.getString("internaldb.basedn");
    String database = cs.getString("internaldb.database");
    String instancePath = cs.getString("instanceRoot");
    String instanceId = cs.getString("instanceId");
    String cstype = cs.getString("cs.type");
    String dbuser = cs.getString("preop.internaldb.dbuser", "uid=" + DBUSER + ",ou=people," + baseDN);
    String configDir = instancePath + File.separator + cstype.toLowerCase() + File.separator + "conf";
    StringTokenizer tokenizer = new StringTokenizer(v, ",");
    while (tokenizer.hasMoreTokens()) {
        String token = tokenizer.nextToken().trim();
        int index = token.lastIndexOf("/");
        String name = token;
        if (index != -1) {
            name = token.substring(index + 1);
        }
        logger.debug("importLDIFS(): ldif file = " + token);
        String filename = configDir + File.separator + name;
        logger.debug("importLDIFS(): ldif file copy to " + filename);
        PrintStream ps = null;
        BufferedReader in = null;
        in = new BufferedReader(new InputStreamReader(new FileInputStream(token), "UTF-8"));
        ps = new PrintStream(filename, "UTF-8");
        while (in.ready()) {
            String s = in.readLine();
            int n = s.indexOf("{");
            if (n == -1) {
                ps.println(s);
            } else {
                boolean endOfline = false;
                while (n != -1) {
                    ps.print(s.substring(0, n));
                    int n1 = s.indexOf("}");
                    String tok = s.substring(n + 1, n1);
                    if (tok.equals("instanceId")) {
                        ps.print(instanceId);
                    } else if (tok.equals("rootSuffix")) {
                        ps.print(baseDN);
                    } else if (tok.equals("database")) {
                        ps.print(database);
                    } else if (tok.equals("dbuser")) {
                        ps.print(dbuser);
                    }
                    if ((s.length() + 1) == n1) {
                        endOfline = true;
                        break;
                    }
                    s = s.substring(n1 + 1);
                    n = s.indexOf("{");
                }
                if (!endOfline) {
                    ps.println(s);
                }
            }
        }
        in.close();
        ps.close();
        ArrayList<String> errors = new ArrayList<String>();
        LDAPUtil.importLDIF(conn, filename, errors);
        if (!errors.isEmpty()) {
            logger.error("importLDIFS(): LDAP Errors in importing " + filename);
            for (String error : errors) {
                logger.error(error);
            }
            if (!suppressErrors) {
                throw new EBaseException("LDAP Errors in importing " + filename);
            }
        }
    }
}
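importLDIFS uses the PrintStream(String fileName, String charsetName) convenience constructor, so the PrintStream itself owns the file handle and ps.close() is what releases it on each loop iteration. A condensed sketch of that read-substitute-write loop, assuming hypothetical file names and a single hardcoded substitution value:

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.io.PrintStream;

    public class SubstituteAndCopy {
        public static void main(String[] args) throws IOException {
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(new FileInputStream("in.ldif"), "UTF-8"));
                 PrintStream ps = new PrintStream("out.ldif", "UTF-8")) {
                String s;
                while ((s = in.readLine()) != null) {
                    // "pki-tomcat" stands in for the configured instanceId
                    ps.println(s.replace("{instanceId}", "pki-tomcat"));
                }
            } // reader and stream are closed even if a read fails
        }
    }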
From source file:com.cyberway.issue.io.arc.ARC2WCDX.java
public static Object[] createWcdx(ARCReader reader) {
    reader.setDigest(true);
    String wcdxPath = reader.getReaderIdentifier().replaceAll("\\.arc(\\.gz)?$", ".wcdx.gz");
    File wcdxFile = new File(wcdxPath + ".open");
    PrintStream writer = null;
    long count = 0;
    try {
        writer = new PrintStream(new GZIPOutputStream(new FileOutputStream(wcdxFile)));
        // write header: legend + timestamp
        StringBuilder legend = new StringBuilder();
        appendField(legend, "CDX");
        appendField(legend, "surt-uri");
        appendField(legend, "b"); // ARC timestamp
        appendField(legend, "http-date");
        appendField(legend, "s"); // status code
        appendField(legend, "m"); // media type
        appendField(legend, "sha1"); // content sha1
        appendField(legend, "g"); // ARC name
        appendField(legend, "V"); // start offset
        appendField(legend, "end-offset"); // TODO: implement
        appendField(legend, "n"); // ARC record length TODO: verify
        appendField(legend, "http-content-length");
        appendField(legend, "http-last-modified");
        appendField(legend, "http-expires");
        appendField(legend, "http-etag");
        appendField(legend, "http-location");
        appendField(legend, "e"); // IP
        appendField(legend, "a"); // original URL
        // WCDX version + creation time: crude version control
        appendField(legend, WCDX_VERSION + "@" + ArchiveUtils.get14DigitDate());
        writer.println(legend.toString());
        Iterator iter = reader.iterator();
        count = 0;
        while (iter.hasNext()) {
            ARCRecord record = (ARCRecord) iter.next();
            record.close();
            ARCRecordMetaData h = (ARCRecordMetaData) record.getHeader();
            Header[] httpHeaders = record.getHttpHeaders();
            if (httpHeaders == null) {
                httpHeaders = new Header[0];
            }
            HeaderGroup hg = new HeaderGroup();
            hg.setHeaders(httpHeaders);
            StringBuilder builder = new StringBuilder();
            // SURT-form URI
            appendField(builder, SURT.fromURI(h.getUrl()));
            // record timestamp ('b')
            appendField(builder, h.getDate());
            // http header date
            appendTimeField(builder, hg.getFirstHeader("Date"));
            // response code ('s')
            appendField(builder, h.getStatusCode());
            // media type ('m')
            appendField(builder, h.getMimetype());
            // content checksum (like 'c', but here Base32 SHA1)
            appendField(builder, record.getDigestStr());
            // arc name ('g')
            appendField(builder, reader.getFileName());
            // compressed start offset ('V')
            appendField(builder, h.getOffset());
            // compressed end offset (?)
            // appendField(builder,
            //     reader.getInputStream() instanceof RepositionableStream
            //         ? ((GzippedInputStream) reader.getInputStream()).vPosition()
            //         : "-");
            // TODO; leave unavailable for now
            appendField(builder, "-");
            // uncompressed (declared in ARC headerline) record length
            appendField(builder, h.getLength());
            // http header content-length
            appendField(builder, hg.getFirstHeader("Content-Length"));
            // http header mod-date
            appendTimeField(builder, hg.getFirstHeader("Last-Modified"));
            // http header expires
            appendTimeField(builder, hg.getFirstHeader("Expires"));
            // http header etag
            appendField(builder, hg.getFirstHeader("ETag"));
            // http header redirect ('Location' header?)
            appendField(builder, hg.getFirstHeader("Location"));
            // ip ('e')
            appendField(builder, h.getIp());
            // original URI
            appendField(builder, h.getUrl());
            // TODO MAYBE - a title from inside content?
            writer.println(builder.toString());
            count++;
        }
        wcdxFile.renameTo(new File(wcdxPath));
    } catch (IOException e) {
        // soldier on: but leave '.open' wcdx file as indicator of error
        if (!wcdxFile.exists()) {
            try {
                wcdxFile.createNewFile();
            } catch (IOException e1) {
                throw new RuntimeException(e1);
            }
        }
    } catch (RuntimeException e) {
        // soldier on: but leave '.open' wcdx file as indicator of error
        if (!wcdxFile.exists()) {
            try {
                wcdxFile.createNewFile();
            } catch (IOException e1) {
                throw new RuntimeException(e1);
            }
        }
    } finally {
        if (writer != null) {
            writer.close();
        }
    }
    return new Object[] { wcdxPath, count };
}
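The close() in the finally block above matters twice over: PrintStream.close() flushes its own buffer and then closes the wrapped GZIPOutputStream, whose close() in turn finishes the deflater and writes the gzip trailer. A stream abandoned without close() leaves a truncated .gz file. A minimal sketch (file name and record content are illustrative):

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;
    import java.util.zip.GZIPOutputStream;

    public class GzipIndexSketch {
        public static void main(String[] args) throws IOException {
            PrintStream writer = null;
            try {
                writer = new PrintStream(new GZIPOutputStream(new FileOutputStream("index.wcdx.gz.open")));
                writer.println(" CDX surt-uri b");
            } finally {
                if (writer != null) {
                    writer.close(); // flushes and writes the gzip trailer
                }
            }
        }
    }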
From source file:nl.vu.psy.rite.Rite.java
public void run() {
    if (run) {
        Recipe r = null;
        try {
            System.out.println("Starting work cycle..");
            long sleepTime = Long.parseLong(getProperty(PropertyKeys.INTERVAL));
            long scrubDelay = Long.parseLong(getProperty(PropertyKeys.SCRUBDELAY));
            long idleDelay = Long.parseLong(getProperty(PropertyKeys.IDLEDELAY));
            int maxFailures = Integer.parseInt(getProperty(PropertyKeys.MAXFAILURES));
            int maxScrubs = Integer.parseInt(getProperty(PropertyKeys.MAXSCRUBS));
            int maxRecipes = Integer.parseInt(getProperty(PropertyKeys.MAXRECIPES));
            while (!lifeTimeExceeded() && !halt) {
                // check commands
                ClientCommand co = rh.getClientCommand(identifier);
                if (co == null) {
                    if (idle) {
                        System.out.println("-------------------------------------------------------------------------------");
                        System.out.println("Idle.");
                        System.out.println("Time: " + TimeStamp.dateToString(new Date()));
                        System.out.println("-------------------------------------------------------------------------------");
                        try {
                            Thread.sleep(idleDelay);
                        } catch (InterruptedException e) {
                            System.out.println("The work cycle was interrupted: " + e.getMessage());
                            System.out.println("=== MARK: " + TimeStamp.dateToString(new Date()) + " ===");
                            return;
                        }
                        System.out.println("=== MARK: " + TimeStamp.dateToString(new Date()) + " ===");
                    } else {
                        r = rh.lockRecipe(); // lock and retrieve recipe
                        if (r == null) {
                            System.out.println("-------------------------------------------------------------------------------");
                            System.out.println("No recipe. Scrubbing host.");
                            System.out.println("Time: " + TimeStamp.dateToString(new Date()));
                            System.out.println("-------------------------------------------------------------------------------");
                            rh.scrubHost();
                            scrubs++;
                            if (scrubs > maxScrubs) {
                                System.out.println("The maximum number of scrubs has been reached. This client will shutdown...");
                                halt = true;
                            }
                            System.out.println("=== MARK: " + TimeStamp.dateToString(new Date()) + " ===");
                            try {
                                Thread.sleep(scrubDelay);
                            } catch (InterruptedException e) {
                                System.out.println("The work cycle was interrupted: " + e.getMessage());
                                System.out.println("=== MARK: " + TimeStamp.dateToString(new Date()) + " ===");
                                return;
                            }
                        } else {
                            // reset scrub counter
                            scrubs = 0;
                            // set up streams for recipe output
                            PrintStream out = null;
                            PrintStream err = null;
                            try {
                                out = new PrintStream(new FileOutputStream("recipe." + r.getIdentifier() + ".stdout"));
                                PrintStream teeOut = new PrintStream(new TeeOutputStream(System.out, out));
                                System.setOut(teeOut);
                                err = new PrintStream(new FileOutputStream("recipe." + r.getIdentifier() + ".stderr"));
                                PrintStream teeErr = new PrintStream(new TeeOutputStream(System.err, err));
                                System.setErr(teeErr);
                            } catch (FileNotFoundException e) {
                                // absorb
                                System.out.println("Could not tee output streams to file. Outputting to main application streams only.");
                            }
                            System.out.println("-------------------------------------------------------------------------------");
                            System.out.println("Starting recipe: " + r.getIdentifier() + ".");
                            System.out.println("Time: " + TimeStamp.dateToString(new Date()));
                            System.out.println("-------------------------------------------------------------------------------");
                            recipeCooker.setRecipe(r); // run recipe
                            // wait for completion
                            while (!r.hasCompleted()) {
                                try {
                                    Thread.sleep(sleepTime);
                                } catch (InterruptedException e) {
                                    System.out.println("The work cycle was interrupted: " + e.getMessage());
                                    System.out.println("Attempting release of: " + r.getIdentifier());
                                    r = recipeCooker.getRecipe();
                                    recipeCooker.removeRecipe();
                                    rh.releaseRecipe(r);
                                    System.out.println("=== MARK: " + TimeStamp.dateToString(new Date()) + " ===");
                                    return;
                                }
                            }
                            r = recipeCooker.getRecipe();
                            recipeCooker.removeRecipe();
                            rh.releaseRecipe(r);
                            recipes++;
                            if (r.hasFailed()) {
                                failures++;
                            }
                            if (failures >= maxFailures) {
                                System.out.println("The maximum number of recipe failures has been reached. This client will shutdown...");
                                halt = true;
                            }
                            if (maxRecipes > -1 && recipes >= maxRecipes) {
                                System.out.println("The maximum number of completed recipes has been reached. This client will shutdown...");
                                halt = true;
                            }
                            System.out.println("=== MARK: " + TimeStamp.dateToString(new Date()) + " ===");
                            System.setOut(System.out);
                            System.setErr(System.err);
                            if (out != null) {
                                out.close();
                                out = null;
                            }
                            if (err != null) {
                                err.close();
                                err = null;
                            }
                        }
                    }
                } else {
                    // handle command
                    // FIXME not too fond of all these flags
                    System.out.println("-------------------------------------------------------------------------------");
                    System.out.println("Got command: " + co.getCommand());
                    System.out.println("Time: " + TimeStamp.dateToString(new Date()));
                    System.out.println("-------------------------------------------------------------------------------");
                    switch (co.getCommand()) {
                    case HALT:
                        System.out.println("Halting client...");
                        halt = true;
                        break;
                    case IDLE:
                        System.out.println("Setting client to idle...");
                        idle = true;
                        break;
                    case RUN:
                        System.out.println("Setting client to run...");
                        idle = false;
                        break;
                    default:
                        break;
                    }
                    System.out.println("=== MARK: " + TimeStamp.dateToString(new Date()) + " ===");
                }
            }
            // System.out.println("Shutting down...");
            if (rh.hasLock()) {
                System.out.println("Attempting release of: " + r.getIdentifier());
                r = recipeCooker.getRecipe();
                recipeCooker.removeRecipe();
                rh.releaseRecipe(r);
                System.out.println("=== MARK: " + TimeStamp.dateToString(new Date()) + " ===");
                return;
            }
        } catch (Exception e) {
            System.out.println("An exception was encountered while running: " + e.getMessage());
            System.out.println("Exiting!");
            return;
        }
    }
}
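One detail worth flagging in the example above: System.setOut(System.out) is a no-op, because by that point System.out already refers to the tee stream, so the original console stream is never restored (only the file branches get closed). Saving the originals before redirecting avoids this; a sketch using Commons IO's TeeOutputStream, with an illustrative file name:

    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;
    import org.apache.commons.io.output.TeeOutputStream;

    public class TeeAndRestore {
        public static void main(String[] args) throws IOException {
            PrintStream originalOut = System.out; // keep a handle before redirecting
            PrintStream fileOut = new PrintStream(new FileOutputStream("recipe.stdout"));
            System.setOut(new PrintStream(new TeeOutputStream(originalOut, fileOut)));
            try {
                System.out.println("goes to console and file");
            } finally {
                System.setOut(originalOut); // restore the real console stream
                fileOut.close();            // close only the file branch, never the console
            }
        }
    }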
From source file:de.juwimm.cms.remote.ContentServiceSpringImpl.java
@Override
protected void handleDeployEdition(Integer editionId) throws Exception {
    try {
        editionCronService.logEditionStatusInfo(LiveserverDeployStatus.CreateDeployFileForExport, editionId);
        if (log.isDebugEnabled()) log.debug("Start creating Edition");
        EditionHbm edition = getEditionHbmDao().load(editionId);
        PrintStream out = createEditionOutputStream(edition);
        if (log.isDebugEnabled()) log.debug("siteToXml");
        getEditionHbmDao().siteToXml(edition.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("picturesToXmlRecursive");
        getEditionHbmDao().picturesToXmlRecursive(null, edition.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("documentsToXmlRecursive");
        getEditionHbmDao().documentsToXmlRecursive(null, edition.getSiteId(), out, true, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("unitsToXmlRecursive");
        getEditionHbmDao().unitsToXmlRecursive(edition.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("hostsToXmlRecursive");
        getEditionHbmDao().hostsToXmlRecursive(edition.getSiteId(), out, edition);
        if (log.isDebugEnabled()) log.debug("viewdocumentsToXmlRecursive");
        getEditionHbmDao().viewdocumentsToXmlRecursive(edition.getSiteId(), out, edition);
        if (log.isDebugEnabled()) log.debug("realmsToXmlRecursive");
        getEditionHbmDao().realmsToXmlRecursive(edition.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("Creating ViewComponent Data");
        Iterator vdIt = getViewDocumentHbmDao().findAll(edition.getSiteId()).iterator();
        while (vdIt.hasNext()) {
            ViewDocumentHbm vdl = (ViewDocumentHbm) vdIt.next();
            getViewComponentHbmDao().toXml(vdl.getViewComponent(), null, true, true, false, false, -1, false,
                    false, Constants.DEPLOY_TYPE_FULL, out);
        }
        if (log.isDebugEnabled()) log.debug("Finished creating ViewComponent Data");
        out.println("</edition>");
        out.flush();
        out.close();
        out = null;
        if (log.isDebugEnabled()) log.debug("Finished creating Edition");
    } catch (Exception e) {
        if (log.isDebugEnabled()) log.debug("Error while creating Edition", e);
        editionCronService.logEditionStatusException(editionId, e.getMessage());
        throw new UserException(e.getMessage(), e);
    }
}
From source file:de.juwimm.cms.remote.ContentServiceSpringImpl.java
@Override
protected void handleDeployUnitEdition(Integer editionId, Integer unitId) throws Exception {
    try {
        if (log.isDebugEnabled()) log.debug("Start creating unitEdition");
        editionCronService.logEditionStatusInfo(LiveserverDeployStatus.CreateDeployFileForExport, editionId);
        //if (log.isInfoEnabled()) log.info("createDeployFile " + AuthenticationHelper.getUserName());
        EditionHbm edition = getEditionHbmDao().load(editionId);
        PrintStream out = createEditionOutputStream(edition);
        if (log.isDebugEnabled()) log.debug("creating outputstream for unitEdition");
        // site info is needed to connect to the live server
        if (log.isDebugEnabled()) log.debug("siteToXml");
        getEditionHbmDao().siteToXml(edition.getSiteId(), out, edition);
        System.gc();
        // all ..toXml + unitId, to reuse them in unitDeploy
        if (log.isDebugEnabled()) log.debug("picturesToXmlRecursive");
        getEditionHbmDao().picturesToXmlRecursive(unitId, edition.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("documentsToXmlRecursive");
        getEditionHbmDao().documentsToXmlRecursive(unitId, edition.getSiteId(), out, true, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("unitToXml");
        getEditionHbmDao().unitToXml(unitId, out, edition);
        System.gc();
        // if (log.isDebugEnabled()) log.debug("viewdocumentsToXmlRecursive");
        // getEditionHbmDao().viewdocumentsToXmlRecursive(edition.getSiteId(), out, edition);
        if (log.isDebugEnabled()) log.debug("realmsToXmlUsed: unit - " + unitId);
        getEditionHbmDao().realmsToXmlUsed(unitId, out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("Creating ViewComponent Data");
        Iterator vdIt = getViewDocumentHbmDao().findAll(edition.getSiteId()).iterator();
        while (vdIt.hasNext()) {
            ViewDocumentHbm vdl = (ViewDocumentHbm) vdIt.next();
            ViewComponentHbm vch = getViewComponentHbmDao().find4Unit(unitId, vdl.getViewDocumentId());
            if (vch != null)
                getViewComponentHbmDao().toXml(vch, unitId, true, true, true, false, -1, false, false,
                        Constants.DEPLOY_TYPE_UNIT, out);
        }
        if (log.isDebugEnabled()) log.debug("Finished creating ViewComponent Data");
        out.println("</edition>");
        out.flush();
        out.close();
        out = null;
        if (log.isDebugEnabled()) log.debug("Finished creating unitEdition");
    } catch (Exception e) {
        if (log.isDebugEnabled()) log.debug("error while creating unitEdition", e);
        editionCronService.logEditionStatusException(editionId, e.getMessage());
        throw new UserException(e.getMessage(), e);
    }
}
From source file:de.juwimm.cms.remote.ContentServiceSpringImpl.java
/**
 * Creates a new FULL-Edition for the active site and returns it as SOAP-Attachment.
 *
 * @see de.juwimm.cms.remote.ContentServiceSpring#exportEditionFull()
 */
@Override
protected InputStream handleExportEditionFull() throws Exception {
    try {
        if (log.isInfoEnabled()) log.info("createEditionForExport " + AuthenticationHelper.getUserName());
        File fle = File.createTempFile("edition_full_export", ".xml.gz");
        FileOutputStream fout = new FileOutputStream(fle);
        GZIPOutputStream gzoudt = new GZIPOutputStream(fout);
        PrintStream out = new PrintStream(gzoudt, true, "UTF-8");
        UserHbm invoker = getUserHbmDao().load(AuthenticationHelper.getUserName());
        SiteHbm site = invoker.getActiveSite();
        if (log.isDebugEnabled())
            log.debug("Invoker is: " + invoker.getUserId() + " within Site " + site.getName());
        EditionHbm edition = getEditionHbmDao().create("INTERIMEDITION", null, null, true);
        if (log.isDebugEnabled()) log.debug("Dummy-Edition created");
        out.println("<edition>");
        if (log.isDebugEnabled()) log.debug("picturesToXmlRecursive");
        getEditionHbmDao().picturesToXmlRecursive(null, site.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("documentsToXmlRecursive");
        getEditionHbmDao().documentsToXmlRecursive(null, site.getSiteId(), out, true, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("unitsToXmlRecursive");
        getEditionHbmDao().unitsToXmlRecursive(site.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("hostsToXmlRecursive");
        getEditionHbmDao().hostsToXmlRecursive(site.getSiteId(), out, edition);
        if (log.isDebugEnabled()) log.debug("viewdocumentsToXmlRecursive");
        getEditionHbmDao().viewdocumentsToXmlRecursive(site.getSiteId(), out, edition);
        if (log.isDebugEnabled()) log.debug("realmsToXmlRecursive");
        getEditionHbmDao().realmsToXmlRecursive(site.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("Creating ViewComponent Data");
        Iterator vdIt = getViewDocumentHbmDao().findAll(site.getSiteId()).iterator();
        while (vdIt.hasNext()) {
            ViewDocumentHbm vdl = (ViewDocumentHbm) vdIt.next();
            // vdl.getViewComponent().toXml(null, 0, true, false, 1, false, false, out);
            getViewComponentHbmDao().toXml(vdl.getViewComponent(), null, true, false, -1, false, false, out);
        }
        if (log.isDebugEnabled()) log.debug("Finished creating ViewComponent Data");
        out.println("</edition>");
        getEditionHbmDao().remove(edition);
        out.flush();
        out.close();
        out = null;
        return new FileInputStream(fle);
    } catch (Exception e) {
        throw new UserException(e.getMessage(), e);
    }
}
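handleExportEditionFull builds its stream with the three-argument constructor PrintStream(OutputStream out, boolean autoFlush, String encoding). With autoFlush set, println already flushes, and close() flushes once more before closing, so the explicit out.flush() before out.close() is defensive rather than required. A condensed sketch of the same stream stack (temp-file prefix taken from the example, content abbreviated):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;
    import java.util.zip.GZIPOutputStream;

    public class EditionStreamSketch {
        public static void main(String[] args) throws IOException {
            File fle = File.createTempFile("edition_full_export", ".xml.gz");
            PrintStream out = new PrintStream(new GZIPOutputStream(new FileOutputStream(fle)), true, "UTF-8");
            out.println("<edition>");
            out.println("</edition>");
            out.close(); // flushes, finishes the gzip stream, closes the file
        }
    }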
From source file:de.juwimm.cms.remote.ContentServiceSpringImpl.java
@Override
protected void handleDeployRootUnitEdition(Integer editionId) throws Exception {
    try {
        editionCronService.logEditionStatusInfo(LiveserverDeployStatus.CreateDeployFileForExport, editionId);
        if (log.isDebugEnabled()) log.debug("Start creating RootEdition");
        EditionHbm edition = getEditionHbmDao().load(editionId);
        PrintStream out = createEditionOutputStream(edition);
        // site info is needed to connect to the live server
        if (log.isDebugEnabled()) log.debug("siteToXml");
        getEditionHbmDao().siteToXml(edition.getSiteId(), out, edition);
        System.gc();
        // all ..toXml + unitId, to reuse them in unitDeploy
        if (log.isDebugEnabled()) log.debug("picturesToXmlRecursive");
        // TODO: change -1 to null and check edition.type later; for the moment -1 = rootdeploy
        getEditionHbmDao().picturesToXmlRecursive(-1, edition.getSiteId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("documentsToXmlRecursive");
        // TODO: change -1 to null and check edition.type later; for the moment -1 = rootdeploy
        getEditionHbmDao().documentsToXmlRecursive(-1, edition.getSiteId(), out, true, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("unitsToXmlRecursive");
        getEditionHbmDao().unitsToXmlRecursive(edition.getSiteId(), out, edition);
        System.gc();
        // if (log.isDebugEnabled()) log.debug("viewdocumentsToXmlRecursive");
        // getEditionHbmDao().viewdocumentsToXmlRecursive(edition.getSiteId(), out, edition);
        if (log.isDebugEnabled()) log.debug("hostsToXmlRecursive");
        getEditionHbmDao().hostsToXmlRecursive(edition.getSiteId(), out, edition);
        if (log.isDebugEnabled()) log.debug("viewdocumentsToXmlRecursive");
        getEditionHbmDao().viewdocumentsToXmlRecursive(edition.getSiteId(), out, edition);
        if (log.isDebugEnabled()) log.debug("realmsToXmlRecursive");
        getEditionHbmDao().realmsToXmlUsed(edition.getUnitId(), out, edition);
        System.gc();
        if (log.isDebugEnabled()) log.debug("Creating ViewComponent Data");
        Iterator vdIt = getViewDocumentHbmDao().findAll(edition.getSiteId()).iterator();
        while (vdIt.hasNext()) {
            ViewDocumentHbm vdl = (ViewDocumentHbm) vdIt.next();
            getViewComponentHbmDao().toXml(vdl.getViewComponent(), edition.getUnitId(), true, true, true,
                    false, -1, false, false, Constants.DEPLOY_TYPE_ROOT, out);
        }
        out.println("</edition>");
        out.flush();
        out.close();
        log.info("output file closed.");
        out = null;
        if (log.isDebugEnabled()) log.debug("Finished creating Edition");
    } catch (Exception e) {
        if (log.isWarnEnabled()) log.warn("Error while creating RootEdition");
        editionCronService.logEditionStatusException(editionId, e.getMessage());
        throw new UserException(e.getMessage(), e);
    }
}
From source file:fr.certu.chouette.command.ExchangeCommand.java
/**
 * @param manager
 * @param parameters
 * @return
 */
@SuppressWarnings("incomplete-switch")
public List<NeptuneIdentifiedObject> executeImport(INeptuneManager<NeptuneIdentifiedObject> manager,
        Map<String, List<String>> parameters) {
    String reportFileName = getSimpleString(parameters, "reportfile", "");
    String reportFormat = getSimpleString(parameters, "reportformat", "txt");
    boolean append = getBoolean(parameters, "append");
    String format = getSimpleString(parameters, "format");
    PrintStream stream = System.out;
    String encoding = Charset.defaultCharset().toString();
    if (!reportFileName.isEmpty()) {
        try {
            if (reportFormat.equals("json")) {
                encoding = "UTF-8";
            }
            stream = new PrintStream(new FileOutputStream(new File(reportFileName), append), true, encoding);
        } catch (IOException e) {
            System.err.println("cannot open file :" + reportFileName + " " + e.getMessage());
            reportFileName = "";
        }
    }
    try {
        List<FormatDescription> formats = manager.getImportFormats(null);
        FormatDescription description = null;
        for (FormatDescription formatDescription : formats) {
            if (formatDescription.getName().equalsIgnoreCase(format)) {
                description = formatDescription;
                break;
            }
        }
        if (description == null) {
            throw new IllegalArgumentException(
                    "format " + format + " unavailable, check command getImportFormats for list ");
        }
        List<ParameterValue> values = new ArrayList<ParameterValue>();
        for (ParameterDescription desc : description.getParameterDescriptions()) {
            String name = desc.getName();
            String key = name.toLowerCase();
            List<String> vals = parameters.get(key);
            if (vals == null) {
                if (desc.isMandatory()) {
                    throw new IllegalArgumentException(
                            "parameter -" + name + " is required, check command getImportFormats for list ");
                }
            } else {
                if (desc.isCollection()) {
                    ListParameterValue val = new ListParameterValue(name);
                    switch (desc.getType()) {
                    case FILEPATH:
                        val.setFilepathList(vals);
                        break;
                    case STRING:
                        val.setStringList(vals);
                        break;
                    case FILENAME:
                        val.setFilenameList(vals);
                        break;
                    default:
                        throw new IllegalArgumentException(
                                "parameter -" + name + " invalid, check command getImportFormats for list ");
                    }
                    values.add(val);
                } else {
                    if (vals.size() != 1) {
                        throw new IllegalArgumentException("parameter -" + name
                                + " must be unique, check command getImportFormats for list ");
                    }
                    String simpleval = vals.get(0);
                    SimpleParameterValue val = new SimpleParameterValue(name);
                    switch (desc.getType()) {
                    case FILEPATH:
                        val.setFilepathValue(simpleval);
                        break;
                    case STRING:
                        val.setStringValue(simpleval);
                        break;
                    case FILENAME:
                        val.setFilenameValue(simpleval);
                        break;
                    case BOOLEAN:
                        val.setBooleanValue(Boolean.parseBoolean(simpleval));
                        break;
                    case INTEGER:
                        val.setIntegerValue(Long.parseLong(simpleval));
                        break;
                    case DATE:
                        val.setDateValue(toCalendar(simpleval));
                        break;
                    }
                    values.add(val);
                }
            }
        }
        ReportHolder ireport = new ReportHolder();
        ReportHolder vreport = new ReportHolder();
        List<NeptuneIdentifiedObject> beans = manager.doImport(null, format, values, ireport, vreport);
        if (ireport.getReport() != null) {
            Report r = ireport.getReport();
            if (reportFormat.equals("json")) {
                stream.println(r.toJSON());
            } else {
                stream.println(r.getLocalizedMessage());
                printItems(stream, "", r.getItems());
            }
        }
        if (vreport.getReport() != null) {
            Report r = vreport.getReport();
            if (reportFormat.equals("json")) {
                stream.println(r.toJSON());
            } else {
                stream.println(r.getLocalizedMessage());
                printItems(stream, "", r.getItems());
            }
        }
        if (beans == null || beans.isEmpty()) {
            System.out.println("import failed");
        } else {
            System.out.println("beans count = " + beans.size());
        }
        return beans;
    } catch (ChouetteException e) {
        log.error(e.getMessage());
        Throwable caused = e.getCause();
        while (caused != null) {
            log.error("caused by " + caused.getMessage());
            caused = caused.getCause();
        }
        throw new RuntimeException("import failed , see log for details");
    } finally {
        if (!reportFileName.isEmpty()) {
            stream.close();
        }
    }
}
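executeImport defaults stream to System.out and closes it in the finally block only when a report file was actually opened. That guard matters: PrintStream.close() is terminal, and closing System.out would silence all subsequent console output in the JVM. A sketch of the guard in isolation (argument handling and file name are illustrative):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;

    public class OptionalReportStream {
        public static void main(String[] args) throws IOException {
            String reportFileName = args.length > 0 ? args[0] : "";
            PrintStream stream = System.out;
            if (!reportFileName.isEmpty()) {
                stream = new PrintStream(new FileOutputStream(new File(reportFileName), true), true, "UTF-8");
            }
            try {
                stream.println("report line");
            } finally {
                if (stream != System.out) {
                    stream.close(); // close only streams this code opened, never System.out
                }
            }
        }
    }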