List of usage examples for java.io ByteArrayInputStream close
public void close() throws IOException
From source file:com.orange.mmp.midlet.MidletManager.java
/**
 * Creates a JAD descriptor for a PFM midlet download.
 *
 * Resolves the delivery ticket, merges service-specific JAD entries, builds the
 * midlet download URL, applies per-mobile JAD attribute rules, and optionally
 * signs the midlet using the configured keystore.
 *
 * @param ticketId   The delivery ticket ID
 * @param mobile     The mobile to use
 * @param signMidlet indicates if the midlet must be signed
 * @param service    Indicates the hostname to use in the JAD (null for default)
 * @return the JAD content as a byte stream
 * @throws MMPException on expired ticket, unknown application/midlet type,
 *                      or any underlying I/O or signing failure
 */
@SuppressWarnings("unchecked")
public ByteArrayOutputStream getJad(String ticketId, Mobile mobile, boolean signMidlet, Service service)
        throws MMPException {
    ByteArrayOutputStream output = null;
    try {
        // Resolve the delivery ticket; a null result means it expired.
        DeliveryTicket ticket = new DeliveryTicket();
        ticket.setId(ticketId);
        ticket = DeliveryManager.getInstance().getDeliveryTicket(ticket);
        if (ticket == null)
            throw new MMPException("ticket " + ticketId + " has expired");
        // Adopt the service's id and JAD entries when the ticket has none.
        // NOTE(review): `service` is dereferenced here without a null check even
        // though null is documented as "use default" — TODO confirm callers never
        // pass null together with a serviceId-less ticket.
        if (ticket.getServiceId() == null) {
            ticket.setServiceId(service.getId());
            if (ticket.getServiceSpecific() != null) {
                // Merge service JAD entries over the ticket's own entries.
                Map<String, String> ticketMap = ticket.getServiceSpecific();
                Map<String, String> serviceMap = service.getJadEntries();
                Set<String> serviceKeys = serviceMap.keySet();
                for (String key : serviceKeys) {
                    ticketMap.put(key, serviceMap.get(key));
                }
                ticket.setServiceSpecific(ticketMap);
            } else
                ticket.setServiceSpecific(service.getJadEntries());
        }
        ticket.setUaKey(mobile.getKey());
        ticket.setType(mobile.getMidletType());
        // Fire the ticket callback ("apiName.method") if one is registered.
        if (ticket.getCallback() != null && !ticket.getCallback().equals("")) {
            Api api = new Api();
            String[] apiArray = ticket.getCallback().split("\\.");
            if (apiArray.length == 2) {
                api.setName(apiArray[0]);
                String method = apiArray[1];
                ApiContainerFactory.getInstance().getApiContainer().invokeApi(api, method,
                        new Object[] { ticket });
            }
        }
        // Look up the application widget to obtain the display name.
        Widget appWidget = WidgetManager.getInstance().getWidget(ticket.getServiceId(), mobile.getBranchId());
        if (appWidget == null)
            throw new MMPException("application " + ticket.getServiceId() + " not found");
        String appName = appWidget.getName();
        // Find the midlet template matching the mobile's midlet type.
        Midlet midlet = new Midlet();
        midlet.setType(mobile.getMidletType());
        Midlet midlets[] = (Midlet[]) DaoManagerFactory.getInstance().getDaoManager().getDao("midlet")
                .find(midlet);
        if (midlets.length == 0)
            throw new MMPException("Midlet type not found : " + mobile.getMidletType());
        else
            midlet = midlets[0];
        // Load the template JAD and build the JAR download URL.
        JadFile jadFile = new JadFile();
        jadFile.load(new File(new URI(midlet.getJadLocation())));
        StringBuilder midletURL = new StringBuilder();
        if (service == null)
            service = ServiceManager.getInstance().getDefaultService();
        midletURL.append(new URL("http", service.getHostname(), "").toString())
                .append(Controller.getUrlMapping()).append("/").append(mobile.getKey()).append("/")
                .append(appName).append(com.orange.mmp.midlet.Constants.JAR_FILE_EXTENSION);
        // Set default if not found.
        // NOTE(review): serviceId was already defaulted at the top of the method,
        // so this branch looks unreachable — TODO confirm before removing.
        if (ticket.getServiceId() == null)
            ticket.setServiceId(ServiceManager.getInstance().getDefaultService().getId());
        // Build JAD entries: JAR URL, application name, and UA key.
        jadFile.setValue(Constants.JAD_PARAMETER_JAR_URL, midletURL.toString());
        jadFile.setValue(Constants.JAD_PARAMETER_APPNAME, appName);
        String uakey = mobile.getKey();
        jadFile.setValue(Constants.JAD_PARAMETER_UAKEY, uakey);
        // Copy any service-specific entries into the JAD verbatim.
        Map<String, String> servicesSpecificMap = ticket.getServiceSpecific();
        if (servicesSpecificMap != null) {
            for (String key : servicesSpecificMap.keySet()) {
                jadFile.setValue(key, servicesSpecificMap.get(key));
            }
        }
        // Rewrite the launcher line, keeping icon/mainclass from the template
        // when it matches the expected pattern, else fall back to defaults.
        String launcherLine = jadFile.getValue(Constants.JAD_PARAMETER_LAUNCHER);
        Matcher launcherLineMatcher = launcherPattern.matcher(launcherLine);
        if (launcherLineMatcher.matches()) {
            jadFile.setValue(Constants.JAD_PARAMETER_LAUNCHER, appName.concat(", ")
                    .concat(launcherLineMatcher.group(2)).concat(", ").concat(launcherLineMatcher.group(3)));
        } else
            jadFile.setValue(Constants.JAD_PARAMETER_LAUNCHER, appName.concat(", ")
                    .concat(Constants.JAD_LAUNCHER_ICON).concat(", ").concat(Constants.JAD_LAUNCHER_MAINCLASS));
        // Add/Modify/Delete JAD parameters according to mobile rules.
        JadAttributeAction[] jadActions = mobile.getJadAttributeActions();
        for (JadAttributeAction jadAction : jadActions) {
            // Apply the action only when its inJad case matches the signing mode.
            if (jadAction.getInJad().equals(ApplyCase.ALWAYS)
                    || (signMidlet && jadAction.getInJad().equals(ApplyCase.SIGNED))
                    || (!signMidlet && jadAction.getInJad().equals(ApplyCase.UNSIGNED))) {
                if (jadAction.isAddAction() || jadAction.isModifyAction())
                    jadFile.setValue(jadAction.getAttribute(), jadAction.getValue(), !jadAction.isStrict());
                else if (jadAction.isDeleteAction())
                    jadFile.deleteValue(jadAction.getAttribute());
            }
        }
        // The JAR size must be declared in the JAD.
        ByteArrayOutputStream jarContent = this.getJar(ticket.getServiceId(), mobile, signMidlet);
        jadFile.setValue(Constants.JAD_PARAMETER_JAR_SIZE, String.valueOf(jarContent.size()));
        // Signed path: write the JAD to a temp file and run the signer over it.
        if (signMidlet) {
            File jadTmpFile = new File(System.getProperty("java.io.tmpdir"),
                    ticket.getId().concat(Constants.JAD_FILE_EXTENSION));
            // Save tmp JAD file.
            // NOTE(review): `fos` is never closed — resource leak; the temp file
            // may also be left with buffered data unflushed on some platforms.
            FileOutputStream fos = new FileOutputStream(jadTmpFile);
            jadFile.save(fos);
            BufferedInputStream jadIn = null;
            BufferedInputStream keystoreIn = null;
            ByteArrayInputStream jarIn = new ByteArrayInputStream(jarContent.toByteArray());
            output = new ByteArrayOutputStream();
            try {
                jadIn = new BufferedInputStream(new FileInputStream(jadTmpFile));
                keystoreIn = new BufferedInputStream(new FileInputStream(this.keystoreFile));
                // Sign the JAR and emit the signed descriptor into `output`.
                AppDescriptor midletSigner = new AppDescriptor();
                midletSigner.load(jadIn, com.orange.mmp.core.Constants.DEFAULT_ENCODING);
                midletSigner.loadKeyStore(keystoreIn, this.keystoreKey.toCharArray());
                midletSigner.addCert(this.keystoreAlias, 1, 0);
                midletSigner.addJarSignature(this.keystoreAlias, this.keystoreKey.toCharArray(), jarIn);
                midletSigner.store(output, com.orange.mmp.core.Constants.DEFAULT_ENCODING);
            } catch (Exception e) {
                throw new MMPException(e);
            } finally {
                if (jadIn != null)
                    jadIn.close();
                if (jarIn != null)
                    jarIn.close();
                if (keystoreIn != null)
                    keystoreIn.close();
                if (jadTmpFile != null)
                    jadTmpFile.delete();
            }
        }
        // Unsigned path: serialize the JAD directly.
        else {
            output = new ByteArrayOutputStream();
            jadFile.save(output);
        }
        return output;
    } catch (IOException ioe) {
        throw new MMPException(ioe);
    } catch (URISyntaxException use) {
        throw new MMPException(use);
    } finally {
        // ByteArrayOutputStream.close() is a no-op, but keep symmetry with the
        // original contract; the returned stream stays usable after close.
        try {
            if (output != null)
                output.close();
        } catch (IOException ioe) {
            // NOP
        }
    }
}
From source file:org.apache.hadoop.hdfs.TestBalancer.java
private void generateFileSystemLoad(long numBlocks, short replication) { String destfile = "hdfs:///user/hadoopqa/";// + BALANCER_TEMP_DIR + "/LOADGEN.DAT"; SecureRandom randgen = new SecureRandom(); ByteArrayOutputStream dat = null; ByteArrayInputStream in = null; final int CHUNK = 4096; final Configuration testConf = new Configuration(hadoopConf); try {/*w w w . j a v a2s. co m*/ testConf.setInt("dfs.replication", replication); for (int i = 0; i < numBlocks; i++) { FileSystem fs = FileSystem.get(URI.create(destfile), testConf); OutputStream out = fs.create(new Path(destfile), replication, new ProgressReporter()); dat = new ByteArrayOutputStream(DFS_BLOCK_SIZE); for (int z = 0; z < DFS_BLOCK_SIZE; z += CHUNK) { byte[] bytes = new byte[CHUNK]; randgen.nextBytes(bytes); dat.write(bytes, 0, CHUNK); } in = new ByteArrayInputStream(dat.toByteArray()); IOUtils.copyBytes(in, out, CHUNK, true); LOG.info("wrote block " + (i + 1) + " of " + numBlocks); } } catch (IOException ioExc) { LOG.warn("f/s loadgen failed!", ioExc); } finally { try { dat.close(); } catch (Exception e) { } try { in.close(); } catch (Exception e) { } } }
From source file:com.microfocus.application.automation.tools.results.RunResultRecorder.java
/**
 * Copies, archives and creates the test reports of LoadRunner (LR) and UFT runs.
 *
 * Reads each result XML file, distinguishes LR results (testsuite name ends in
 * ".lrs") from UFT results, zips and archives report folders into the build's
 * "archive" directory, and records report metadata for later rendering.
 *
 * @param build        the current build
 * @param listener     task listener used for console logging
 * @param resultFiles  relative paths of result XML files in the workspace
 * @param testResult   aggregated test result to attach reports to
 * @param runWorkspace workspace the results were produced in
 * @throws ParserConfigurationException on XML parser setup failure
 * @throws SAXException                 on malformed result XML
 * @throws IOException                  on file/copy errors
 * @throws InterruptedException         on remote FilePath interruption
 */
@SuppressWarnings({ "squid:S134", "squid:S135" })
private void archiveTestsReport(Run<?, ?> build, TaskListener listener, List<String> resultFiles,
        TestResult testResult, FilePath runWorkspace)
        throws ParserConfigurationException, SAXException, IOException, InterruptedException {
    if ((resultFiles == null) || (resultFiles.isEmpty())) {
        return;
    }
    ArrayList<String> zipFileNames = new ArrayList<String>();
    ArrayList<FilePath> reportFolders = new ArrayList<FilePath>();
    List<String> reportNames = new ArrayList<String>();
    listener.getLogger()
            .println("Report archiving mode is set to: " + _resultsPublisherModel.getArchiveTestResultsMode());
    // If the user specified not to archive the report, stop here.
    if (_resultsPublisherModel.getArchiveTestResultsMode()
            .equals(ResultsPublisherModel.dontArchiveResults.getValue()))
        return;
    FilePath projectWS = runWorkspace;
    // Get the artifacts directory where we will upload the zipped report folder.
    File artifactsDir = new File(build.getRootDir(), "archive");
    artifactsDir.mkdirs();
    // Read each result.xml. The structure of the result file is:
    // <testsuites>
    //   <testsuite>
    //     <testcase ... report="path-to-report"/>
    //     ...
    //   </testsuite>
    // </testsuites>
    // Add previous report names for aggregation when using pipelines.
    PerformanceJobReportAction performanceJobReportAction = build.getAction(PerformanceJobReportAction.class);
    if (performanceJobReportAction != null) {
        reportNames
                .addAll(performanceJobReportAction.getLrResultBuildDataset().getLrScenarioResults().keySet());
    }
    for (String resultsFilePath : resultFiles) {
        FilePath resultsFile = projectWS.child(resultsFilePath);
        List<ReportMetaData> ReportInfoToCollect = new ArrayList<ReportMetaData>();
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(resultsFile.read());
        doc.getDocumentElement().normalize();
        // NOTE(review): if the document has no <testsuite> element, item(0)
        // returns null and the cast below NPEs — TODO confirm inputs always
        // contain at least one testsuite.
        Node testSuiteNode = doc.getElementsByTagName("testsuite").item(0);
        Element testSuiteElement = (Element) testSuiteNode;
        if (testSuiteElement.hasAttribute("name") && testSuiteElement.getAttribute("name").endsWith(".lrs")) {
            // LR test: iterate every testsuite; each one names a test folder.
            NodeList testSuiteNodes = doc.getElementsByTagName("testsuite");
            for (int i = 0; i < testSuiteNodes.getLength(); i++) {
                testSuiteNode = testSuiteNodes.item(i);
                testSuiteElement = (Element) testSuiteNode;
                if (!testSuiteElement.hasAttribute("name")) {
                    continue;
                }
                String testFolderPath = testSuiteElement.getAttribute("name");
                // Test name is the last path segment (Windows-style separator).
                int testPathArr = testFolderPath.lastIndexOf('\\');
                String testName = testFolderPath.substring(testPathArr + 1);
                reportNames.add(testName);
                // "failures" attribute of "0" means the scenario passed.
                String testStatus = ("0".equals(testSuiteElement.getAttribute("failures"))) ? "pass" : "fail";
                Node testCaseNode = testSuiteElement.getElementsByTagName("testcase").item(0);
                if (testCaseNode == null) {
                    listener.getLogger().println("No report folder was found in results");
                    return;
                }
                if (testCaseNode.getNodeType() == Node.ELEMENT_NODE) {
                    Element testCaseElement = (Element) testCaseNode;
                    if (!testCaseElement.hasAttribute(REPORT_NAME_FIELD)) {
                        continue;
                    }
                    String reportFolderPath = testCaseElement.getAttribute(REPORT_NAME_FIELD);
                    FilePath reportFolder = new FilePath(projectWS.getChannel(), reportFolderPath);
                    reportFolders.add(reportFolder);
                    FilePath testFolder = new FilePath(projectWS.getChannel(), testFolderPath);
                    String zipFileName = getUniqueZipFileNameInFolder(zipFileNames, testFolder.getName());
                    FilePath archivedFile = new FilePath(new FilePath(artifactsDir), zipFileName);
                    if (archiveFolder(reportFolder, testStatus, archivedFile, listener)) {
                        zipFileNames.add(zipFileName);
                    }
                    // Generate the various LR report artifacts for this test.
                    createRichReports(reportFolder, testFolderPath, artifactsDir, reportNames, testResult,
                            listener);
                    createHtmlReport(reportFolder, testFolderPath, artifactsDir, reportNames, testResult);
                    createTransactionSummary(reportFolder, testFolderPath, artifactsDir, reportNames,
                            testResult);
                    try {
                        FilePath testSla = copyRunReport(reportFolder, build.getRootDir(),
                                testFolder.getName());
                        if (testSla == null) {
                            listener.getLogger().println("no RunReport.xml file was created");
                        } else {
                            runReportList.add(testSla);
                        }
                    } catch (IOException | InterruptedException e) {
                        listener.getLogger().println(e);
                    }
                }
            }
        } else {
            // UFT test: iterate every testcase element in the first testsuite.
            boolean reportIsHtml = false;
            NodeList testCasesNodes = ((Element) testSuiteNode).getElementsByTagName("testcase");
            Map<String, Integer> fileNameCount = new HashMap<>();
            for (int i = 0; i < testCasesNodes.getLength(); i++) {
                Node nNode = testCasesNodes.item(i);
                if (nNode.getNodeType() == Node.ELEMENT_NODE) {
                    Element eElement = (Element) nNode;
                    if (!eElement.hasAttribute(REPORT_NAME_FIELD)) {
                        continue;
                    }
                    String reportFolderPath = eElement.getAttribute(REPORT_NAME_FIELD); // e.g. "C:\UFTTest\GuiTest1\Report"
                    String testFolderPath = eElement.getAttribute("name"); // e.g. "C:\UFTTest\GuiTest1"
                    String testStatus = eElement.getAttribute("status"); // e.g. "pass"
                    // First 19 chars of system-out hold the run's date/time stamp.
                    Node nodeSystemInfo = eElement.getElementsByTagName("system-out").item(0);
                    String sysinfo = nodeSystemInfo.getFirstChild().getNodeValue();
                    String testDateTime = sysinfo.substring(0, 19);
                    FilePath reportFolder = new FilePath(projectWS.getChannel(), reportFolderPath);
                    boolean isParallelRunnerReport = isParallelRunnerReportPath(reportFolder);
                    reportFolders.add(reportFolder);
                    String archiveTestResultMode = _resultsPublisherModel.getArchiveTestResultsMode();
                    boolean archiveTestResult = false;
                    // Check for the new HTML report; parallel runs use a different file name.
                    FilePath htmlReport = new FilePath(reportFolder,
                            isParallelRunnerReport ? PARALLEL_RESULT_FILE : "run_results.html");
                    FilePath rrvReport = new FilePath(reportFolder, "Results.xml");
                    if (htmlReport.exists()) {
                        reportIsHtml = true;
                        String htmlReportDir = reportFolder.getRemote();
                        ReportMetaData reportMetaData = new ReportMetaData();
                        reportMetaData.setFolderPath(htmlReportDir);
                        reportMetaData.setIsHtmlReport(true);
                        reportMetaData.setDateTime(testDateTime);
                        reportMetaData.setStatus(testStatus);
                        // We need to handle the type for this report.
                        reportMetaData.setIsParallelRunnerReport(isParallelRunnerReport);
                        File testFileFullName = new File(testFolderPath);
                        String testName = org.apache.commons.io.FilenameUtils
                                .getName(testFileFullName.getPath());
                        // We must consider the case when we run the same test in the same build:
                        // suffix the name with a per-test occurrence counter.
                        Integer nameCount = 1;
                        if (fileNameCount.containsKey(testName)) {
                            nameCount = fileNameCount.get(testName) + 1;
                        }
                        // Update the count for this file.
                        fileNameCount.put(testName, nameCount);
                        testName += "[" + nameCount + "]";
                        String resourceUrl = "artifact/UFTReport/" + testName;
                        reportMetaData.setResourceURL(resourceUrl);
                        reportMetaData.setDisPlayName(testName); // use the name, not the full path
                        // Don't know reportMetaData's URL path yet; we will generate it later.
                        ReportInfoToCollect.add(reportMetaData);
                        listener.getLogger().println(
                                "add html report info to ReportInfoToCollect: " + "[date]" + testDateTime);
                    }
                    archiveTestResult = isArchiveTestResult(testStatus, archiveTestResultMode);
                    if (archiveTestResult && rrvReport.exists()) {
                        if (reportFolder.exists()) {
                            FilePath testFolder = new FilePath(projectWS.getChannel(), testFolderPath);
                            String zipFileName = getUniqueZipFileNameInFolder(zipFileNames,
                                    testFolder.getName());
                            zipFileNames.add(zipFileName);
                            listener.getLogger().println("Zipping report folder: " + reportFolderPath);
                            ByteArrayOutputStream outstr = new ByteArrayOutputStream();
                            // Don't use FileFilter for zip, or it will cause a bug when files are on a slave.
                            reportFolder.zip(outstr);
                            /*
                             * copyRecursiveTo/copyFrom are avoided due to a Jenkins bug:
                             * https://issues.jenkins-ci.org/browse/JENKINS-9189 (claimed
                             * fixed, but not). So the folder is zipped to a stream and
                             * copied to the master.
                             */
                            ByteArrayInputStream instr = new ByteArrayInputStream(outstr.toByteArray());
                            FilePath archivedFile = new FilePath(new FilePath(artifactsDir), zipFileName);
                            archivedFile.copyFrom(instr);
                            listener.getLogger().println("copy from slave to master: " + archivedFile);
                            outstr.close();
                            instr.close();
                            // Add to report list.
                            ReportMetaData reportMetaData = new ReportMetaData();
                            reportMetaData.setIsHtmlReport(false);
                            reportMetaData.setIsParallelRunnerReport(false);
                            // reportMetaData.setFolderPath(htmlReportDir); //no need for RRV
                            File testFileFullName = new File(testFolderPath);
                            String testName = testFileFullName.getName();
                            reportMetaData.setDisPlayName(testName); // use the name, not the full path
                            String zipFileUrlName = "artifact/" + zipFileName;
                            reportMetaData.setUrlName(zipFileUrlName);
                            // For RRV, the file url and resource url are the same.
                            reportMetaData.setResourceURL(zipFileUrlName);
                            reportMetaData.setDateTime(testDateTime);
                            reportMetaData.setStatus(testStatus);
                            ReportInfoToCollect.add(reportMetaData);
                        } else {
                            listener.getLogger().println("No report folder was found in: " + reportFolderPath);
                        }
                    }
                }
            }
            if (reportIsHtml && !ReportInfoToCollect.isEmpty()) {
                listener.getLogger().println("begin to collectAndPrepareHtmlReports");
                collectAndPrepareHtmlReports(build, listener, ReportInfoToCollect, runWorkspace);
            }
            if (!ReportInfoToCollect.isEmpty()) {
                // Serialize report metadata so later build steps can pick it up.
                File reportMetaDataXmlFile = new File(artifactsDir.getParent(), REPORTMETADATE_XML);
                String reportMetaDataXml = reportMetaDataXmlFile.getAbsolutePath();
                writeReportMetaData2XML(ReportInfoToCollect, reportMetaDataXml, listener);
                // Add UFT report action.
                try {
                    listener.getLogger().println("Adding a report action to the current build.");
                    HtmlBuildReportAction reportAction = new HtmlBuildReportAction(build);
                    build.addAction(reportAction);
                } catch (IOException | SAXException | ParserConfigurationException ex) {
                    listener.getLogger().println("a problem adding action: " + ex);
                }
            }
        }
    }
}
From source file:org.apache.oodt.security.sso.opensso.SSOProxy.java
private IdentityDetails parseIdentityDetails(String serviceResponse) { ByteArrayInputStream is = new ByteArrayInputStream(serviceResponse.getBytes()); BufferedReader br = new BufferedReader(new InputStreamReader(is)); IdentityDetails details = new IdentityDetails(); String line = null, lastAttrKeyRead = null; try {//from w w w . j a v a2s . co m while ((line = br.readLine()) != null) { if (line.equals(IDENTITY_DETAILS_ATTR_SKIP_LINE)) { continue; } String key, val; if (line.startsWith(IDENTITY_DETAILS_REALM)) { // can't parse it the same way key = line.substring(0, IDENTITY_DETAILS_REALM.length()); val = line.substring(IDENTITY_DETAILS_REALM.length() + 1); } else { String[] lineToks = line.split("="); key = lineToks[0]; val = lineToks[1]; } if (key.equals(IDENTITY_DETAILS_NAME)) { details.setName(val); } else if (key.equals(IDENTITY_DETAILS_TYPE)) { details.setType(val); } else if (key.equals(IDENTITY_DETAILS_REALM)) { details.setRealm(val); } else if (key.equals(IDENTITY_DETAILS_GROUP)) { details.getGroups().add(val); } else if (key.equals(IDENTITY_DETAILS_ATTR_NAME)) { lastAttrKeyRead = val; } else if (key.equals(IDENTITY_DETAILS_ATTR_VALUE)) { details.getAttributes().addMetadata(lastAttrKeyRead, val); } } } catch (IOException e) { LOG.log(Level.SEVERE, e.getMessage()); LOG.log(Level.WARNING, "Error reading service response line: [" + line + "]: Message: " + e.getMessage()); } finally { try { is.close(); } catch (Exception ignore) { } try { br.close(); } catch (Exception ignore) { } } return details; }
From source file:big.BigZip.java
/** * Version 2 that permits to extract the text from a compressed file without * creating any file on the disk./*from www .j a va 2 s . c o m*/ * @param startPosition Offset where the file begins * @param endPosition Offset where the file ends * @return The source code of the compressed file */ public String extractBytesToRAM(final long startPosition, final Long endPosition) { String result = null; try { // enable random access to the BIG file (fast as heck) RandomAccessFile dataBIG = new RandomAccessFile(fileMainBIG, "r"); // jump directly to the position where the file is positioned dataBIG.seek(startPosition); // create a byte array ByteArrayOutputStream byteOutput = new ByteArrayOutputStream(); // now we start reading bytes during the mentioned interval while (dataBIG.getFilePointer() < endPosition) { // read a byte from our BIG archive int data = dataBIG.read(); byteOutput.write(data); } // flush data at this point byteOutput.flush(); // now convert the stream from input into an output (to feed the zip stream) ByteArrayInputStream byteInput = new ByteArrayInputStream(byteOutput.toByteArray()); // where we place the decompressed bytes ByteArrayOutputStream textOutput = new ByteArrayOutputStream(); // create the zip streamer final ArchiveInputStream archiveStream; archiveStream = new ArchiveStreamFactory().createArchiveInputStream("zip", byteInput); final ZipArchiveEntry entry = (ZipArchiveEntry) archiveStream.getNextEntry(); // copy all bytes from one location to the other (and decompress the data) IOUtils.copy(archiveStream, textOutput); // flush the results textOutput.flush(); // we've got the result right here! 
result = textOutput.toString(); // now close all the streams that we have open dataBIG.close(); byteOutput.close(); byteInput.close(); textOutput.close(); archiveStream.close(); } catch (FileNotFoundException ex) { Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex); return null; } catch (IOException ex) { Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex); return null; } catch (ArchiveException ex) { Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex); } return result; }
From source file:org.apache.qpid.server.store.derby.DerbyMessageStore.java
private void storeMetaData(Connection conn, long messageId, StorableMessageMetaData metaData) throws SQLException { if (_logger.isDebugEnabled()) { _logger.debug("Adding metadata for message " + messageId); }//from w w w. j a va 2 s .co m PreparedStatement stmt = conn.prepareStatement(INSERT_INTO_META_DATA); try { stmt.setLong(1, messageId); final int bodySize = 1 + metaData.getStorableSize(); byte[] underlying = new byte[bodySize]; underlying[0] = (byte) metaData.getType().ordinal(); java.nio.ByteBuffer buf = java.nio.ByteBuffer.wrap(underlying); buf.position(1); buf = buf.slice(); metaData.writeToBuffer(0, buf); ByteArrayInputStream bis = new ByteArrayInputStream(underlying); try { stmt.setBinaryStream(2, bis, underlying.length); int result = stmt.executeUpdate(); if (result == 0) { throw new RuntimeException("Unable to add meta data for message " + messageId); } } finally { try { bis.close(); } catch (IOException e) { throw new SQLException(e); } } } finally { stmt.close(); } }
From source file:big.BigZip.java
/**
 * Version 2 that permits extracting the text from a compressed file without
 * creating any file on the disk. Unlike the two-argument overload, this one
 * locates the end of the entry itself by scanning forward for the next
 * entry's marker bytes.
 *
 * @param filePosition Offset where the file entry starts (before the magic signature)
 * @return The source code of the compressed file, or null on I/O failure
 */
public String extractBytesToRAM(final long filePosition) {
    String result = null;
    try {
        // Add the signature bytes to our start position.
        long startPosition = filePosition + magicSignature.length();
        // Enable random access to the BIG file (fast as heck).
        RandomAccessFile dataBIG = new RandomAccessFile(fileMainBIG, "r");
        // Jump directly to the position where the file is positioned.
        dataBIG.seek(startPosition);
        // Create a byte array to accumulate the still-compressed bytes.
        ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
        // Get the end of this file entry (by brute-force scan).
        // NOTE(review): `test != -1` is always true — a char is unsigned, so the
        // loop only ends via the break below or via EOFException (caught as
        // IOException, returning null). Also, readChar() consumes TWO bytes per
        // iteration, so the scan steps in 2-byte strides and the 66/73 ("B"/"I")
        // marker check depends on alignment — presumably by design; confirm.
        char test = 0;
        long endPosition = -1;
        while (test != -1) {
            test = dataBIG.readChar();
            // If the magic marker value was found..
            if (test == 66) {
                // ..read the next byte for confirmation.
                byte value = dataBIG.readByte();
                if (value != 73) {
                    continue;
                }
                // We found the next entry; back up over the confirmation byte.
                endPosition = dataBIG.getFilePointer() - 1;
                break;
            }
        }
        // Rewind back to the start position.
        dataBIG.seek(startPosition);
        // Now we read all bytes inside the located interval.
        while (dataBIG.getFilePointer() < endPosition) {
            int data = dataBIG.read();
            byteOutput.write(data);
        }
        // Flush data at this point.
        byteOutput.flush();
        // Convert the buffered bytes into an input (to feed the zip stream).
        ByteArrayInputStream byteInput = new ByteArrayInputStream(byteOutput.toByteArray());
        // Where we place the decompressed bytes.
        ByteArrayOutputStream textOutput = new ByteArrayOutputStream();
        // Create the zip streamer.
        final ArchiveInputStream archiveStream;
        archiveStream = new ArchiveStreamFactory().createArchiveInputStream("zip", byteInput);
        // getNextEntry() positions the stream at the entry's data.
        final ZipArchiveEntry entry = (ZipArchiveEntry) archiveStream.getNextEntry();
        // Copy all bytes from one location to the other (and decompress the data).
        IOUtils.copy(archiveStream, textOutput);
        // Flush the results.
        textOutput.flush();
        // We've got the result right here!
        result = textOutput.toString();
        // Now close all the streams that we have open.
        // NOTE(review): these closes only run on the success path — every
        // stream leaks if an exception is thrown above; consider
        // try-with-resources as done in the two-argument overload.
        dataBIG.close();
        byteOutput.close();
        byteInput.close();
        textOutput.close();
        archiveStream.close();
    } catch (FileNotFoundException ex) {
        Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    } catch (IOException ex) {
        Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    } catch (ArchiveException ex) {
        Logger.getLogger(BigZip.class.getName()).log(Level.SEVERE, null, ex);
    }
    return result;
}
From source file:bftsmart.tom.core.Synchronizer.java
private void processSYNC(byte[] payload, int regency) { CertifiedDecision lastHighestCID = null; int currentCID = -1; HashSet<SignedObject> signedCollects = null; byte[] propose = null; int batchSize = -1; ByteArrayInputStream bis; ObjectInputStream ois;/*from w w w . jav a 2s.c o m*/ try { // deserialization of the message content bis = new ByteArrayInputStream(payload); ois = new ObjectInputStream(bis); lastHighestCID = (CertifiedDecision) ois.readObject(); signedCollects = (HashSet<SignedObject>) ois.readObject(); propose = (byte[]) ois.readObject(); batchSize = ois.readInt(); lcManager.setCollects(regency, signedCollects); currentCID = lastHighestCID.getCID() + 1; // Is the predicate "sound" true? Is the certificate for LastCID valid? if (lcManager.sound(lcManager.selectCollects(regency, currentCID)) && (!controller.getStaticConf().isBFT() || lcManager.hasValidProof(lastHighestCID))) { finalise(regency, lastHighestCID, signedCollects, propose, batchSize, false); } ois.close(); bis.close(); } catch (IOException ex) { ex.printStackTrace(); java.util.logging.Logger.getLogger(TOMLayer.class.getName()).log(Level.SEVERE, null, ex); } catch (ClassNotFoundException ex) { ex.printStackTrace(); java.util.logging.Logger.getLogger(TOMLayer.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:com.google.blockly.android.control.BlocklyController.java
/** * Loads a Workspace state from an Android {@link Bundle}, previous saved in * {@link #onSaveSnapshot(Bundle)}./*from w w w .jav a 2 s . c o m*/ * * @param savedInstanceState The activity state Bundle passed into {@link Activity#onCreate} or * {@link Activity#onRestoreInstanceState}. * @return True if a Blockly state was found and successfully loaded into the Controller. * Otherwise, false. */ public boolean onRestoreSnapshot(@Nullable Bundle savedInstanceState) { Bundle blocklyState = (savedInstanceState == null) ? null : savedInstanceState.getBundle(SNAPSHOT_BUNDLE_KEY); if (blocklyState != null) { byte[] bytes = blocklyState.getByteArray(SERIALIZED_WORKSPACE_KEY); if (bytes == null) { // Ignore all other workspace variables. return false; } ByteArrayInputStream in = new ByteArrayInputStream(bytes); try { loadWorkspaceContents(in); } catch (BlocklyParserException e) { // Ignore all other workspace state variables. Log.w(TAG, "Unable to restore Blockly state.", e); return false; } finally { try { in.close(); } catch (IOException e) { // Ignore. } } // TODO(#58): Restore the rest of the state. return true; } return false; }
From source file:org.pentaho.di.job.entries.dtdvalidator.DTDValidator.java
/**
 * Validates the configured XML file against a DTD.
 *
 * Supports two modes: an internal DTD (a DOCTYPE declaration expected inside
 * the XML itself) or an external DTD file, in which case any internal DOCTYPE
 * is stripped and replaced by a SYSTEM reference to the external file before
 * the validating parse.
 *
 * @return true when the document validates cleanly; false otherwise (the
 *         error message / error count are recorded via setErrorMessage /
 *         setNrErrors as a side effect)
 */
public boolean validate() {
    boolean retval = false;
    FileObject xmlfile = null;
    FileObject DTDfile = null;
    ByteArrayInputStream ba = null;
    try {
        if (xmlfilename != null && ((getDTDFilename() != null && !isInternDTD()) || (isInternDTD()))) {
            xmlfile = KettleVFS.getFileObject(getXMLFilename());
            if (xmlfile.exists()) {
                URL xmlFile = new File(KettleVFS.getFilename(xmlfile)).toURI().toURL();
                StringBuffer xmlStringbuffer = new StringBuffer("");
                BufferedReader xmlBufferedReader = null;
                InputStreamReader is = null;
                try {
                    // Read the whole XML file into memory so the DOCTYPE can be
                    // inspected/rewritten as text before parsing.
                    // NOTE(review): the reader uses the platform default charset
                    // here while the re-parse below uses the document's declared
                    // encoding — presumably equivalent in practice; confirm.
                    is = new InputStreamReader(xmlFile.openStream());
                    xmlBufferedReader = new BufferedReader(is);
                    char[] buffertXML = new char[1024];
                    int LenXML = -1;
                    while ((LenXML = xmlBufferedReader.read(buffertXML)) != -1) {
                        xmlStringbuffer.append(buffertXML, 0, LenXML);
                    }
                } finally {
                    if (is != null) {
                        is.close();
                    }
                    if (xmlBufferedReader != null) {
                        xmlBufferedReader.close();
                    }
                }
                // Prepare parsing: first a non-validating parse, used only to
                // discover the document's declared encoding and root element.
                DocumentBuilderFactory DocBuilderFactory = DocumentBuilderFactory.newInstance();
                DocumentBuilder DocBuilder = DocBuilderFactory.newDocumentBuilder();
                DocBuilderFactory.setValidating(false);
                ba = new ByteArrayInputStream(xmlStringbuffer.toString().getBytes("UTF-8"));
                Document xmlDocDTD = DocBuilder.parse(ba);
                // NOTE(review): this null check is redundant — ba was assigned
                // two statements above and cannot be null here.
                if (ba != null) {
                    ba.close();
                }
                // Fall back to UTF-8 when the document declares no encoding.
                String encoding = null;
                if (xmlDocDTD.getXmlEncoding() == null) {
                    encoding = "UTF-8";
                } else {
                    encoding = xmlDocDTD.getXmlEncoding();
                }
                int xmlStartDTD = xmlStringbuffer.indexOf("<!DOCTYPE");
                if (isInternDTD()) {
                    // Internal mode: the DTD must already be declared in the XML.
                    if (xmlStartDTD != -1) {
                        log.logBasic(BaseMessages.getString(PKG, "JobEntryDTDValidator.ERRORDTDFound.Label",
                                getXMLFilename()));
                    } else {
                        setErrorMessage(BaseMessages.getString(PKG,
                                "JobEntryDTDValidator.ERRORDTDNotFound.Label", getXMLFilename()));
                    }
                } else {
                    // External mode: strip any internal DOCTYPE declaration and
                    // inject a SYSTEM reference to the external DTD file.
                    DTDfile = KettleVFS.getFileObject(getDTDFilename());
                    if (DTDfile.exists()) {
                        if (xmlStartDTD != -1) {
                            int EndDTD = xmlStringbuffer.indexOf(">", xmlStartDTD);
                            // String DocTypeDTD = xmlStringbuffer.substring(xmlStartDTD, EndDTD + 1);
                            xmlStringbuffer.replace(xmlStartDTD, EndDTD + 1, "");
                        }
                        String xmlRootnodeDTD = xmlDocDTD.getDocumentElement().getNodeName();
                        String RefDTD = "<?xml version='" + xmlDocDTD.getXmlVersion() + "' encoding='"
                                + encoding + "'?>\n<!DOCTYPE " + xmlRootnodeDTD + " SYSTEM '"
                                + KettleVFS.getFilename(DTDfile) + "'>\n";
                        // Replace everything before the root element with the new prolog.
                        int xmloffsetDTD = xmlStringbuffer.indexOf("<" + xmlRootnodeDTD);
                        xmlStringbuffer.replace(0, xmloffsetDTD, RefDTD);
                    } else {
                        log.logError(
                                BaseMessages.getString(PKG,
                                        "JobEntryDTDValidator.ERRORDTDFileNotExists.Subject"),
                                BaseMessages.getString(PKG, "JobEntryDTDValidator.ERRORDTDFileNotExists.Msg",
                                        getDTDFilename()));
                    }
                }
                // Skip the validating parse when internal mode found no DTD or
                // external mode found no DTD file.
                if (!(isInternDTD() && xmlStartDTD == -1 || (!isInternDTD() && !DTDfile.exists()))) {
                    // Second, validating parse over the (possibly rewritten) text.
                    MyErrorHandler error = new MyErrorHandler();
                    DocBuilderFactory.setValidating(true);
                    DocBuilder = DocBuilderFactory.newDocumentBuilder();
                    DocBuilder.setErrorHandler(error);
                    ba = new ByteArrayInputStream(xmlStringbuffer.toString().getBytes(encoding));
                    xmlDocDTD = DocBuilder.parse(ba);
                    if (error.errorMessage == null) {
                        log.logBasic(BaseMessages.getString(PKG, "JobEntryDTDValidator.DTDValidatorOK.Subject"),
                                BaseMessages.getString(PKG, "JobEntryDTDValidator.DTDValidatorOK.Label",
                                        getXMLFilename()));
                        // Everything is OK.
                        retval = true;
                    } else {
                        // Invalid DTD: record error count and message.
                        setNrErrors(error.nrErrors);
                        setErrorMessage(BaseMessages.getString(PKG, "JobEntryDTDValidator.DTDValidatorKO",
                                getXMLFilename(), error.nrErrors, error.errorMessage));
                    }
                }
            } else {
                // NOTE(review): this inner !exists() check is redundant — we are
                // already in the else branch of xmlfile.exists().
                if (!xmlfile.exists()) {
                    setErrorMessage(BaseMessages.getString(PKG, "JobEntryDTDValidator.FileDoesNotExist.Label",
                            getXMLFilename()));
                }
            }
        } else {
            setErrorMessage(BaseMessages.getString(PKG, "JobEntryDTDValidator.AllFilesNotNull.Label"));
        }
    } catch (Exception e) {
        setErrorMessage(BaseMessages.getString(PKG, "JobEntryDTDValidator.ErrorDTDValidator.Label",
                getXMLFilename(), getDTDFilename(), e.getMessage()));
    } finally {
        try {
            if (xmlfile != null) {
                xmlfile.close();
            }
            if (DTDfile != null) {
                DTDfile.close();
            }
            if (ba != null) {
                ba.close();
            }
        } catch (IOException e) {
            // Ignore close errors
        }
    }
    return retval;
}