List of usage examples for java.io IOException initCause
public synchronized Throwable initCause(Throwable cause)
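Every example below follows the same wrap-and-rethrow idiom: catch a failure that is not an IOException, create a new IOException, attach the original exception with initCause, and rethrow. A minimal sketch of the pattern, where riskyOperation and SomeCheckedException are placeholders rather than real APIs:

try {
    riskyOperation(); // placeholder for a call that throws a non-IO checked exception
} catch (SomeCheckedException e) {
    IOException ioe = new IOException("operation failed");
    ioe.initCause(e); // preserve the original exception as the cause chain
    throw ioe;
}

IOException only gained a (String, Throwable) constructor in Java 6, so code that must run on Java 5 or earlier (see the Hyperic example below) has to use initCause; on Java 6+ the one-liner throw new IOException("operation failed", e); is equivalent.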
From source file: cn.wanghaomiao.maven.plugin.seimi.packaging.AbstractWarPackagingTask.java

/**
 * Copy file from source to destination. The directories up to <code>destination</code> will be created if they
 * don't already exist. If the <code>onlyIfModified</code> flag is <tt>false</tt>, <code>destination</code> will be
 * overwritten if it already exists. If the flag is <tt>true</tt>, <code>destination</code> will be overwritten if
 * it's not up to date.
 * <p/>
 *
 * @param context the packaging context
 * @param source an existing non-directory <code>File</code> to copy bytes from
 * @param destination a non-directory <code>File</code> to write bytes to (possibly overwriting)
 * @param targetFilename the relative path of the file from the webapp root directory
 * @param onlyIfModified if true, copy the file only if the source has changed; always copy otherwise
 * @return true if the file has been copied/updated, false otherwise
 * @throws IOException if <code>source</code> does not exist, <code>destination</code> cannot be written to, or an
 *             IO error occurs during copying
 */
protected boolean copyFile(WarPackagingContext context, File source, File destination, String targetFilename,
        boolean onlyIfModified) throws IOException {
    if (onlyIfModified && destination.lastModified() >= source.lastModified()) {
        context.getLog().debug(" * " + targetFilename + " is up to date.");
        return false;
    } else {
        if (source.isDirectory()) {
            context.getLog().warn(" + " + targetFilename + " is packaged from the source folder");
            try {
                JarArchiver archiver = context.getJarArchiver();
                archiver.addDirectory(source);
                archiver.setDestFile(destination);
                archiver.createArchive();
            } catch (ArchiverException e) {
                String msg = "Failed to create " + targetFilename;
                context.getLog().error(msg, e);
                IOException ioe = new IOException(msg);
                ioe.initCause(e);
                throw ioe;
            }
        } else {
            FileUtils.copyFile(source.getCanonicalFile(), destination);
            // preserve timestamp
            destination.setLastModified(source.lastModified());
            context.getLog().debug(" + " + targetFilename + " has been copied.");
        }
        return true;
    }
}
From source file: at.spardat.xma.xdelta.JarPatcher.java

/**
 * Apply delta.
 *
 * @param patch the patch
 * @param source the source
 * @param output the output
 * @param list the list
 * @param prefix the prefix
 * @throws IOException Signals that an I/O exception has occurred.
 */
public void applyDelta(ZipFile patch, ZipFile source, ZipArchiveOutputStream output, BufferedReader list,
        String prefix) throws IOException {
    String fileName = null;
    try {
        for (fileName = (next == null ? list.readLine() : next); fileName != null;
                fileName = (next == null ? list.readLine() : next)) {
            if (next != null)
                next = null;
            if (!fileName.startsWith(prefix)) {
                next = fileName;
                return;
            }
            int crcDelim = fileName.lastIndexOf(':');
            int crcStart = fileName.lastIndexOf('|');
            long crc = Long.valueOf(fileName.substring(crcStart + 1, crcDelim), 16);
            long crcSrc = Long.valueOf(fileName.substring(crcDelim + 1), 16);
            fileName = fileName.substring(prefix.length(), crcStart);
            if ("META-INF/file.list".equalsIgnoreCase(fileName))
                continue;
            if (fileName.contains("!")) {
                String[] embeds = fileName.split("\\!");
                ZipArchiveEntry original = getEntry(source, embeds[0], crcSrc);
                File originalFile = File.createTempFile("jardelta-tmp-origin-", ".zip");
                File outputFile = File.createTempFile("jardelta-tmp-output-", ".zip");
                Exception thrown = null;
                try (FileOutputStream out = new FileOutputStream(originalFile);
                        InputStream in = source.getInputStream(original)) {
                    int read = 0;
                    while (-1 < (read = in.read(buffer))) {
                        out.write(buffer, 0, read);
                    }
                    out.flush();
                    applyDelta(patch, new ZipFile(originalFile), new ZipArchiveOutputStream(outputFile), list,
                            prefix + embeds[0] + "!");
                } catch (Exception e) {
                    thrown = e;
                    throw e;
                } finally {
                    originalFile.delete();
                    try (FileInputStream in = new FileInputStream(outputFile)) {
                        if (thrown == null) {
                            ZipArchiveEntry outEntry = copyEntry(original);
                            output.putArchiveEntry(outEntry);
                            int read = 0;
                            while (-1 < (read = in.read(buffer))) {
                                output.write(buffer, 0, read);
                            }
                            output.flush();
                            output.closeArchiveEntry();
                        }
                    } finally {
                        outputFile.delete();
                    }
                }
            } else {
                try {
                    ZipArchiveEntry patchEntry = getEntry(patch, prefix + fileName, crc);
                    if (patchEntry != null) { // new entry
                        ZipArchiveEntry outputEntry = JarDelta.entryToNewName(patchEntry, fileName);
                        output.putArchiveEntry(outputEntry);
                        if (!patchEntry.isDirectory()) {
                            try (InputStream in = patch.getInputStream(patchEntry)) {
                                int read = 0;
                                while (-1 < (read = in.read(buffer))) {
                                    output.write(buffer, 0, read);
                                }
                            }
                        }
                        closeEntry(output, outputEntry, crc);
                    } else {
                        ZipArchiveEntry sourceEntry = getEntry(source, fileName, crcSrc);
                        if (sourceEntry == null) {
                            throw new FileNotFoundException(
                                    fileName + " not found in " + sourceName + " or " + patchName);
                        }
                        if (sourceEntry.isDirectory()) {
                            ZipArchiveEntry outputEntry = new ZipArchiveEntry(sourceEntry);
                            output.putArchiveEntry(outputEntry);
                            closeEntry(output, outputEntry, crc);
                            continue;
                        }
                        patchEntry = getPatchEntry(patch, prefix + fileName + ".gdiff", crc);
                        if (patchEntry != null) { // changed entry
                            ZipArchiveEntry outputEntry = new ZipArchiveEntry(sourceEntry);
                            outputEntry.setTime(patchEntry.getTime());
                            output.putArchiveEntry(outputEntry);
                            byte[] sourceBytes = new byte[(int) sourceEntry.getSize()];
                            try (InputStream sourceStream = source.getInputStream(sourceEntry)) {
                                for (int erg = sourceStream.read(sourceBytes); erg < sourceBytes.length;
                                        erg += sourceStream.read(sourceBytes, erg, sourceBytes.length - erg))
                                    ;
                            }
                            InputStream patchStream = patch.getInputStream(patchEntry);
                            GDiffPatcher diffPatcher = new GDiffPatcher();
                            diffPatcher.patch(sourceBytes, patchStream, output);
                            patchStream.close();
                            outputEntry.setCrc(crc);
                            closeEntry(output, outputEntry, crc);
                        } else { // unchanged entry
                            ZipArchiveEntry outputEntry = new ZipArchiveEntry(sourceEntry);
                            output.putArchiveEntry(outputEntry);
                            try (InputStream in = source.getInputStream(sourceEntry)) {
                                int read = 0;
                                while (-1 < (read = in.read(buffer))) {
                                    output.write(buffer, 0, read);
                                }
                            }
                            output.flush();
                            closeEntry(output, outputEntry, crc);
                        }
                    }
                } catch (PatchException pe) {
                    IOException ioe = new IOException();
                    ioe.initCause(pe);
                    throw ioe;
                }
            }
        }
    } catch (Exception e) {
        System.err.println(prefix + fileName);
        throw e;
    } finally {
        source.close();
        output.close();
    }
}
From source file: org.cloudata.core.tabletserver.FileMerger.java

(Note: the original Korean source comments were lost to encoding damage; the English comments below restate only what is evident from the code.)

public void merge(TabletMapFile resultFile, TabletMapFile[] targetTabletMapFiles, boolean debug, int numOfVersion)
        throws IOException {
    // FIXME
    MapFileWriter writer = resultFile.getMapFileWriter();
    MapFileReader[] readers = null;
    long totalMapFileSize = 0;
    try {
        readers = new MapFileReader[targetTabletMapFiles.length];
        for (int i = 0; i < targetTabletMapFiles.length; i++) {
            if (targetTabletMapFiles[i] != null) {
                totalMapFileSize += targetTabletMapFiles[i].getDataFileSize();
                readers[i] = targetTabletMapFiles[i].getMapFileReader(Row.Key.MIN_KEY, Row.Key.MAX_KEY);
            }
        }
        List<RecordItem> worker = new ArrayList<RecordItem>();
        // init
        initWorker(worker, readers);
        // if (!MajorCompactionAction.isFastProcessingMode()) {
        //     try {
        //         Thread.sleep(100);
        //     } catch (InterruptedException e) {
        //     }
        // }
        boolean isCompleted = false;
        RecordItem oldWinnerItem = null;
        ValueCollection values = new ValueCollection();
        int recordCount = 0;
        List<Integer> removedIndexs = new ArrayList<Integer>(10);
        PerformanceRegulator regulator = createRegulator(totalMapFileSize);
        while (!isCompleted) {
            if (worker.isEmpty()) {
                if (!initWorker(worker, readers)) {
                    writer.write(values);
                    break;
                }
            }
            // if (worker.size() > 100) {
            //     LOG.warn("size of worker exceeds 100");
            // }
            Collections.sort(worker);
            RecordItem winnerItem = worker.remove(0);
            int winnerIndex = winnerItem.getIndex();
            if (oldWinnerItem == null) {
                oldWinnerItem = winnerItem;
            }
            ColumnValue winnerColumnValue = winnerItem.columnValue;
            // collect worker entries whose ColumnValue duplicates the winner's
            int index = 0;
            for (RecordItem eachItem : worker) {
                if (eachItem.columnValue.equals(winnerColumnValue)) {
                    removedIndexs.add(index);
                }
                index++;
            }
            // drop the duplicates and advance their readers
            index = 0;
            for (int removeIndex : removedIndexs) {
                RecordItem removedItem = worker.remove(removeIndex - index);
                ColumnValue nextColumn = null;
                if (readers[removedItem.index] != null) {
                    nextColumn = readers[removedItem.index].next();
                }
                if (nextColumn != null) {
                    worker.add(new RecordItem(removedItem.index,
                            readers[removedItem.index].getCurrentRowKey(), nextColumn));
                    if (nextColumn.getValue() != null) {
                        regulator.recordRead(nextColumn.getValue().length);
                    }
                } else {
                    try {
                        if (readers[removedItem.index] != null) {
                            readers[removedItem.index].close();
                        }
                    } catch (IOException e) {
                        // ignore exceptions raised while closing
                        LOG.info(e.getMessage());
                    }
                    readers[removedItem.index] = null;
                }
                index++;
            }
            removedIndexs.clear();
            // when the cell key changes, flush the accumulated values to the output file
            if (!oldWinnerItem.equalsCellKey(winnerItem)) {
                int nw = writer.write(values);
                regulator.recordWrite(nw);
                oldWinnerItem = winnerItem;
                values = new ValueCollection();
            }
            values.add(winnerColumnValue, numOfVersion);
            // remove first key
            winnerColumnValue = null;
            winnerItem = null;
            // read one line from winner file and add
            ColumnValue nextColumn = readers[winnerIndex].next();
            if (nextColumn != null) {
                worker.add(new RecordItem(winnerIndex, readers[winnerIndex].getCurrentRowKey(), nextColumn));
                // regulator.recordRead(nextColumn.getValue().length);
            } else {
                try {
                    readers[winnerIndex].close();
                } catch (Exception e) {
                    LOG.info(e.getMessage());
                }
                readers[winnerIndex] = null;
            }
            recordCount++;
        } // end of while
    } catch (Exception e) {
        LOG.error("merge error:" + e.getMessage(), e);
        IOException err = new IOException(e.getMessage());
        err.initCause(e);
        throw err;
    } finally {
        if (readers != null) {
            for (int i = 0; i < readers.length; i++) {
                if (readers[i] != null) {
                    try {
                        readers[i].close();
                    } catch (Exception e) {
                        LOG.info(e.getMessage());
                    }
                }
            }
        }
        try {
            if (writer != null) {
                writer.close();
            }
            // LOG.info(resultFile.getTabletInfo().getTabletName() + " file merged from orig: " + totalMapFileSize
            //         + " to " + resultFile.getDataFileSize());
        } catch (IOException e) {
            LOG.error(e.getMessage(), e);
            throw e;
        }
    }
}
From source file: com.ridgelineapps.wallpaper.photosite.TumblrUtils.java

private void parseResponse(InputStream in, ResponseParser responseParser) throws IOException {
    final XmlPullParser parser = Xml.newPullParser();
    try {
        parser.setInput(new InputStreamReader(in));
        int type;
        while ((type = parser.next()) != XmlPullParser.START_TAG && type != XmlPullParser.END_DOCUMENT) {
            // Empty
        }
        if (type != XmlPullParser.START_TAG) {
            throw new InflateException(parser.getPositionDescription() + ": No start tag found!");
        }
        // String name = parser.getName();
        // if (RESPONSE_TAG_RSP.equals(name)) {
        //     final String value = parser.getAttributeValue(null, RESPONSE_ATTR_STAT);
        //     if (!RESPONSE_STATUS_OK.equals(value)) {
        //         throw new IOException("Wrong status: " + value);
        //     }
        // }
        responseParser.parseResponse(parser);
    } catch (XmlPullParserException e) {
        final IOException ioe = new IOException("Could not parse the response");
        ioe.initCause(e);
        throw ioe;
    }
}
From source file: org.cloudsmith.stackhammer.api.client.StackHammerClient.java

/**
 * Parse JSON to the specified type.
 *
 * @param <V>
 * @param stream
 * @param type
 * @return parsed type
 * @throws IOException
 */
protected <V> V parseJson(InputStream stream, Class<V> type) throws IOException {
    BufferedReader reader = new BufferedReader(new InputStreamReader(stream, UTF_8), bufferSize);
    try {
        return gson.fromJson(reader, type);
    } catch (JsonSyntaxException jpe) {
        IOException ioe = new IOException("Parse exception converting JSON to object"); //$NON-NLS-1$
        ioe.initCause(jpe);
        throw ioe;
    } finally {
        try {
            reader.close();
        } catch (IOException ignored) {
            // Ignored
        }
    }
}
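For comparison, a sketch of the same method on Java 7+, assuming the same gson field, UTF_8 constant, and bufferSize field: try-with-resources replaces the explicit finally block, and the two-argument IOException constructor replaces initCause.

protected <V> V parseJson(InputStream stream, Class<V> type) throws IOException {
    // try-with-resources closes the reader even if fromJson throws
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, UTF_8), bufferSize)) {
        return gson.fromJson(reader, type);
    } catch (JsonSyntaxException jpe) {
        throw new IOException("Parse exception converting JSON to object", jpe);
    }
}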
From source file: org.archive.crawler.util.BdbUriUniqFilter.java

/**
 * Constructor.
 *
 * Only used for testing; usually the no-arg constructor is used, and the
 * environment is provided by an injected BdbModule.
 *
 * @param bdbEnv The directory that holds the bdb environment. Will
 *            make a database under here if one doesn't already exist; otherwise
 *            reopens any existing dbs.
 * @param cacheSizePercentage Percentage of the JVM heap that bdb allocates as
 *            its cache. Pass -1 to get the default cache size.
 * @throws IOException
 */
public BdbUriUniqFilter(File bdbEnv, final int cacheSizePercentage) throws IOException {
    super();
    FileUtils.ensureWriteableDirectory(bdbEnv);
    EnvironmentConfig envConfig = new EnvironmentConfig();
    envConfig.setAllowCreate(true);
    if (cacheSizePercentage > 0 && cacheSizePercentage < 100) {
        envConfig.setCachePercent(cacheSizePercentage);
    }
    try {
        createdEnvironment = true;
        Environment env = new Environment(bdbEnv, envConfig);
        BdbModule.BdbConfig config = getDatabaseConfig();
        config.setAllowCreate(true);
        try {
            env.truncateDatabase(null, DB_NAME, false);
        } catch (DatabaseNotFoundException e) {
            // ignored
        }
        Database db = env.openDatabase(null, DB_NAME, config.toDatabaseConfig());
        initialize(db);
    } catch (DatabaseException e) {
        IOException io = new IOException();
        io.initCause(e);
        throw io;
    }
}
From source file: org.dhatim.smooks.edi.EDIReader.java

/**
 * Get the mapping model associated with the supplied SmooksResourceConfiguration.
 * <p/>
 * The parsed and validated model is cached in the Smooks container context, keyed
 * by the SmooksResourceConfiguration instance.
 * @return The Mapping Model.
 * @throws IOException Error reading resource configuration data (the mapping model).
 * @throws SAXException Error parsing mapping model.
 */
private EdifactModel getMappingModel() throws IOException, SAXException {
    EdifactModel edifactModel;
    Hashtable mappings = getMappingTable(applicationContext);
    synchronized (configuration) {
        edifactModel = (EdifactModel) mappings.get(configuration);
        if (edifactModel == null) {
            try {
                ContainerResourceLocator resourceLocator = applicationContext.getResourceLocator();
                if (modelConfigData.startsWith("urn:") || modelConfigData.endsWith(".jar")
                        || modelConfigData.endsWith(".zip")) {
                    throw new IOException("Unsupported mapping model config URI for basic EDI Parser '"
                            + modelConfigData
                            + "'. Check that you are using the correct EDI parser. You may need to configure an Interchange Parser, such as the UN/EDIFACT parser.");
                }
                if (resourceLocator instanceof URIResourceLocator) {
                    // This will resolve config paths relative to the containing smooks config file...
                    edifactModel = EDIParser.parseMappingModel(modelConfigData,
                            ((URIResourceLocator) resourceLocator).getBaseURI());
                } else {
                    edifactModel = EDIParser.parseMappingModel(modelConfigData,
                            URIResourceLocator.getSystemBaseURI());
                }
                if (edifactModel == null) {
                    logger.error("Invalid " + MODEL_CONFIG_KEY + " config value '" + modelConfigData
                            + "'. Failed to locate EDI Mapping Model resource!");
                }
            } catch (IOException e) {
                IOException newE = new IOException(
                        "Error parsing EDI mapping model [" + configuration.getStringParameter(MODEL_CONFIG_KEY)
                                + "]. Target Profile(s) " + getTargetProfiles() + ".");
                newE.initCause(e);
                throw newE;
            } catch (SAXException e) {
                throw new SAXException(
                        "Error parsing EDI mapping model [" + configuration.getStringParameter(MODEL_CONFIG_KEY)
                                + "]. Target Profile(s) " + getTargetProfiles() + ".",
                        e);
            } catch (EDIConfigurationException e) {
                throw new SAXException(
                        "Error parsing EDI mapping model [" + configuration.getStringParameter(MODEL_CONFIG_KEY)
                                + "]. Target Profile(s) " + getTargetProfiles() + ".",
                        e);
            }
            mappings.put(configuration, edifactModel);
            logger.debug("Parsed, validated and cached EDI mapping model ["
                    + edifactModel.getEdimap().getDescription().getName() + ", Version "
                    + edifactModel.getEdimap().getDescription().getVersion() + "]. Target Profile(s) "
                    + getTargetProfiles() + ".");
        } else if (logger.isInfoEnabled()) {
            logger.debug("Found EDI mapping model [" + edifactModel.getEdimap().getDescription().getName()
                    + ", Version " + edifactModel.getEdimap().getDescription().getVersion()
                    + "] in the model cache. Target Profile(s) " + getTargetProfiles() + ".");
        }
    }
    return edifactModel;
}
From source file: com.microsoft.azure.storage.core.Utility.java

/**
 * Creates an instance of the <code>IOException</code> class using the specified exception.
 *
 * @param ex
 *            An <code>Exception</code> object that represents the exception used to create the IO exception.
 *
 * @return A <code>java.io.IOException</code> object that represents the created IO exception.
 */
public static IOException initIOException(final Exception ex) {
    final IOException retEx = new IOException();
    retEx.initCause(ex);
    return retEx;
}
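A hypothetical call site for this helper; the surrounding try block, the blob.download call, and the StorageException type are illustrative rather than taken from this page:

// Illustrative only: rethrow a storage-layer failure as an IOException.
try {
    blob.download(out);
} catch (StorageException e) {
    throw Utility.initIOException(e); // the StorageException becomes the IOException's cause
}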
From source file: org.hyperic.hq.agent.client.AgentConnection.java

private AgentStreamPair sendCommandHeadersWithRetries(String cmdName, int cmdVersion, AgentRemoteValue arg,
        int maxRetries) throws IOException {
    IOException ex = null;
    AgentStreamPair streamPair = null;
    Socket s = null;
    int tries = 0;
    while (tries++ < maxRetries) {
        try {
            s = getSocket();
            streamPair = new SocketStreamPair(s, s.getInputStream(), s.getOutputStream());
            DataOutputStream outputStream = new DataOutputStream(streamPair.getOutputStream());
            outputStream.writeInt(_agentAPI.getVersion());
            outputStream.writeInt(cmdVersion);
            outputStream.writeUTF(cmdName);
            arg.toStream(outputStream);
            outputStream.flush();
            return streamPair;
        } catch (IOException e) {
            ex = e;
            close(s);
        }
        if (tries >= maxRetries) {
            break;
        }
        try {
            Thread.sleep(SLEEP_TIME);
        } catch (InterruptedException e) {
            log.debug(e, e);
        }
    }
    if (ex != null) {
        IOException toThrow = new IOException(ex.getMessage() + ", retried " + maxRetries + " times");
        // call initCause instead of the (String, Throwable) constructor to stay Java 1.5 compatible
        toThrow.initCause(ex);
        throw toThrow;
    }
    return streamPair;
}
From source file: org.apache.hadoop.hbase.regionserver.RegionScannerHolder.java

/**
 * Get the prefetched scan result, if any. Otherwise,
 * do a scan synchronously and return the result, which
 * may take some time. The region scan coprocessor, if specified,
 * is invoked properly, which may override the scan result.
 *
 * @param rows the number of rows to scan, which preferably should
 *            not change between scanner.next() calls.
 *
 * @return scan result, which has the data retrieved from
 *         the scanner, or some IOException if the scan failed.
 * @throws IOException if failed to retrieve from the scanner.
 */
public ScanResult getScanResult(final int rows) throws IOException {
    Preconditions.checkArgument(rows > 0, "Number of rows requested must be positive");
    ScanResult scanResult = null;
    this.rows = rows;
    if (prefetchScanFuture == null) {
        // Need to scan inline if not prefetched
        scanResult = prefetcher.call();
    } else {
        // if we have a prefetched result, then use it
        try {
            scanResult = prefetchScanFuture.get();
            if (scanResult.moreResults) {
                int prefetchedRows = scanResult.results.size();
                if (prefetchedRows != 0 && this.rows > prefetchedRows) {
                    // Try to scan more since we haven't prefetched enough
                    this.rows -= prefetchedRows;
                    ScanResult tmp = prefetcher.call();
                    if (tmp.isException) {
                        return tmp; // Keep the prefetched results for later
                    }
                    if (tmp.results != null && !tmp.results.isEmpty()) {
                        // Merge new results into the old result list
                        scanResult.results.addAll(tmp.results);
                    }
                    // Reset rows for the next prefetch
                    this.rows = rows;
                }
            }
            prefetchScanFuture = null;
            if (prefetchedResultSize > 0) {
                globalPrefetchedResultSize.addAndGet(-prefetchedResultSize);
                prefetchedResultSize = 0L;
            }
        } catch (ExecutionException ee) {
            throw new IOException("failed to run prefetching task", ee.getCause());
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            IOException iie = new InterruptedIOException("scan was interrupted");
            iie.initCause(ie);
            throw iie;
        }
    }
    if (prefetching && scanResult.moreResults && !scanResult.results.isEmpty()) {
        long totalPrefetchedResultSize = globalPrefetchedResultSize.get();
        if (totalPrefetchedResultSize < maxGlobalPrefetchedResultSize) {
            // Schedule a background prefetch for the next result
            // if prefetch is enabled on scans and there are more results
            prefetchScanFuture = scanPrefetchThreadPool.submit(prefetcher);
        } else if (LOG.isTraceEnabled()) {
            LOG.trace("Prefetching was skipped for scanner " + scannerName
                    + " since the total prefetched result size " + totalPrefetchedResultSize
                    + " exceeds the configured maximum " + maxGlobalPrefetchedResultSize);
        }
    }
    return scanResult;
}
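The InterruptedException branch above shows an idiom worth isolating: translate thread interruption into an InterruptedIOException and restore the interrupt status before throwing. Note that InterruptedIOException offers only () and (String) constructors, so initCause is still the only way to attach the cause even on current Java. A minimal standalone sketch, where blockingScan is a placeholder for any interruptible blocking call:

public byte[] readWithInterruptTranslation() throws IOException, InterruptedException {
    try {
        return blockingScan(); // placeholder: any call declared to throw InterruptedException
    } catch (InterruptedException ie) {
        Thread.currentThread().interrupt(); // restore the interrupt flag for callers
        InterruptedIOException iioe = new InterruptedIOException("operation was interrupted");
        iioe.initCause(ie); // no (String, Throwable) constructor exists on this class
        throw iioe;
    }
}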