List of usage examples for java.io IOException initCause
public synchronized Throwable initCause(Throwable cause)
From source file:org.cloudata.core.client.CTableManager.java
public static IOException makeIOException(Exception e) { if (e instanceof CRemoteException) { CRemoteException re = (CRemoteException) e; IOException resultException = null; try {/*from w w w. j a v a 2s. c o m*/ if (re.getClassName() == null) { throw re; } Constructor cn = IOException.class.getConstructor(String.class); cn.setAccessible(true); IOException ex = (IOException) cn.newInstance(e.getMessage()); ex.initCause(e); return ex; //resultException = (IOException)Class.forName(re.getClassName()).newInstance(); //resultException.initCause(e); //return resultException; } catch (Exception e1) { e1.printStackTrace(); LOG.error(e1.getMessage()); // return null; return re; } } else if (e instanceof IOException) { return (IOException) e; } else { IOException err = new IOException(e.getMessage()); err.initCause(e); return err; } }
From source file:org.apache.hadoop.hbase.util.ModifyRegionUtils.java
/** * Execute the task on the specified set of regions. * * @param exec Thread Pool Executor/*from w w w . j a v a2 s .c o m*/ * @param regions {@link HRegionInfo} that describes the regions to edit * @param task {@link RegionFillTask} custom code to edit the region * @throws IOException */ public static void editRegions(final ThreadPoolExecutor exec, final Collection<HRegionInfo> regions, final RegionEditTask task) throws IOException { final ExecutorCompletionService<Void> completionService = new ExecutorCompletionService<Void>(exec); for (final HRegionInfo hri : regions) { completionService.submit(new Callable<Void>() { @Override public Void call() throws IOException { task.editRegion(hri); return null; } }); } try { for (HRegionInfo hri : regions) { completionService.take().get(); } } catch (InterruptedException e) { throw new InterruptedIOException(e.getMessage()); } catch (ExecutionException e) { IOException ex = new IOException(); ex.initCause(e.getCause()); throw ex; } }
From source file:biz.varkon.shelvesom.server.BGGInfo.java
/** * Parses a valid XML response from the specified input stream. This method * must invoke parse/*from www . java2 s . c o m*/ * {@link ResponseParser#parseResponse(org.xmlpull.v1.XmlPullParser)} if the * XML response is valid, or throw an exception if it is not. * * @param in * The input stream containing the response sent by the web * service. * @param responseParser * The parser to use when the response is valid. * * @throws java.io.IOException */ public static void parseResponse(InputStream in, ResponseParser responseParser, IOUtilities.inputTypes inputType) throws IOException { final XmlPullParser parser = Xml.newPullParser(); try { parser.setInput(new InputStreamReader(in)); int type; while ((type = parser.next()) != XmlPullParser.START_TAG && type != XmlPullParser.END_DOCUMENT) { // Empty } /* * if (type != XmlPullParser.START_TAG) { throw new * InflateException(parser.getPositionDescription() + * ": No start tag found!"); } * * String name; boolean valid = false; final int topDepth = * parser.getDepth(); * * while (((type = parser.next()) != XmlPullParser.END_TAG || parser * .getDepth() > topDepth) && type != XmlPullParser.END_DOCUMENT) { * * if (type != XmlPullParser.START_TAG) { continue; } * * name = parser.getName(); valid = true; if * (RESPONSE_TAG_BOARDGAME.equals(name)) { valid = true; break; } } * * if (valid) */ responseParser.parseResponse(parser); } catch (XmlPullParserException e) { final IOException ioe = new IOException("Could not parse the response"); ioe.initCause(e); throw ioe; } }
From source file:it.geosolutions.geofence.gui.server.utility.IoUtility.java
/** * Decompress.//from w ww. j ava 2 s .c o m * * @param prefix * the prefix * @param inputFile * the input file * @param tempFile * the temp file * @return the file * @throws IOException * Signals that an I/O exception has occurred. */ public static File decompress(final String prefix, final File inputFile, final File tempFile) throws IOException { final File tmpDestDir = createTodayPrefixedDirectory(prefix, new File(tempFile.getParent())); ZipFile zipFile = new ZipFile(inputFile); Enumeration<? extends ZipEntry> entries = zipFile.entries(); while (entries.hasMoreElements()) { ZipEntry entry = (ZipEntry) entries.nextElement(); InputStream stream = zipFile.getInputStream(entry); if (entry.isDirectory()) { // Assume directories are stored parents first then // children. (new File(tmpDestDir, entry.getName())).mkdir(); continue; } File newFile = new File(tmpDestDir, entry.getName()); FileOutputStream fos = new FileOutputStream(newFile); try { byte[] buf = new byte[1024]; int len; while ((len = stream.read(buf)) >= 0) { saveCompressedStream(buf, fos, len); } } catch (IOException e) { zipFile.close(); IOException ioe = new IOException("Not valid ZIP archive file type."); ioe.initCause(e); throw ioe; } finally { fos.flush(); fos.close(); stream.close(); } } zipFile.close(); if ((tmpDestDir.listFiles().length == 1) && (tmpDestDir.listFiles()[0].isDirectory())) { return getShpFile(tmpDestDir.listFiles()[0]); } // File[] files = tmpDestDir.listFiles(new FilenameFilter() { // // public boolean accept(File dir, String name) { // return FilenameUtils.getExtension(name).equalsIgnoreCase("shp"); // } // }); // // return files.length > 0 ? files[0] : null; return getShpFile(tmpDestDir); }
From source file:org.apache.hadoop.hbase.util.JVMClusterUtil.java
/** * Creates a {@link MasterThread}./*from w ww. j a v a 2 s . com*/ * Call 'start' on the returned thread to make it run. * @param c Configuration to use. * @param cp consensus provider to use * @param hmc Class to create. * @param index Used distinguishing the object returned. * @throws IOException * @return Master added. */ public static JVMClusterUtil.MasterThread createMasterThread(final Configuration c, CoordinatedStateManager cp, final Class<? extends HMaster> hmc, final int index) throws IOException { HMaster server; try { server = hmc.getConstructor(Configuration.class, CoordinatedStateManager.class).newInstance(c, cp); } catch (InvocationTargetException ite) { Throwable target = ite.getTargetException(); throw new RuntimeException("Failed construction of Master: " + hmc.toString() + ((target.getCause() != null) ? target.getCause().getMessage() : ""), target); } catch (Exception e) { IOException ioe = new IOException(); ioe.initCause(e); throw ioe; } return new JVMClusterUtil.MasterThread(server, index); }
From source file:org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils.java
/** * Helper function to decode the URL of the filename of the job-history * log file./*from w w w. ja v a 2 s .c o m*/ * * @param logFileName file name of the job-history file * @return URL decoded filename * @throws IOException */ public static String decodeJobHistoryFileName(String logFileName) throws IOException { String decodedFileName = null; try { decodedFileName = URLDecoder.decode(logFileName, "UTF-8"); } catch (UnsupportedEncodingException uee) { IOException ioe = new IOException(); ioe.initCause(uee); ioe.setStackTrace(uee.getStackTrace()); throw ioe; } return decodedFileName; }
From source file:org.apache.hadoop.hbase.util.JVMClusterUtil.java
/** * Creates a {@link RegionServerThread}. * Call 'start' on the returned thread to make it run. * @param c Configuration to use./*from ww w . j a v a2s . co m*/ * @param cp consensus provider to use * @param hrsc Class to create. * @param index Used distinguishing the object returned. * @throws IOException * @return Region server added. */ public static JVMClusterUtil.RegionServerThread createRegionServerThread(final Configuration c, CoordinatedStateManager cp, final Class<? extends HRegionServer> hrsc, final int index) throws IOException { HRegionServer server; try { Constructor<? extends HRegionServer> ctor = hrsc.getConstructor(Configuration.class, CoordinatedStateManager.class); ctor.setAccessible(true); server = ctor.newInstance(c, cp); } catch (InvocationTargetException ite) { Throwable target = ite.getTargetException(); throw new RuntimeException("Failed construction of RegionServer: " + hrsc.toString() + ((target.getCause() != null) ? target.getCause().getMessage() : ""), target); } catch (Exception e) { IOException ioe = new IOException(); ioe.initCause(e); throw ioe; } return new JVMClusterUtil.RegionServerThread(server, index); }
From source file:org.fusesource.meshkeeper.classloader.basic.BasicClassLoaderServer.java
/**
 * Registers each URL for export: file URLs are resolved to local files and
 * handed to {@code addExportedFile}; all other protocols are exported by
 * reference with a freshly assigned id.
 *
 * @param urls the classpath URLs to export
 * @param elements accumulator receiving one {@link ExportedFile} per URL
 * @throws IOException if a file URL cannot be converted to a local path
 */
private static void addExportedURLs(URL[] urls, ArrayList<ExportedFile> elements) throws IOException {
    for (URL url : urls) {
        if ("file".equals(url.getProtocol())) {
            File file;
            try {
                file = new File(url.toURI());
            } catch (URISyntaxException e) {
                // IOException(String, Throwable) chains the cause directly
                // instead of the older initCause two-step.
                throw new IOException(e.getMessage(), e);
            }
            addExportedFile(elements, file);
        } else {
            ExportedFile exportedFile = new ExportedFile();
            exportedFile.element.id = ids.incrementAndGet();
            exportedFile.element.url = url;
            elements.add(exportedFile);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Exporting: " + url.toString());
            }
        }
    }
}
From source file:com.scaleoutsoftware.soss.hserver.hadoop.SubmittedJob.java
@SuppressWarnings("unchecked") private static <T> T getSplitDetails(FSDataInputStream inFile, long offset, Configuration configuration) throws IOException { inFile.seek(offset);//w ww .ja v a 2 s . c om String className = StringInterner.weakIntern(Text.readString(inFile)); Class<T> cls; try { cls = (Class<T>) configuration.getClassByName(className); } catch (ClassNotFoundException ce) { IOException wrap = new IOException("Split class " + className + " not found"); wrap.initCause(ce); throw wrap; } SerializationFactory factory = new SerializationFactory(configuration); Deserializer<T> deserializer = (Deserializer<T>) factory.getDeserializer(cls); deserializer.open(inFile); T split = deserializer.deserialize(null); return split; }
From source file:de.ingrid.iplug.csw.dsc.tools.FileUtils.java
/** * This function will copy files or directories from one location to another. * note that the source and the destination must be mutually exclusive. This * function can not be used to copy a directory to a sub directory of itself. * The function will also have problems if the destination files already exist. * @param src -- A File object that represents the source for the copy * @param dest -- A File object that represnts the destination for the copy. * @throws IOException if unable to copy. * /*from w ww.jav a2 s . c om*/ * Source: http://www.dreamincode.net/code/snippet1443.htm */ public static void copyRecursive(File src, File dest) throws IOException { //Check to ensure that the source is valid... if (!src.exists()) { throw new IOException("copyFiles: Can not find source: " + src.getAbsolutePath() + "."); } else if (!src.canRead()) { //check to ensure we have rights to the source... throw new IOException("copyFiles: No right to source: " + src.getAbsolutePath() + "."); } //is this a directory copy? if (src.isDirectory()) { if (!dest.exists()) { //does the destination already exist? //if not we need to make it exist if possible (note this is mkdirs not mkdir) if (!dest.mkdirs()) { throw new IOException("copyFiles: Could not create direcotry: " + dest.getAbsolutePath() + "."); } } //get a listing of files... String list[] = src.list(); //copy all the files in the list. for (int i = 0; i < list.length; i++) { File dest1 = new File(dest, list[i]); File src1 = new File(src, list[i]); copyRecursive(src1, dest1); } } else { //This was not a directory, so lets just copy the file FileInputStream fin = null; FileOutputStream fout = null; byte[] buffer = new byte[4096]; //Buffer 4K at a time (you can change this). int bytesRead; try { //open the files for input and output fin = new FileInputStream(src); fout = new FileOutputStream(dest); //while bytesRead indicates a successful read, lets write... 
while ((bytesRead = fin.read(buffer)) >= 0) { fout.write(buffer, 0, bytesRead); } fin.close(); fout.close(); fin = null; fout = null; } catch (IOException e) { //Error copying file... IOException wrapper = new IOException("copyFiles: Unable to copy file: " + src.getAbsolutePath() + "to" + dest.getAbsolutePath() + "."); wrapper.initCause(e); wrapper.setStackTrace(e.getStackTrace()); throw wrapper; } finally { //Ensure that the files are closed (if they were open). if (fin != null) { fin.close(); } if (fout != null) { fin.close(); } } } }