List of usage examples for java.io IOException initCause
public synchronized Throwable initCause(Throwable cause)
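Before the per-project examples, here is a minimal, self-contained sketch of the wrap-and-rethrow pattern they all share. The class name, method name, and URI below are illustrative only and do not come from any of the listed sources:

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class InitCauseExample {

    // Wrap a checked low-level exception in an IOException while preserving the root cause.
    static URI parseExecutableUri(String exec) throws IOException {
        try {
            return new URI(exec);
        } catch (URISyntaxException e) {
            IOException ie = new IOException("Problem parsing executable URI " + exec);
            ie.initCause(e); // legal only once per Throwable; repeating it throws IllegalStateException
            throw ie;
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println(parseExecutableUri("hdfs:///apps/pipes/wordcount"));
    }
}

Because initCause may be called at most once, the examples below always attach the cause immediately after constructing the new IOException.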
From source file:org.apache.hadoop.hdfs.server.datanode.BlockSender.java
/**
 * Converts an IOException (not subclasses) to SocketException.
 * This is typically done to indicate to upper layers that the error
 * was a socket error rather than often more serious exceptions like
 * disk errors.
 */
private static IOException ioeToSocketException(IOException ioe) {
    if (ioe.getClass().equals(IOException.class)) {
        // "se" could be a new class instead of SocketException.
        IOException se = new SocketException("Original Exception : " + ioe);
        se.initCause(ioe);
        /* Change the stacktrace so that the original trace is not truncated
         * when printed. */
        se.setStackTrace(ioe.getStackTrace());
        return se;
    }
    // otherwise just return the same exception.
    return ioe;
}
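A hedged sketch of how a converter like this might be invoked at a call site; the method name and stream parameter are illustrative and are not taken from BlockSender:

// Illustrative caller: re-map a plain IOException from a socket write to a SocketException.
private void writePacket(OutputStream socketOut, byte[] buf) throws IOException {
    try {
        socketOut.write(buf);
    } catch (IOException e) {
        throw ioeToSocketException(e);
    }
}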
From source file:org.apache.hama.pipes.Submitter.java
private static void setupPipesJob(BSPJob job) throws IOException {
    job.setBspClass(PipesBSP.class);
    job.setJarByClass(PipesBSP.class);

    String textClassname = Text.class.getName();
    setIfUnset(job.getConfiguration(), "bsp.input.key.class", textClassname);
    setIfUnset(job.getConfiguration(), "bsp.input.value.class", textClassname);
    setIfUnset(job.getConfiguration(), "bsp.output.key.class", textClassname);
    setIfUnset(job.getConfiguration(), "bsp.output.value.class", textClassname);

    setIfUnset(job.getConfiguration(), Constants.MESSAGE_CLASS, BytesWritable.class.getName());
    setIfUnset(job.getConfiguration(), "bsp.job.name", "Hama Pipes Job");

    // DEBUG Output
    LOG.debug("BspClass: " + job.getBspClass().getName());
    // conf.setInputFormat(NLineInputFormat.class);
    LOG.debug("InputFormat: " + job.getInputFormat());
    LOG.debug("InputKeyClass: " + job.getInputKeyClass().getName());
    LOG.debug("InputValueClass: " + job.getInputValueClass().getName());
    LOG.debug("OutputFormat: " + job.getOutputFormat());
    LOG.debug("OutputKeyClass: " + job.getOutputKeyClass().getName());
    LOG.debug("OutputValueClass: " + job.getOutputValueClass().getName());
    LOG.debug("MessageClass: " + job.get(Constants.MESSAGE_CLASS));

    LOG.debug("bsp.master.address: " + job.getConfiguration().get("bsp.master.address"));
    LOG.debug("bsp.local.tasks.maximum: " + job.getConfiguration().get("bsp.local.tasks.maximum"));
    LOG.debug("NumBspTask: " + job.getNumBspTask());
    LOG.debug("fs.default.name: " + job.getConfiguration().get("fs.default.name"));

    String exec = getExecutable(job.getConfiguration());
    if (exec == null) {
        throw new IllegalArgumentException("No application defined. (Set property hama.pipes.executable)");
    }

    URI[] fileCache = DistributedCache.getCacheFiles(job.getConfiguration());
    if (fileCache == null) {
        fileCache = new URI[1];
    } else {
        URI[] tmp = new URI[fileCache.length + 1];
        System.arraycopy(fileCache, 0, tmp, 1, fileCache.length);
        fileCache = tmp;
    }

    try {
        fileCache[0] = new URI(exec);
    } catch (URISyntaxException e) {
        IOException ie = new IOException("Problem parsing executable URI " + exec);
        ie.initCause(e);
        throw ie;
    }
    DistributedCache.setCacheFiles(fileCache, job.getConfiguration());

    // Add libjars to HDFS
    String tmpjars = job.getConfiguration().get("tmpjars");
    LOG.debug("conf.get(tmpjars): " + tmpjars);

    if (tmpjars != null) {
        String hdfsFileUrls = DistributedCacheUtil.addFilesToHDFS(job.getConfiguration(),
                job.getConfiguration().get("tmpjars"));
        job.getConfiguration().set("tmpjars", hdfsFileUrls);

        LOG.info("conf.get(tmpjars): " + job.getConfiguration().get("tmpjars"));
    }
}
From source file:it.crs4.pydoop.pipes.Submitter.java
private static void setupPipesJob(JobConf conf) throws IOException {
    // default map output types to Text
    if (!getIsJavaMapper(conf)) {
        conf.setMapRunnerClass(PipesMapRunner.class);
        // Save the user's partitioner and hook in ours.
        setJavaPartitioner(conf, conf.getPartitionerClass());
        conf.setPartitionerClass(PipesPartitioner.class);
    }
    if (!getIsJavaReducer(conf)) {
        conf.setReducerClass(PipesReducer.class);
        if (!getIsJavaRecordWriter(conf)) {
            conf.setOutputFormat(NullOutputFormat.class);
        }
    }
    String textClassname = Text.class.getName();
    setIfUnset(conf, MRJobConfig.MAP_OUTPUT_KEY_CLASS, textClassname);
    setIfUnset(conf, MRJobConfig.MAP_OUTPUT_VALUE_CLASS, textClassname);
    setIfUnset(conf, MRJobConfig.OUTPUT_KEY_CLASS, textClassname);
    setIfUnset(conf, MRJobConfig.OUTPUT_VALUE_CLASS, textClassname);

    // Use PipesNonJavaInputFormat if necessary to handle progress reporting
    // from C++ RecordReaders ...
    if (!getIsJavaRecordReader(conf) && !getIsJavaMapper(conf)) {
        conf.setClass(Submitter.INPUT_FORMAT, conf.getInputFormat().getClass(), InputFormat.class);
        conf.setInputFormat(PipesNonJavaInputFormat.class);
    }

    String exec = getExecutable(conf);
    if (exec == null) {
        throw new IllegalArgumentException("No application program defined.");
    }
    // add default debug script only when executable is expressed as
    // <path>#<executable>
    if (exec.contains("#")) {
        // set default gdb commands for map and reduce task
        String defScript = "$HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-script";
        setIfUnset(conf, MRJobConfig.MAP_DEBUG_SCRIPT, defScript);
        setIfUnset(conf, MRJobConfig.REDUCE_DEBUG_SCRIPT, defScript);
    }
    URI[] fileCache = DistributedCache.getCacheFiles(conf);
    if (fileCache == null) {
        fileCache = new URI[1];
    } else {
        URI[] tmp = new URI[fileCache.length + 1];
        System.arraycopy(fileCache, 0, tmp, 1, fileCache.length);
        fileCache = tmp;
    }
    try {
        fileCache[0] = new URI(exec);
    } catch (URISyntaxException e) {
        IOException ie = new IOException("Problem parsing executable URI " + exec);
        ie.initCause(e);
        throw ie;
    }
    DistributedCache.setCacheFiles(fileCache, conf);
}
From source file:org.geoserver.data.util.IOUtils.java
public static void decompress(final File inputFile, final File destDir) throws IOException {
    ZipFile zipFile = new ZipFile(inputFile);
    Enumeration<? extends ZipEntry> entries = zipFile.entries();
    while (entries.hasMoreElements()) {
        ZipEntry entry = (ZipEntry) entries.nextElement();
        InputStream stream = zipFile.getInputStream(entry);

        if (entry.isDirectory()) {
            // Assume directories are stored parents first then children.
            (new File(destDir, entry.getName())).mkdir();
            continue;
        }
        File newFile = new File(destDir, entry.getName());
        FileOutputStream fos = new FileOutputStream(newFile);
        try {
            byte[] buf = new byte[1024];
            int len;
            while ((len = stream.read(buf)) >= 0)
                saveCompressedStream(buf, fos, len);
        } catch (IOException e) {
            zipFile.close();
            IOException ioe = new IOException("Not valid COAMPS archive file type.");
            ioe.initCause(e);
            throw ioe;
        } finally {
            fos.flush();
            fos.close();
            stream.close();
        }
    }
    zipFile.close();
}
From source file:org.apache.openejb.math.MathRuntimeException.java
/**
 * Constructs a new <code>IOException</code> with specified nested
 * <code>Throwable</code> root cause.
 * <p>This factory method allows chaining of other exceptions within an
 * <code>IOException</code> even for Java 5. The constructor for
 * <code>IOException</code> with a cause parameter was introduced only
 * with Java 6.</p>
 *
 * @param rootCause the exception or error that caused this exception
 * to be thrown.
 * @return built exception
 */
public static IOException createIOException(final Throwable rootCause) {
    final IOException ioe = new IOException(rootCause.getLocalizedMessage());
    ioe.initCause(rootCause);
    return ioe;
}
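The factory above targets Java 5. On Java 6 and later the same chaining can be expressed directly through the two-argument constructor; a minimal equivalent sketch (the method name is illustrative, not part of the OpenEJB source):

// Java 6+ equivalent: IOException(String, Throwable) sets the cause at construction time,
// so a separate initCause call is no longer needed.
public static IOException createIOExceptionJava6(final Throwable rootCause) {
    return new IOException(rootCause.getLocalizedMessage(), rootCause);
}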
From source file:it.crs4.pydoop.mapreduce.pipes.CommandLineParser.java
private static void setupPipesJob(Job job) throws IOException, ClassNotFoundException {
    Configuration conf = job.getConfiguration();
    // default map output types to Text
    if (!getIsJavaMapper(conf)) {
        job.setMapperClass(PipesMapper.class);
        // Save the user's partitioner and hook in ours.
        setJavaPartitioner(conf, job.getPartitionerClass());
        job.setPartitionerClass(PipesPartitioner.class);
    }
    if (!getIsJavaReducer(conf)) {
        job.setReducerClass(PipesReducer.class);
        if (!getIsJavaRecordWriter(conf)) {
            job.setOutputFormatClass(NullOutputFormat.class);
        }
    }
    String textClassname = Text.class.getName();
    setIfUnset(conf, MRJobConfig.MAP_OUTPUT_KEY_CLASS, textClassname);
    setIfUnset(conf, MRJobConfig.MAP_OUTPUT_VALUE_CLASS, textClassname);
    setIfUnset(conf, MRJobConfig.OUTPUT_KEY_CLASS, textClassname);
    setIfUnset(conf, MRJobConfig.OUTPUT_VALUE_CLASS, textClassname);

    // Use PipesNonJavaInputFormat if necessary to handle progress reporting
    // from C++ RecordReaders ...
    if (!getIsJavaRecordReader(conf) && !getIsJavaMapper(conf)) {
        conf.setClass(Submitter.INPUT_FORMAT, job.getInputFormatClass(), InputFormat.class);
        job.setInputFormatClass(PipesNonJavaInputFormat.class);
    }

    if (avroInput != null) {
        if (explicitInputFormat) {
            conf.setClass(Submitter.INPUT_FORMAT, job.getInputFormatClass(), InputFormat.class);
        } // else let the bridge fall back to the appropriate Avro IF
        switch (avroInput) {
        case K:
            job.setInputFormatClass(PydoopAvroInputKeyBridge.class);
            break;
        case V:
            job.setInputFormatClass(PydoopAvroInputValueBridge.class);
            break;
        case KV:
            job.setInputFormatClass(PydoopAvroInputKeyValueBridge.class);
            break;
        default:
            throw new IllegalArgumentException("Bad Avro input type");
        }
    }

    if (avroOutput != null) {
        if (explicitOutputFormat) {
            conf.setClass(Submitter.OUTPUT_FORMAT, job.getOutputFormatClass(), OutputFormat.class);
        } // else let the bridge fall back to the appropriate Avro OF
        conf.set(props.getProperty("AVRO_OUTPUT"), avroOutput.name());
        switch (avroOutput) {
        case K:
            job.setOutputFormatClass(PydoopAvroOutputKeyBridge.class);
            break;
        case V:
            job.setOutputFormatClass(PydoopAvroOutputValueBridge.class);
            break;
        case KV:
            job.setOutputFormatClass(PydoopAvroOutputKeyValueBridge.class);
            break;
        default:
            throw new IllegalArgumentException("Bad Avro output type");
        }
    }

    String exec = getExecutable(conf);
    if (exec == null) {
        String msg = "No application program defined.";
        throw new IllegalArgumentException(msg);
    }
    // add default debug script only when executable is expressed as
    // <path>#<executable>
    // FIXME: this is kind of useless if the pipes program is not in C++
    if (exec.contains("#")) {
        // set default gdb commands for map and reduce task
        String defScript = "$HADOOP_PREFIX/src/c++/pipes/debug/pipes-default-script";
        setIfUnset(conf, MRJobConfig.MAP_DEBUG_SCRIPT, defScript);
        setIfUnset(conf, MRJobConfig.REDUCE_DEBUG_SCRIPT, defScript);
    }
    URI[] fileCache = DistributedCache.getCacheFiles(conf);
    if (fileCache == null) {
        fileCache = new URI[1];
    } else {
        URI[] tmp = new URI[fileCache.length + 1];
        System.arraycopy(fileCache, 0, tmp, 1, fileCache.length);
        fileCache = tmp;
    }
    try {
        fileCache[0] = new URI(exec);
    } catch (URISyntaxException e) {
        String msg = "Problem parsing executable URI " + exec;
        IOException ie = new IOException(msg);
        ie.initCause(e);
        throw ie;
    }
    DistributedCache.setCacheFiles(fileCache, conf);
}
From source file:com.netxforge.oss2.core.xml.CastorUtils.java
/**
 * Unmarshal a Castor XML configuration file. Uses Java 5 generics for
 * return type.
 *
 * @param clazz the class representing the marshalled XML configuration file
 * @param resource the marshalled XML configuration file to unmarshal
 * @param preserveWhitespace whether or not to preserve whitespace
 * @return Unmarshalled object representing XML file
 * @throws org.exolab.castor.xml.MarshalException if the underlying Castor
 *      Unmarshaller.unmarshal() call throws a org.exolab.castor.xml.MarshalException
 * @throws org.exolab.castor.xml.ValidationException if the underlying Castor
 *      Unmarshaller.unmarshal() call throws a org.exolab.castor.xml.ValidationException
 * @throws java.io.IOException if the resource could not be opened
 */
public static <T> T unmarshal(Class<T> clazz, Resource resource, boolean preserveWhitespace)
        throws MarshalException, ValidationException, IOException {
    InputStream in;
    try {
        in = resource.getInputStream();
    } catch (IOException e) {
        IOException newE = new IOException(
                "Failed to open XML configuration file for resource '" + resource + "': " + e);
        newE.initCause(e);
        throw newE;
    }

    try {
        InputSource source = new InputSource(in);
        try {
            source.setSystemId(resource.getURL().toString());
        } catch (Throwable t) {
            // ignore
        }
        return unmarshal(clazz, source, preserveWhitespace);
    } finally {
        IOUtils.closeQuietly(in);
    }
}
From source file:SocketFetcher.java
/**
 * This method returns a Socket. Properties control the use of
 * socket factories and other socket characteristics. The properties
 * used are:
 * <ul>
 * <li> <i>prefix</i>.socketFactory.class
 * <li> <i>prefix</i>.socketFactory.fallback
 * <li> <i>prefix</i>.socketFactory.port
 * <li> <i>prefix</i>.timeout
 * <li> <i>prefix</i>.connectiontimeout
 * <li> <i>prefix</i>.localaddress
 * <li> <i>prefix</i>.localport
 * </ul> <p>
 * If the socketFactory.class property isn't set, the socket
 * returned is an instance of java.net.Socket connected to the
 * given host and port. If the socketFactory.class property is set,
 * it is expected to contain a fully qualified classname of a
 * javax.net.SocketFactory subclass. In this case, the class is
 * dynamically instantiated and a socket created by that
 * SocketFactory is returned. <p>
 *
 * If the socketFactory.fallback property is set to false, don't
 * fall back to using regular sockets if the socket factory fails. <p>
 *
 * The socketFactory.port specifies a port to use when connecting
 * through the socket factory. If unset, the port argument will be
 * used. <p>
 *
 * If the connectiontimeout property is set, we use a separate thread
 * to make the connection so that we can timeout that connection attempt. <p>
 *
 * If the timeout property is set, it is used to set the socket timeout. <p>
 *
 * If the localaddress property is set, it's used as the local address
 * to bind to. If the localport property is also set, it's used as the
 * local port number to bind to.
 *
 * @param host The host to connect to
 * @param port The port to connect to at the host
 * @param props Properties object containing socket properties
 * @param prefix Property name prefix, e.g., "mail.imap"
 * @param useSSL use the SSL socket factory as the default
 */
public static Socket getSocket(String host, int port, Properties props, String prefix, boolean useSSL)
        throws IOException {
    if (prefix == null)
        prefix = "socket";
    if (props == null)
        props = new Properties(); // empty

    String s = props.getProperty(prefix + ".connectiontimeout", null);
    int cto = -1;
    if (s != null) {
        try {
            cto = Integer.parseInt(s);
        } catch (NumberFormatException nfex) {
        }
    }

    Socket socket = null;
    String timeout = props.getProperty(prefix + ".timeout", null);
    String localaddrstr = props.getProperty(prefix + ".localaddress", null);
    InetAddress localaddr = null;
    if (localaddrstr != null)
        localaddr = InetAddress.getByName(localaddrstr);
    String localportstr = props.getProperty(prefix + ".localport", null);
    int localport = 0;
    if (localportstr != null) {
        try {
            localport = Integer.parseInt(localportstr);
        } catch (NumberFormatException nfex) {
        }
    }

    boolean fb = false;
    String fallback = props.getProperty(prefix + ".socketFactory.fallback", null);
    fb = fallback == null || (!fallback.equalsIgnoreCase("false"));

    String sfClass = props.getProperty(prefix + ".socketFactory.class", null);
    int sfPort = -1;
    try {
        SocketFactory sf = getSocketFactory(sfClass);
        if (sf != null) {
            String sfPortStr = props.getProperty(prefix + ".socketFactory.port", null);
            if (sfPortStr != null) {
                try {
                    sfPort = Integer.parseInt(sfPortStr);
                } catch (NumberFormatException nfex) {
                }
            }
            // if port passed in via property isn't valid, use param
            if (sfPort == -1)
                sfPort = port;
            socket = createSocket(localaddr, localport, host, sfPort, cto, sf, useSSL);
        }
    } catch (SocketTimeoutException sex) {
        throw sex;
    } catch (Exception ex) {
        if (!fb) {
            if (ex instanceof InvocationTargetException) {
                Throwable t = ((InvocationTargetException) ex).getTargetException();
                if (t instanceof Exception)
                    ex = (Exception) t;
            }
            if (ex instanceof IOException)
                throw (IOException) ex;
            IOException ioex = new IOException("Couldn't connect using \"" + sfClass
                    + "\" socket factory to host, port: " + host + ", " + sfPort + "; Exception: " + ex);
            ioex.initCause(ex);
            throw ioex;
        }
    }

    if (socket == null)
        socket = createSocket(localaddr, localport, host, port, cto, null, useSSL);

    int to = -1;
    if (timeout != null) {
        try {
            to = Integer.parseInt(timeout);
        } catch (NumberFormatException nfex) {
        }
    }
    if (to >= 0)
        socket.setSoTimeout(to);

    configureSSLSocket(socket, props, prefix);
    return socket;
}
From source file:de.ingrid.interfaces.csw.tools.FileUtils.java
/**
 * This function will copy files or directories from one location to
 * another. Note that the source and the destination must be mutually
 * exclusive. This function can not be used to copy a directory to a sub
 * directory of itself. The function will also have problems if the
 * destination files already exist.
 *
 * @param src
 *            -- A File object that represents the source for the copy
 * @param dest
 *            -- A File object that represents the destination for the copy.
 * @throws IOException
 *             if unable to copy.
 *
 * Source: http://www.dreamincode.net/code/snippet1443.htm
 */
public static void copyRecursive(File src, File dest) throws IOException {
    // Check to ensure that the source is valid...
    if (!src.exists()) {
        throw new IOException("copyFiles: Can not find source: " + src.getAbsolutePath() + ".");
    } else if (!src.canRead()) {
        // check to ensure we have rights to the source...
        throw new IOException("copyFiles: No right to source: " + src.getAbsolutePath() + ".");
    }
    // is this a directory copy?
    if (src.isDirectory()) {
        if (!dest.exists()) {
            // does the destination already exist?
            // if not we need to make it exist if possible (note this is
            // mkdirs not mkdir)
            if (!dest.mkdirs()) {
                throw new IOException("copyFiles: Could not create directory: " + dest.getAbsolutePath() + ".");
            }
        }
        // get a listing of files...
        String list[] = src.list();
        // copy all the files in the list.
        for (String element : list) {
            File dest1 = new File(dest, element);
            File src1 = new File(src, element);
            copyRecursive(src1, dest1);
        }
    } else {
        // This was not a directory, so lets just copy the file
        FileInputStream fin = null;
        FileOutputStream fout = null;
        byte[] buffer = new byte[4096]; // Buffer 4K at a time (you can change this).
        int bytesRead;
        try {
            // open the files for input and output
            fin = new FileInputStream(src);
            fout = new FileOutputStream(dest);
            // while bytesRead indicates a successful read, lets write...
            while ((bytesRead = fin.read(buffer)) >= 0) {
                fout.write(buffer, 0, bytesRead);
            }
            fin.close();
            fout.close();
            fin = null;
            fout = null;
        } catch (IOException e) {
            // Error copying file...
            IOException wrapper = new IOException("copyFiles: Unable to copy file: "
                    + src.getAbsolutePath() + " to " + dest.getAbsolutePath() + ".");
            wrapper.initCause(e);
            wrapper.setStackTrace(e.getStackTrace());
            throw wrapper;
        } finally {
            // Ensure that the files are closed (if they were open).
            if (fin != null) {
                fin.close();
            }
            if (fout != null) {
                fout.close();
            }
        }
    }
}
From source file:org.geotools.gce.imagemosaic.CatalogManager.java
/**
 * Create a granule catalog from a datastore properties file
 *
 * @param parent
 * @param datastoreProperties
 * @param create
 * @param hints
 * @return
 * @throws IOException
 */
public static GranuleCatalog createGranuleCatalogFromDatastore(File parent, File datastoreProperties,
        boolean create, Hints hints) throws IOException {
    GranuleCatalog catalog = null;
    Utilities.ensureNonNull("datastoreProperties", datastoreProperties);
    Properties properties = createGranuleCatalogProperties(datastoreProperties);
    // SPI
    final String SPIClass = properties.getProperty("SPI");
    try {
        // create a datastore as instructed
        final DataStoreFactorySpi spi = (DataStoreFactorySpi) Class.forName(SPIClass).newInstance();

        // set ParentLocation parameter since for embedded database like H2 we must change the database
        // to incorporate the path where to write the db
        properties.put("ParentLocation", DataUtilities.fileToURL(parent).toExternalForm());

        catalog = GranuleCatalogFactory.createGranuleCatalog(properties, false, create, spi, hints);
        MultiLevelROIProvider rois = MultiLevelROIProviderFactory.createFootprintProvider(parent);
        catalog.setMultiScaleROIProvider(rois);
    } catch (Exception e) {
        final IOException ioe = new IOException();
        throw (IOException) ioe.initCause(e);
    }
    return catalog;
}
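Note the idiom in the catch block above: initCause returns the receiver typed as Throwable, so the wrap can be chained into the throw statement with a cast back to IOException. A minimal, self-contained sketch of the same idiom (class, method, and message are illustrative, not from the GeoTools source):

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

public class ChainedThrowExample {

    // initCause returns "this" as a Throwable, so wrap-and-rethrow fits in one statement.
    static byte[] readOrWrap(File file) throws IOException {
        try {
            return Files.readAllBytes(file.toPath());
        } catch (RuntimeException e) {
            throw (IOException) new IOException("Failed to read " + file).initCause(e);
        }
    }
}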