Example usage for java.io IOException getStackTrace

List of usage examples for java.io IOException getStackTrace

Introduction

On this page you can find usage examples for java.io IOException getStackTrace.

Prototype

public StackTraceElement[] getStackTrace() 

Document

Provides programmatic access to the stack trace information printed by #printStackTrace().
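
A minimal, self-contained sketch of how the returned array can be inspected (the exception message and class name below are illustrative only):

import java.io.IOException;

public class StackTraceDemo {
    public static void main(String[] args) {
        try {
            throw new IOException("disk read failed");
        } catch (IOException e) {
            // Each StackTraceElement describes one stack frame: class, method, file and line.
            for (StackTraceElement frame : e.getStackTrace()) {
                System.out.println(frame.getClassName() + "." + frame.getMethodName()
                        + " (" + frame.getFileName() + ":" + frame.getLineNumber() + ")");
            }
        }
    }
}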

Usage

From source file:com.excilys.ebi.gatling.recorder.ui.component.RunningFrame.java

private void saveScenario() {
    VelocityEngine ve = new VelocityEngine();
    ve.setProperty("file.resource.loader.class", ClasspathResourceLoader.class.getName());
    ve.init();

    VelocityContext context = new VelocityContext();
    context.put("protocol", protocol);
    context.put("host", host);
    context.put("port", port);
    context.put("urlBase", urlBaseString);
    context.put("proxy", configuration.getProxy());
    context.put("urls", urls);
    context.put("headers", headers);
    context.put("name", "Scenario name");

    if (listEvents.size() > EVENTS_GROUPING) {
        List<List<Object>> subListsEvents = new ArrayList<List<Object>>();
        int numberOfSubLists = listEvents.size() / EVENTS_GROUPING + 1;
        for (int i = 0; i < numberOfSubLists; i++)
            subListsEvents.add(listEvents.subList(0 + EVENTS_GROUPING * i,
                    Math.min(EVENTS_GROUPING * (i + 1), listEvents.size() - 1)));

        context.put("chainEvents", subListsEvents);
        context.put("events", new ArrayList<Object>());
    } else {
        context.put("events", listEvents);
        context.put("chainEvents", new ArrayList<List<Object>>());
    }

    context.put("package", Configuration.getInstance().getIdePackage());
    context.put("date", ResultType.FORMAT.format(startDate));
    URI uri = URI.create("");
    context.put("URI", uri);

    Template template = null;
    Writer writer = null;
    for (ResultType resultType : configuration.getResultTypes()) {
        try {
            template = ve.getTemplate(resultType.getTemplate());
            writer = new OutputStreamWriter(
                    new FileOutputStream(
                            new File(getOutputFolder(), resultType.getScenarioFileName(startDate))),
                    configuration.getEncoding());
            template.merge(context, writer);
            writer.flush();

        } catch (IOException e) {
            logger.error("Error, while saving '" + resultType + "' scenario..." + e.getStackTrace());

        } finally {
            closeQuietly(writer);
        }
    }
}
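
Note that the catch block above concatenates e.getStackTrace() directly into the log message, which only appends the array's default toString() (an array reference such as [Ljava.lang.StackTraceElement;@6d06d69c), not the frames themselves. Below is a minimal sketch of two common alternatives, using System.err in place of the example's logger (the message text is illustrative only); the MongoDBCollection example further down uses Arrays.toString for the same reason:

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Arrays;

public class StackTraceLogging {
    public static void main(String[] args) {
        try {
            throw new IOException("could not write scenario file");
        } catch (IOException e) {
            // Arrays.toString() renders each frame instead of the array reference.
            System.err.println("Error while saving scenario: " + Arrays.toString(e.getStackTrace()));

            // Alternatively, capture the full printStackTrace() output, including the message and cause chain.
            StringWriter sw = new StringWriter();
            e.printStackTrace(new PrintWriter(sw));
            System.err.println("Error while saving scenario:\n" + sw);
        }
    }
}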

From source file:org.archive.io.arc.ARCReader.java

/**
 * Create new arc record.
 *
 * Encapsulate housekeeping that has to do w/ creating a new record.
 *
 * <p>Call this method at end of constructor to read in the
 * arcfile header.  Will be problems reading subsequent arc records
 * if you don't since arcfile header has the list of metadata fields for
 * all records that follow.
 * 
 * <p>When parsing through ARCs writing out CDX info, we spend about
 * 38% of CPU in here -- about 30% of which is in getTokenizedHeaderLine
 * -- of which 16% is reading.
 *
 * @param is InputStream to use.
 * @param offset Absolute offset into arc file.
 * @return An arc record.
 * @throws IOException
 */
protected ARCRecord createArchiveRecord(InputStream is, long offset) throws IOException {
    try {
        String version = super.getVersion();
        ARCRecord record = new ARCRecord(is, getReaderIdentifier(), offset, isDigest(), isStrict(),
                isParseHttpHeaders(), isAlignedOnFirstRecord(), version);
        if (version != null && super.getVersion() == null)
            super.setVersion(version);
        currentRecord(record);
    } catch (IOException e) {
        if (e instanceof RecoverableIOException) {
            // Don't mess with RecoverableIOExceptions.  Let them out.
            throw e;
        }
        IOException newE = new IOException(e.getMessage() + " (Offset " + offset + ").");
        newE.setStackTrace(e.getStackTrace());
        throw newE;
    }
    return (ARCRecord) getCurrentRecord();
}
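
The pattern above rethrows a new IOException that adds context (the file offset) to the message while copying the original frames via setStackTrace(e.getStackTrace()), so the reported trace still points at the original failure. A minimal, self-contained sketch of the same idea (the readAt helper, message, and offset are illustrative only); on Java 6 and later, new IOException(message, cause) achieves a similar effect by chaining the original exception as the cause instead:

import java.io.IOException;

public class RethrowWithOffset {
    // Hypothetical helper that fails partway through a read.
    static void readAt(long offset) throws IOException {
        throw new IOException("truncated record");
    }

    public static void main(String[] args) {
        long offset = 1024L;
        try {
            readAt(offset);
        } catch (IOException e) {
            // Wrap with extra context but keep the original stack frames visible.
            IOException withContext = new IOException(e.getMessage() + " (Offset " + offset + ").");
            withContext.setStackTrace(e.getStackTrace());
            withContext.printStackTrace();
        }
    }
}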

From source file:com.cablelabs.fsm.AutoProvState.java

private String autoGenerate(int msgIndexQueue) {
    Properties platform = SystemSettings.getSettings(SettingConstants.PLATFORM);
    Properties dut = SystemSettings.getSettings(SettingConstants.DUT);
    Boolean autoProv = SystemSettings.getBooleanSetting(SettingConstants.AUTO_GENERATE);
    Boolean autoGen = SystemSettings.getBooleanSetting(SettingConstants.AUTO_PROVISION);
    if (autoProv && autoGen) {
        if (pd != null && platform != null && dut != null) {
            String pcscfLabel = dut.getProperty(SettingConstants.PCSCF);
            String macAddr = dut.getProperty(SettingConstants.MAC_ADDRESS);
            String tftpIP = platform.getProperty(SettingConstants.TFTP_SERVER_IP);
            String tftpPort = platform.getProperty(SettingConstants.TFTP_SERVER_PORT);
            String phone1 = dut.getProperty(SettingConstants.PHONE_NUMBER_1);
            String phone2 = dut.getProperty(SettingConstants.PHONE_NUMBER_2);
            String cw = platform.getProperty(SettingConstants.CW_NUMBER);
            if (macAddr != null && pcscfLabel != null && tftpIP != null && tftpPort != null && cw != null) {

                // Next verify the port is not set to zero
                try {
                    int port = Integer.parseInt(tftpPort);
                    if (port > 0 && port <= 65535) {

                        // Next make sure the TFTP Server IP is not set to 0.0.0.0
                        if (tftpIP.equals("0.0.0.0")) {
                            logger.warn(PC2LogCategory.PCSim2, subCat,
                                    "The TFTP Server IP setting in the platform file is not valid. Ending auto generate operation.");
                            return null;
                        }

                        File input = new File(SettingConstants.AUTO_PROV_FILE_DIRECTORY + File.separator
                                + SettingConstants.CW + cw + File.separator + pd.getProvFileName());
                        if (input != null) {
                            ProvGen pg = new ProvGen(input);
                            if (phone1 != null)
                                pg.changePhoneNum(SettingConstants.AUTO_GENERATE_PHONE_NUMBER_1, phone1);
                            if (phone2 != null)
                                pg.changePhoneNum(SettingConstants.AUTO_GENERATE_PHONE_NUMBER_2, phone2);
                            Properties pcscf = SystemSettings.getSettings(pcscfLabel);
                            if (pcscf != null) {
                                String pcscfIP = pcscf.getProperty(SettingConstants.IP);
                                if (pcscfIP != null)
                                    pg.changePCSCF(pcscfIP);
                            }
                            String newFileName = macAddr + ".bin";
                            if (pg.output(SettingConstants.AUTO_PROV_FILE_DIRECTORY + File.separator
                                    + SettingConstants.CW + cw + File.separator + newFileName)) {
                                // Test system
                                //File output = new File(SettingConstants.AUTO_PROV_FILE_DIRECTORY + newFileName);
                                //File pact = new File(SettingConstants.AUTO_PROV_FILE_DIRECTORY + "chinmaya_base_ph1_pcscf.bin");
                                //pg.compare(pact, output);
                                // Create a data entry of the issued event
                                //ProvisioningData issuePD = new ProvisioningData(macAddr, pd.getPolicyFileName(), newFileName);
                                logger.info(PC2LogCategory.FSM, subCat,
                                        "AutoProvState beginning to TFTP the new provisioning file.");

                                // Next we need to TFTP the file to the server
                                TFTPClient client = new TFTPClient();
                                File binFile = new File(
                                        SettingConstants.AUTO_PROV_FILE_DIRECTORY + File.separator
                                                + SettingConstants.CW + cw + File.separator + newFileName);
                                if (binFile.isFile() && binFile.canRead()) {
                                    FileInputStream istrm = new FileInputStream(binFile);
                                    //InetAddress ia = InetAddress.getByName("10.4.1.37");
                                    client.open(); // client.open(20003, ia);
                                    client.sendFile(newFileName, TFTP.BINARY_MODE, istrm, tftpIP, port);
                                    client.close();
                                    logger.info(PC2LogCategory.FSM, subCat,
                                            "TFTP of the new provisioning file is complete.");
                                    super.processPrelude(msgIndexQueue);
                                } else {
                                    logger.warn(PC2LogCategory.FSM, subCat,
                                            "The " + macAddr + ".bin doesn't appear in the "
                                                    + SettingConstants.AUTO_PROV_FILE_DIRECTORY + File.separator
                                                    + SettingConstants.CW + cw
                                                    + " Ending auto generate operation.");
                                }
                            } else {
                                logger.error(PC2LogCategory.FSM, subCat,
                                        "AutoProvState could not locate provisioning template file["
                                                + input.getAbsolutePath() + "].");
                            }
                        }
                        //                  else {
                        //                     logger.info(PC2LogCategory.FSM, subCat, "AutoProvState is terminating because the input directory is null.");
                        //                  }
                    } else {
                        logger.info(PC2LogCategory.PCSim2, subCat,
                                "AutoProvState is terminating because the port(" + port
                                        + ") is less than 0 or greater than 65535.");
                    }
                } catch (NumberFormatException nfe) {
                    logger.warn(PC2LogCategory.FSM, subCat,
                            "AutoProvState is not auto generating a provisioning file because the "
                                    + "TFTP Server Port setting doesn't appear to be a number.");
                } catch (UnknownHostException uhe) {
                    logger.warn(PC2LogCategory.FSM, subCat,
                            "AutoProvState is not auto generating a provisioning file because the "
                                    + "system encountered an error when attempting to send the file to the TFTP Server.\n"
                                    + uhe.getMessage() + "\n" + uhe.getStackTrace());
                } catch (IOException ioe) {
                    logger.warn(PC2LogCategory.FSM, subCat,
                            "AutoProvState is not auto generating a provisioning file because the "
                                    + "system encountered an error when attempting to send the file to the TFTP Server.\n"
                                    + ioe.getMessage() + "\n" + ioe.getStackTrace());
                }

            } else {
                logger.info(PC2LogCategory.FSM, subCat,
                        "AutoProvState is stopping because one of the values is null.\n" + "macAddr=" + macAddr
                                + " pcscfLabel=" + pcscfLabel + " tftpIP=" + tftpIP + " tftpPort=" + tftpPort);
            }
        } else {
            if (pd != null)
                logger.info(PC2LogCategory.FSM, subCat,
                        "The provisioning data is null, terminating processing.");
            if (platform != null)
                logger.info(PC2LogCategory.FSM, subCat,
                        "The Platform settings is null, terminating processing.");
            if (dut != null)
                logger.info(PC2LogCategory.FSM, subCat, "The DUT settings is null, terminating processing.");
        }
    } else {
        Generate g = new Generate(EventConstants.AUTO_PROV_PROMPT, null, this.owner.getName());
        try {
            g.execute(super.api, 0);
        } catch (PC2Exception pce) {
            logger.error(PC2LogCategory.FSM, subCat,
                    name + " couldn't generate " + EventConstants.AUTO_PROV_PROMPT + " event to the FSM.");
        }
    }

    return null;
}

From source file:com.bah.applefox.main.plugins.fulltextindex.FTLoader.java

/**
 * run takes the commandline args as arguments (in this case from a
 * configuration file), creates a new job, configures it, initiates it,
 * waits for completion, and returns 0 if it is successful (1 if it is not)
 *
 * @param args
 *            the commandline arguments (in this case from a configuration
 *            file)
 * 
 * @return 0 if the job ran successfully and 1 if it did not
 */
public int run(String[] args) throws Exception {
    try {
        // Initialize variables
        FTLoader.articleFile = args[8];
        FTLoader.maxNGrams = Integer.parseInt(args[9]);
        FTLoader.stopWords = getStopWords();
        FTLoader.dTable = args[10];
        FTLoader.urlCheckedTable = args[11];
        FTLoader.divsFile = args[20];
        FTLoader.exDivs = getExDivs();

        // Give the job a name
        String jobName = this.getClass().getSimpleName() + "_" + System.currentTimeMillis();

        // Create job and set the jar
        Job job = new Job(getConf(), jobName);
        job.setJarByClass(this.getClass());

        String urlTable = args[5];

        job.setInputFormatClass(AccumuloInputFormat.class);
        InputFormatBase.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
        InputFormatBase.setInputInfo(job.getConfiguration(), args[2], args[3].getBytes(), urlTable,
                new Authorizations());

        job.setMapperClass(MapperClass.class);
        job.setMapOutputKeyClass(Key.class);
        job.setMapOutputValueClass(Value.class);

        job.setReducerClass(ReducerClass.class);
        job.setNumReduceTasks(Integer.parseInt(args[4]));

        job.setOutputFormatClass(AccumuloOutputFormat.class);
        job.setOutputKeyClass(Key.class);
        job.setOutputValueClass(Value.class);

        AccumuloOutputFormat.setZooKeeperInstance(job.getConfiguration(), args[0], args[1]);
        AccumuloOutputFormat.setOutputInfo(job.getConfiguration(), args[2], args[3].getBytes(), true, urlTable);

        job.waitForCompletion(true);

        return job.isSuccessful() ? 0 : 1;
    } catch (IOException e) {
        if (e.getMessage() != null) {
            log.error(e.getMessage());
        } else {
            log.error(e.getStackTrace());
        }
    } catch (InterruptedException e) {
        if (e.getMessage() != null) {
            log.error(e.getMessage());
        } else {
            log.error(e.getStackTrace());
        }
    } catch (ClassNotFoundException e) {
        if (e.getMessage() != null) {
            log.error(e.getMessage());
        } else {
            log.error(e.getStackTrace());
        }
    }
    return 1;
}

From source file:org.opencb.commons.datastore.mongodb.MongoDBCollection.java

private <T> QueryResult<T> privateFind(Bson query, Bson projection, Class<T> clazz,
        ComplexTypeConverter<T, Document> converter, QueryOptions options) {
    long start = startQuery();

    /**
     * Getting the cursor and setting the batchSize from options. Default value set to 20.
     */
    FindIterable<Document> findIterable = mongoDBNativeQuery.find(query, projection, options);
    MongoCursor<Document> cursor = findIterable.iterator();

    QueryResult<T> queryResult;
    List<T> list = new LinkedList<>();
    if (cursor != null) {
        if (queryResultWriter != null) {
            try {
                queryResultWriter.open();
                while (cursor.hasNext()) {
                    queryResultWriter.write(cursor.next());
                }
                queryResultWriter.close();
            } catch (IOException e) {
                cursor.close();
                queryResult = endQuery(null, start);
                queryResult.setErrorMsg(e.getMessage() + " " + Arrays.toString(e.getStackTrace()));
                return queryResult;
            }
        } else {
            if (converter != null) {
                while (cursor.hasNext()) {
                    list.add(converter.convertToDataModelType(cursor.next()));
                }
            } else {
                if (clazz != null && !clazz.equals(Document.class)) {
                    Document document;
                    while (cursor.hasNext()) {
                        document = cursor.next();
                        try {
                            list.add(objectMapper.readValue(objectWriter.writeValueAsString(document), clazz));
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                } else {
                    while (cursor.hasNext()) {
                        list.add((T) cursor.next());
                    }
                }
            }
        }

        if (options != null && options.getInt(QueryOptions.SKIP) <= 0
                && options.getInt(QueryOptions.LIMIT) > 0) {
            int numTotalResults;
            if (options.getBoolean(QueryOptions.SKIP_COUNT)) {
                numTotalResults = -1;
            } else {
                try {
                    //                        numTotalResults = findIterable.maxTime(options.getInt("countTimeout"), TimeUnit.MILLISECONDS).count();
                    numTotalResults = (int) mongoDBNativeQuery.count(query);
                } catch (MongoExecutionTimeoutException e) {
                    numTotalResults = -1;
                }
            }
            queryResult = endQuery(list, numTotalResults, start);
        } else {
            queryResult = endQuery(list, start);
        }
        cursor.close();
    } else {
        queryResult = endQuery(list, start);
    }

    return queryResult;
}

From source file:org.apache.pig.piggybank.storage.AllLoader.java

/**
 * Reads the partition columns
 * 
 * @param location
 * @param job
 * @return
 */
private String[] getPartitionColumns(String location, Job job) {

    if (partitionColumns == null) {
        // read the partition columns from the UDF Context first.
        // if not in the UDF context then read it using the PathPartitioner.

        Properties properties = getUDFContext();

        if (properties == null) {
            properties = new Properties();
        }

        String partitionColumnStr = properties.getProperty(PathPartitionHelper.PARTITION_COLUMNS);

        if (partitionColumnStr == null && !(location == null || job == null)) {
            // if it hasn't been written yet.
            Set<String> partitionColumnSet;

            try {
                partitionColumnSet = pathPartitionerHelper.getPartitionKeys(location, job.getConfiguration());
            } catch (IOException e) {

                RuntimeException rte = new RuntimeException(e);
                rte.setStackTrace(e.getStackTrace());
                throw rte;

            }

            if (partitionColumnSet != null) {

                StringBuilder buff = new StringBuilder();

                int i = 0;
                for (String column : partitionColumnSet) {
                    if (i++ != 0) {
                        buff.append(',');
                    }

                    buff.append(column);
                }

                String buffStr = buff.toString().trim();

                if (buffStr.length() > 0) {

                    properties.setProperty(PathPartitionHelper.PARTITION_COLUMNS, buff.toString());
                }

                partitionColumns = partitionColumnSet.toArray(new String[] {});

            }

        } else {
            // the partition columns has been set already in the UDF Context
            if (partitionColumnStr != null) {
                String split[] = partitionColumnStr.split(",");
                Set<String> partitionColumnSet = new LinkedHashSet<String>();
                if (split.length > 0) {
                    for (String splitItem : split) {
                        partitionColumnSet.add(splitItem);
                    }
                }

                partitionColumns = partitionColumnSet.toArray(new String[] {});
            }

        }

    }

    return partitionColumns;

}

From source file:org.apache.lucene.gdata.storage.lucenestorage.StorageImplementation.java

/**
 * @see org.apache.lucene.gdata.storage.Storage#updateEntry(org.apache.lucene.gdata.data.ServerBaseEntry)
 */
public BaseEntry updateEntry(ServerBaseEntry entry) throws StorageException {

    if (entry == null)
        throw new StorageException("entry is null");
    if (entry.getId() == null)
        throw new StorageException("entry id is null");
    if (entry.getServiceConfig() == null)
        throw new StorageException("service config is not set -- null");
    if (entry.getFeedId() == null)
        throw new StorageException("feed id is null");
    if (LOG.isInfoEnabled())
        LOG.info("update entry " + entry.getId() + " -- feed: " + entry.getFeedId());
    StorageModifier modifier = this.controller.getStorageModifier();
    ReferenceCounter<StorageQuery> query = this.controller.getStorageQuery();
    // try to set concurrency Lock
    String key = entry.getId();
    setLock(key);
    try {

        if (query.get().isEntryStored(entry.getId(), entry.getFeedId())) {

            if (query.get().checkEntryVersion(entry.getId(), entry.getFeedId(), entry.getVersion())) {
                entry.setVersion(entry.getVersion() + 1);
                StorageEntryWrapper wrapper = new StorageEntryWrapper(entry, StorageOperation.UPDATE);
                modifier.updateEntry(wrapper);
            } else
                throw new ModificationConflictException("The entry version does not match -- entry "
                        + entry.getId() + " feed:" + entry.getFeedId() + " version: " + entry.getVersion());

        } else
            throw new ResourceNotFoundException("Entry for entry id: " + entry.getId() + " is not stored");

    } catch (IOException e) {
        LOG.error("Can't update entry for feedID: " + entry.getFeedId() + "; entryId: " + entry.getId() + " -- "
                + e.getMessage(), e);
        StorageException ex = new StorageException("Can't update Entry -- " + e.getMessage(), e);
        ex.setStackTrace(e.getStackTrace());
        throw ex;

    }

    finally {
        if (query != null)
            query.decrementRef();
        // release lock for concurrency
        releaseLock(key);
    }

    return entry.getEntry();

}

From source file:org.apache.pig.piggybank.storage.HiveColumnarLoader.java

/**
 * Reads the partition columns
 *
 * @param location
 * @param job
 * @return
 */
private Set<String> getPartitionColumns(String location, Job job) {

    if (partitionColumns == null) {
        // read the partition columns from the UDF Context first.
        // if not in the UDF context then read it using the PathPartitioner.

        Properties properties = getUDFContext();

        if (properties == null)
            properties = new Properties();

        String partitionColumnStr = properties.getProperty(PathPartitionHelper.PARTITION_COLUMNS);

        if (partitionColumnStr == null && !(location == null || job == null)) {
            // if it hasn't been written yet.
            Set<String> partitionColumnSet;

            try {
                partitionColumnSet = pathPartitionerHelper.getPartitionKeys(location, job.getConfiguration());
            } catch (IOException e) {

                RuntimeException rte = new RuntimeException(e);
                rte.setStackTrace(e.getStackTrace());
                throw rte;

            }

            if (partitionColumnSet != null) {

                StringBuilder buff = new StringBuilder();

                int i = 0;
                for (String column : partitionColumnSet) {
                    if (i++ != 0) {
                        buff.append(',');
                    }

                    buff.append(column);
                }

                String buffStr = buff.toString().trim();

                if (buffStr.length() > 0) {

                    properties.setProperty(PathPartitionHelper.PARTITION_COLUMNS, buff.toString());
                }

                partitionColumns = partitionColumnSet;

            }

        } else {
            // the partition columns has been set already in the UDF Context
            if (partitionColumnStr != null) {
                String split[] = partitionColumnStr.split(",");
                partitionColumns = new LinkedHashSet<String>();
                if (split.length > 0) {
                    for (String splitItem : split) {
                        partitionColumns.add(splitItem);
                    }
                }
            }

        }

    }

    return partitionColumns;

}

From source file:cn.vlabs.umt.ui.servlet.AuthorizationCodeServlet.java

public void writeJSONResponse(HttpServletResponse response, JSONObject obj) {
    PrintWriter writer = null;
    try {
        // For IE the content type needs to be set to text/html
        response.setContentType("text/html");
        writer = response.getWriter();
        writer.write(obj.toString());
    } catch (IOException e) {
        LOG.error("JSONHelper write json object IOException:" + e.getMessage());
        LOG.debug(e.getStackTrace());
    } finally {
        if (writer != null) {
            writer.flush();
            writer.close();
        }
    }
}

From source file:org.apache.solr.update.InvenioKeepRecidUpdated.java

private void runAsynchronously(Map<String, Object> dictData, SolrQueryRequest req) {

    final Map<String, Object> dataToProcess = dictData;
    final SolrQueryRequest localReq = new LocalSolrQueryRequest(req.getCore(), req.getParams());
    localReq.getContext().put(IKRU_PROPERTIES, req.getContext().get(IKRU_PROPERTIES));

    new Thread(new Runnable() {

        public void run() {
            try {
                runSynchronously(dataToProcess, localReq);
            } catch (IOException e) {
                log.error(e.getLocalizedMessage());
                log.error(e.getStackTrace().toString());
            } catch (InterruptedException e) {
                log.error(e.getLocalizedMessage());
                log.error(e.getStackTrace().toString());
            } finally {
                setBusy(false);
                localReq.close();
            }
        }
    }).start();
}