Example usage for java.io InputStream reset

List of usage examples for java.io InputStream reset

Introduction

On this page you can find example usage of java.io.InputStream.reset().

Prototype

public synchronized void reset() throws IOException 

Document

Repositions this stream to the position at the time the mark method was last called on this input stream.
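
The contract in one sketch: mark(readlimit) remembers the current position, and reset() repositions the stream there, provided no more than readlimit bytes have been read in between and the stream supports marking at all (check markSupported() first). A minimal, self-contained example; BufferedInputStream is used because many streams, such as FileInputStream, do not support marking on their own:

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class MarkResetDemo {
    public static void main(String[] args) throws IOException {
        InputStream in = new BufferedInputStream(new ByteArrayInputStream("hello".getBytes()));
        in.mark(16);            // remember this position; valid while at most 16 bytes are read
        int first = in.read();  // consumes 'h'
        in.reset();             // repositions to the mark
        int again = in.read();  // reads 'h' again
        System.out.println(first == again); // prints: true
    }
}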

Usage

From source file:au.org.theark.lims.util.BioCollectionSpecimenUploader.java

/**
 * Upload the biocollection file data.
 * 
 * The file is a delimited matrix, where N is any number of columns.
 * 
 * @param fileInputStream
 *           the input stream of the file
 * @param inLength
 *           the length of the file
 * @param inFileFormat
 *           the file format ("XLS" files are first converted to CSV)
 * @param inDelimChr
 *           the delimiter character
 * @return the upload report detailing the upload process
 * @throws FileFormatException
 *            file format exception
 * @throws ArkSystemException
 *            general ARK exception
 */
public StringBuffer uploadAndReportMatrixBiocollectionFile(InputStream fileInputStream, long inLength,
        String inFileFormat, char inDelimChr) throws FileFormatException, ArkSystemException {
    delimiterCharacter = inDelimChr;
    uploadReport = new StringBuffer();
    curPos = 0;

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    // If Excel, convert to CSV for validation
    if (inFileFormat.equalsIgnoreCase("XLS")) {
        Workbook w;
        try {
            w = Workbook.getWorkbook(fileInputStream);
            delimiterCharacter = ',';
            XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter);
            fileInputStream = xlsToCsv.convertXlsToCsv(w);
            fileInputStream.reset();
        } catch (BiffException e) {
            log.error(e.getMessage());
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    try {
        inputStreamReader = new InputStreamReader(fileInputStream);
        csvReader = new CsvReader(inputStreamReader, delimiterCharacter);

        srcLength = inLength;
        if (srcLength <= 0) {
            uploadReport.append("The input size was not greater than 0. Actual length reported: ");
            uploadReport.append(srcLength);
            uploadReport.append("\n");
            throw new FileFormatException(
                    "The input size was not greater than 0. Actual length reported: " + srcLength);
        }
        timer = new StopWatch();
        timer.start();
        csvReader.readHeaders();
        srcLength = inLength - csvReader.getHeaders().toString().length();
        log.debug("Header length: " + csvReader.getHeaders().toString().length());
        // Loop through all rows in file
        while (csvReader.readRecord()) {
            log.info("At record: " + recordCount);
            String subjectUID = csvReader.get("SUBJECTUID");
            String biocollectionUID = csvReader.get("BIOCOLLECTIONUID");
            LinkSubjectStudy linkSubjectStudy = iArkCommonService.getSubjectByUIDAndStudy(subjectUID, study);
            //this is validated in prior step and should never happen
            if (linkSubjectStudy == null) {
                log.error("Unexpected subject; this should have been caught in the prior validation step");
                break; // TODO: log appropriately or add proper handling
            }
            BioCollection bioCollection = iLimsService.getBioCollectionForStudySubjectByUID(biocollectionUID,
                    study, linkSubjectStudy);
            if (bioCollection == null) {
                bioCollection = new BioCollection();
                if (study.getAutoGenerateBiocollectionUid()) {
                    // if biocollection not in the system we have to create a new biocollection uid.
                    bioCollection.setBiocollectionUid(iLimsService.getNextGeneratedBiospecimenUID(study));
                } else {
                    bioCollection.setBiocollectionUid(biocollectionUID);
                }
            } else { // if it already exists, we do not want to auto-generate the UID.
                bioCollection.setBiocollectionUid(biocollectionUID);
            }
            bioCollection.setStudy(study);
            bioCollection.setLinkSubjectStudy(linkSubjectStudy);

            if (csvReader.getIndex("NAME") > 0) {
                String name = csvReader.get("NAME");
                bioCollection.setName(name);
            }
            if (csvReader.getIndex("COLLECTIONDATE") > 0) {
                String collectionDate = csvReader.get("COLLECTIONDATE");
                bioCollection.setCollectionDate(simpleDateFormat.parse(collectionDate));
            }
            if (csvReader.getIndex("COMMENTS") > 0) {
                String comments = csvReader.get("COMMENTS");
                bioCollection.setComments(comments);
            }
            //validation SHOULD make sure these cases will work.  TODO:  test scripts

            if (bioCollection.getId() == null) {
                insertBiocollections.add(bioCollection);
                StringBuffer sb = new StringBuffer();
                sb.append("BioCollectionUID: ");
                sb.append(bioCollection.getBiocollectionUid());
                sb.append(" has been created successfully.");
                sb.append("\n");
                uploadReport.append(sb);
                insertCount++;
            } else {
                updateBiocollections.add(bioCollection);
                StringBuffer sb = new StringBuffer();
                sb.append("BioCollectionUID: ");
                sb.append(bioCollection.getBiocollectionUid());
                sb.append(" has been updated successfully.");
                sb.append("\n");
                uploadReport.append(sb);
                updateCount++;
            }
            recordCount++;
        }
    } catch (IOException ioe) {
        uploadReport.append("Unexpected I/O exception whilst reading the biospecimen data file\n");
        log.error("processMatrixBiospecimenFile IOException stacktrace:", ioe);
        throw new ArkSystemException("Unexpected I/O exception whilst reading the biospecimen data file");
    } catch (Exception ex) {
        uploadReport.append("Unexpected exception whilst reading the biospecimen data file\n");
        log.error("processMatrixBiospecimenFile Exception stacktrace:", ex);
        throw new ArkSystemException(
                "Unexpected exception occurred when trying to process biospecimen data file");
    } finally {
        // Clean up the IO objects
        timer.stop();
        uploadReport.append("\n");
        uploadReport.append("Total elapsed time: ");
        uploadReport.append(timer.getTime());
        uploadReport.append(" ms or ");
        uploadReport.append(decimalFormat.format(timer.getTime() / 1000.0));
        uploadReport.append(" s");
        uploadReport.append("\n");
        uploadReport.append("Total file size: ");
        uploadReport.append(inLength);
        uploadReport.append(" B or ");
        uploadReport.append(decimalFormat.format(inLength / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");

        timer = null;

        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvRdr.close()", ex);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: isr.close()", ex);
            }
        }
        // Restore the state of variables
        srcLength = -1;
    }
    uploadReport.append("Processed ");
    uploadReport.append(recordCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");
    uploadReport.append("Inserted ");
    uploadReport.append(insertCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");

    // Batch insert/update
    iLimsService.batchInsertBiocollections(insertBiocollections);
    iLimsService.batchUpdateBiocollections(updateBiocollections);

    return uploadReport;
}
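
A note on the XLS branch above: convertXlsToCsv(w) evidently returns the converted CSV as a fresh stream, and the immediate reset() rewinds it to the start so that the CsvReader built later reads from the first byte. If the returned stream did not support mark/reset, that reset() would throw an IOException, which the catch block only logs. The same convert-then-reset pattern recurs in the other two upload methods from this class below.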

From source file:eu.medsea.mimeutil.detector.OpendesktopMimeDetector.java

private Collection lookupMimeTypesForMagicData(InputStream in) {

    int offset = 0;
    int len = getMaxExtents();
    byte[] data = new byte[len];
    // Mark the input stream
    in.mark(len);

    try {
        // Since an InputStream might return only some data (not all
        // requested), we have to read in a loop until
        // either EOF is reached or the desired number of bytes have been
        // read.
        int restBytesToRead = len;
        while (restBytesToRead > 0) {
            int bytesRead = in.read(data, offset, restBytesToRead);
            if (bytesRead < 0)
                break; // EOF

            offset += bytesRead;
            restBytesToRead -= bytesRead;
        }
    } catch (IOException ioe) {
        throw new MimeException(ioe);
    } finally {
        try {
            // Reset the input stream to where it was marked.
            in.reset();
        } catch (Exception e) {
            throw new MimeException(e);
        }
    }
    return lookupMagicData(data);
}
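
The read-until-full loop combined with mark/reset amounts to a reusable "peek" operation. A minimal sketch of that idea, assuming the caller passes a mark-supporting stream (the helper name peek is mine, not from the source):

import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;

public final class StreamPeek {
    /** Returns up to len bytes from the current position without consuming them. */
    public static byte[] peek(InputStream in, int len) throws IOException {
        in.mark(len);
        byte[] buf = new byte[len];
        int offset = 0;
        try {
            while (offset < len) {
                int bytesRead = in.read(buf, offset, len - offset);
                if (bytesRead < 0) {
                    break; // EOF before len bytes were available
                }
                offset += bytesRead;
            }
        } finally {
            in.reset(); // rewind to the mark so the caller sees an untouched stream
        }
        return offset == len ? buf : Arrays.copyOf(buf, offset);
    }
}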

From source file:com.ksc.http.KSCHttpClient.java

/**
 * Used to perform a last reset on the content input stream (if mark-supported); this is so that, for backward
 * compatibility reason, any "blind" retry (ie without calling reset) by user of this library with the same input
 * stream (such as ByteArrayInputStream) could still succeed.
 *
 * @param t   the failure
 * @param req the request, if known; or null otherwise.
 * @return the failure as given
 */
private <T extends Throwable> T lastReset(final T t, final Request<?> req) {
    try {
        InputStream content = req.getContent();
        if (content != null) {
            if (content.markSupported())
                content.reset();
        }
    } catch (Exception ex) {
        log.debug("FYI: failed to reset content inputstream before throwing up", ex);
    }
    return t;
}
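
The markSupported() guard matters here: calling reset() on a stream that does not support marking, or whose mark was never set or has been invalidated, throws an IOException. Since this method runs on a failure path, the guard and the catch-and-log together keep the best-effort rewind from masking the original failure t that the method returns.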

From source file:org.opendap.d1.DAPMNodeService.java

/**
 * Return the SDO, SMO or ORE document that matches the given D1/DAP server
 * PID. Note that this is used for both /object and /replica calls.
 *
 * @param pid The D1 Persistent Identifier for the local object
 * @return An InputStream; read the object from this.
 * @see org.dataone.service.mn.tier1.v1.MNRead#get()
 */
@Override
public InputStream get(Identifier pid)
        throws InvalidToken, NotAuthorized, NotImplemented, ServiceFailure, NotFound, InsufficientResources {
    // Query the database for the PID. If it is a SDO or SMO, then we must
    // access the DAP server and return the streamed object via the InputStream.
    // if the PID references an ORE document, we must build the ORE doc and 
    // return it.
    log.debug("... in DAPMNodeService, get()...");

    if (!db.isInMetadata(pid.getValue()))
        throw new NotFound("1020", "The PID '" + pid.getValue() + "' was not found on this server.");

    try {
        InputStream in = null;

        // Anything other than an ORE doc must be DAP URL for this server.
        if (db.isDAPURL(pid.getValue())) {
            // For a DAP URL (e.g., it's a .nc or .iso URL), dereference and 
            // return the InputStream
            HttpClient client = new DefaultHttpClient();
            HttpGet request = new HttpGet(db.getDAPURL(pid.getValue()));
            HttpResponse response = client.execute(request);

            // Get the response
            in = response.getEntity().getContent();
        } else {
            String ore_doc = db.getOREDoc(pid.getValue());
            in = new ByteArrayInputStream(ore_doc.getBytes());

            log.debug("ORE doc: $${}$$", ore_doc);
            log.debug("ORE doc checksum: {}", ChecksumUtil.checksum(in, "SHA-1").getValue());

            in.reset();

            /*
            List<String> ids = db.getIdentifiersForORE(pid.getValue());
                    
            Identifier smoId = new Identifier();
            smoId.setValue(ids.get(0));
                    
            List<Identifier> dataObjects = new Vector<Identifier>();
            Identifier sdoId = new Identifier();
            sdoId.setValue(ids.get(1));
            dataObjects.add(sdoId);
                    
            Map<Identifier, List<Identifier>> idMap = new HashMap<Identifier, List<Identifier>>();
            idMap.put(smoId, dataObjects);
                    
            ResourceMap rm = ResourceMapFactory.getInstance().createResourceMap(pid, idMap);
            String resourceMapXML = ResourceMapFactory.getInstance().serializeResourceMap(rm);
            in = new ByteArrayInputStream(resourceMapXML.getBytes());
            */
        }

        if (in == null)
            throw new NotFound("1020", "The PID '" + pid.getValue() + "' was not found.");

        return in;

    } catch (Exception e) {
        log.error(e.getMessage());
        throw new ServiceFailure("1030", e.getMessage());
    }
}
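
The bare in.reset() above works because of a ByteArrayInputStream property: it always supports mark/reset, and its mark initially sits at position zero, so reset() rewinds to the beginning even though mark() was never called; that matters here because ChecksumUtil.checksum(...) has already consumed the stream. A minimal illustration of the same property:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ResetWithoutMark {
    public static void main(String[] args) throws IOException {
        InputStream in = new ByteArrayInputStream("payload".getBytes());
        while (in.read() != -1) { /* consume everything, e.g. to checksum it */ }
        in.reset();                           // no prior mark() needed; rewinds to offset 0
        System.out.println((char) in.read()); // prints: p
    }
}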

From source file:org.lockss.plugin.atypon.BaseAtyponRisFilterFactory.java

public InputStream createFilteredInputStream(ArchivalUnit au, InputStream in, String encoding)
        throws PluginException {

    InputStream inBuf = null; // to make sure mark() is supported
    /*
     * RIS files are collected with content type text/plain (encoding) and so
     * we have to filter all text/plain and then determine if they're a RIS file 
     * here.  We are working on a different long-term solution by allowing us to verify
     * the URL against a regexp.
     */

    BufferedReader bReader;

    if (!in.markSupported()) {
        inBuf = new BufferedInputStream(in); // wrap the one that came in
    } else {
        inBuf = in; // use the one passed in
    }
    int BUF_LEN = 2000;
    inBuf.mark(BUF_LEN); // not sure about the limit...just reading far enough to identify file type

    try {
        //Now create a BoundedInputReader to make sure that we don't overstep our reset mark
        bReader = new BufferedReader(new InputStreamReader(new BoundedInputStream(inBuf, BUF_LEN), encoding));

        String aLine = bReader.readLine();
        // The first tag in a RIS file must be "TY - "; be nice about WS
        // The specification doesn't allow for comments or other preceding characters

        // isBlank() checks if whitespace, empty or null
        // keep initial null check or you'd never exit loop if you hit the end of input!
        while (aLine != null && StringUtils.isBlank(aLine)) {
            aLine = bReader.readLine(); // get the next line
        }

        // do NOT close bReader - it would also close underlying inBuf!
        inBuf.reset();
        // if we have data, see if it matches the RIS pattern
        if (aLine != null && RIS_PATTERN.matcher(aLine).find()) {
            return new RisFilterInputStream(inBuf, encoding, "Y2");
        }
        return inBuf; // If not a RIS file, just return reset file
    } catch (UnsupportedEncodingException e) {
        log.debug2("Internal error (unsupported encoding)", e);
        throw new PluginException("Unsupported encoding looking ahead in input stream", e);
    } catch (IOException e) {
        log.debug2("Internal error (IO exception)", e);
        throw new PluginException("IO exception looking ahead in input stream", e);
    }
}
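
Two details make this look-ahead robust: wrapping the incoming stream in a BufferedInputStream when markSupported() is false guarantees that reset() is available, and the BoundedInputStream caps the read-ahead at BUF_LEN bytes, so the reader can never consume past the mark limit, beyond which reset() would no longer be guaranteed to succeed.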

From source file:org.mifos.customers.client.struts.action.ClientCustAction.java

@TransactionDemarcate(joinToken = true)
public ActionForward retrievePictureOnPreview(ActionMapping mapping, ActionForm form,
        @SuppressWarnings("unused") HttpServletRequest request, HttpServletResponse response) throws Exception {

    ClientCustActionForm actionForm = (ClientCustActionForm) form;
    InputStream in = actionForm.getPicture().getInputStream();
    in.mark(0);
    response.setContentType("image/jpeg");
    BufferedOutputStream out = new BufferedOutputStream(response.getOutputStream());
    byte[] by = new byte[1024 * 4]; // 4K buffer
    int index = in.read(by, 0, 1024 * 4);
    while (index != -1) {
        out.write(by, 0, index);
        index = in.read(by, 0, 1024 * 4);
    }
    out.flush();
    out.close();
    in.reset();
    String forward = ClientConstants.CUSTOMER_PICTURE_PAGE;
    return mapping.findForward(forward);
}
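
Note that in.mark(0) declares a read limit of zero, so after the whole picture has been streamed out, the final reset() is only guaranteed to work on streams that effectively ignore the limit, such as in-memory streams; a BufferedInputStream makes no such promise. Passing the expected picture size to mark() would be the safer choice.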

From source file:org.collectionspace.services.common.config.TenantBindingConfigReaderImpl.java

private InputStream merge(File srcFile, File deltaFile) throws IOException {
    InputStream result = null;
    try {
        FileInputStream srcStream = new FileInputStream(srcFile);
        FileInputStream deltaStream = new FileInputStream(deltaFile);
        InputStream[] inputStreamArray = { srcStream, deltaStream };

        Configurer configurer = new AttributeMergeConfigurer();
        result = new ConfigurableXmlMerge(configurer).merge(inputStreamArray);
    } catch (Exception e) {
        logger.error("Could not merge tenant configuration delta file: " + deltaFile.getAbsolutePath(), e);
    }
    //
    // Try to save the merge output to a file that is suffixed with ".merged.xml" in the same directory
    // as the delta file.
    //
    if (result != null) {
        File outputDir = deltaFile.getParentFile();
        String mergedFileName = outputDir.getAbsolutePath() + File.separator
                + this.TENANT_BINDINGS_FILENAME_PREFIX + MERGED_SUFFIX;
        File mergedOutFile = new File(mergedFileName);
        try {
            FileUtils.copyInputStreamToFile(result, mergedOutFile);
        } catch (IOException e) {
            logger.warn("Could not create a copy of the merged tenant configuration at: " + mergedFileName, e);
        }
        result.reset(); // reset the stream even if the file copy failed.
    }

    return result;
}
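
The result.reset() here assumes that ConfigurableXmlMerge.merge() returns a mark/reset-capable stream (an in-memory stream would qualify). FileUtils.copyInputStreamToFile consumes the stream to its end, so without a successful rewind the caller would receive an exhausted stream; if reset() fails, it throws an IOException, which this method already declares.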

From source file:au.org.theark.lims.util.BioCollectionSpecimenUploader.java

/**
 * Upload the biospecimen file data.
 * 
 * The file is a delimited matrix, where N is any number of columns.
 * 
 * @param fileInputStream
 *           the input stream of the file
 * @param inLength
 *           the length of the file
 * @param inFileFormat
 *           the file format ("XLS" files are first converted to CSV)
 * @param inDelimChr
 *           the delimiter character
 * @return the upload report detailing the upload process
 * @throws FileFormatException
 *            file format exception
 * @throws ArkSystemException
 *            general ARK exception
 */
public StringBuffer uploadAndReportMatrixBiospecimenFile(InputStream fileInputStream, long inLength,
        String inFileFormat, char inDelimChr) throws FileFormatException, ArkSystemException {
    delimiterCharacter = inDelimChr;
    uploadReport = new StringBuffer();
    curPos = 0;

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    // If Excel, convert to CSV for validation
    if (inFileFormat.equalsIgnoreCase("XLS")) {
        Workbook w;
        try {
            w = Workbook.getWorkbook(fileInputStream);
            delimiterCharacter = ',';
            XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter);
            fileInputStream = xlsToCsv.convertXlsToCsv(w);
            fileInputStream.reset();
        } catch (BiffException e) {
            log.error(e.getMessage());
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    try {
        inputStreamReader = new InputStreamReader(fileInputStream);
        csvReader = new CsvReader(inputStreamReader, delimiterCharacter);

        srcLength = inLength;
        if (srcLength <= 0) {
            uploadReport.append("The input size was not greater than 0. Actual length reported: ");
            uploadReport.append(srcLength);
            uploadReport.append("\n");
            throw new FileFormatException(
                    "The input size was not greater than 0. Actual length reported: " + srcLength);
        }
        timer = new StopWatch();
        timer.start();
        csvReader.readHeaders();
        srcLength = inLength - csvReader.getHeaders().toString().length();
        log.debug("Header length: " + csvReader.getHeaders().toString().length());

        // Loop through all rows in file
        while (csvReader.readRecord()) {
            log.info("At record: " + recordCount);
            String subjectUID = csvReader.get("SUBJECTUID");
            String biospecimenUID = csvReader.get("BIOSPECIMENUID");
            LinkSubjectStudy linkSubjectStudy = iArkCommonService.getSubjectByUIDAndStudy(subjectUID, study);
            //this is validated in prior step and should never happen
            if (linkSubjectStudy == null) {
                log.error("Unexpected subject; this should have been caught in the prior validation step");
                break; // TODO: log appropriately or add proper handling
            }
            // Always create a new biospecimen at this point;
            // existing biospecimens are not allowed to be updated here.
            Biospecimen biospecimen = iLimsService.getBiospecimenByUid(biospecimenUID, study);
            if (biospecimen == null) {
                biospecimen = new Biospecimen();
            } else {
                log.error("We should NEVER have an existing biospecimen here; this should have been validated in the prior step");
                break;
            }
            biospecimen.setStudy(study);
            biospecimen.setLinkSubjectStudy(linkSubjectStudy);
            if (csvReader.getIndex("BIOCOLLECTIONUID") > 0) {
                String biocollectionUid = csvReader.get("BIOCOLLECTIONUID");
                BioCollection bioCollection = iLimsService.getBioCollectionByUID(biocollectionUid,
                        this.study.getId(), subjectUID);
                if (bioCollection == null) {
                    log.error(
                            "\n\n\n\n\n\n\n\n\n....We already validated for the exsisting biocollection and we never created "
                                    + "new one if it does not exsists.");
                    break;
                } else {
                    biospecimen.setBioCollection(bioCollection);
                }
            }
            if (csvReader.getIndex("SAMPLETYPE") > 0) {
                String name = csvReader.get("SAMPLETYPE");
                BioSampletype sampleType = new BioSampletype();
                sampleType = iLimsService.getBioSampleTypeByName(name);
                biospecimen.setSampleType(sampleType);
            }
            if (csvReader.getIndex("QUANTITY") > 0) {
                String quantity = csvReader.get("QUANTITY");
                biospecimen.setQuantity(new Double(quantity));
            }
            if (csvReader.getIndex("CONCENTRATION") > 0) {
                String concentration = csvReader.get("CONCENTRATION");
                if (concentration != null && !concentration.isEmpty()) {
                    try {
                        biospecimen.setConcentration(Double.parseDouble(concentration));
                    } catch (NumberFormatException ne) {
                        log.error("Already validated in the previous step and never happen the for error");
                    }
                }
            }
            if (csvReader.getIndex("UNITS") > 0) {
                String name = csvReader.get("UNITS");
                Unit unit = iLimsService.getUnitByName(name);
                biospecimen.setUnit(unit);
            }
            if (csvReader.getIndex("TREATMENT") > 0) {
                String name = csvReader.get("TREATMENT");
                TreatmentType treatmentType = iLimsService.getTreatmentTypeByName(name);
                biospecimen.setTreatmentType(treatmentType);
            }
            Set<BioTransaction> bioTransactions = new HashSet<BioTransaction>(0);
            // Inherently create a transaction for the initial quantity
            BioTransaction bioTransaction = new BioTransaction();
            bioTransaction.setBiospecimen(biospecimen);
            bioTransaction.setTransactionDate(Calendar.getInstance().getTime());
            bioTransaction.setQuantity(biospecimen.getQuantity());
            bioTransaction.setReason(au.org.theark.lims.web.Constants.BIOTRANSACTION_STATUS_INITIAL_QUANTITY);

            BioTransactionStatus initialStatus = iLimsService.getBioTransactionStatusByName(
                    au.org.theark.lims.web.Constants.BIOTRANSACTION_STATUS_INITIAL_QUANTITY);
            bioTransaction.setStatus(initialStatus); //ensure that the initial transaction can be identified
            bioTransactions.add(bioTransaction);
            biospecimen.setBioTransactions(bioTransactions);
            // Validation SHOULD ensure these cases work. TODO: test scripts
            if (study.getAutoGenerateBiospecimenUid()) {
                biospecimen.setBiospecimenUid(iLimsService.getNextGeneratedBiospecimenUID(study));
            } else {
                biospecimen.setBiospecimenUid(biospecimenUID);
            }
            insertBiospecimens.add(biospecimen);
            StringBuffer sb = new StringBuffer();
            sb.append("Biospecimen UID: ");
            sb.append(biospecimen.getBiospecimenUid());
            sb.append(" has been created successfully.");
            sb.append("\n");
            uploadReport.append(sb);
            insertCount++;
            // Allocation details
            String siteName = csvReader.get("SITE");
            String freezerName = csvReader.get("FREEZER");
            String rackName = csvReader.get("RACK");
            String boxName = csvReader.get("BOX");
            String row = csvReader.get("ROW");
            String column = csvReader.get("COLUMN");
            InvCell invCell = iInventoryService.getInvCellByLocationNames(siteName, freezerName, rackName,
                    boxName, row, column);
            // The biospecimen is meant to be located in this valid, empty inventory cell.
            // The inventory cell is not persisted with the biospecimen, so we update the
            // cell with the biospecimen UID when the biospecimen is created.
            biospecimen.setInvCell(invCell);
            recordCount++;
        }
    } catch (IOException ioe) {
        uploadReport.append("Unexpected I/O exception whilst reading the biospecimen data file\n");
        log.error("processMatrixBiospecimenFile IOException stacktrace:", ioe);
        throw new ArkSystemException("Unexpected I/O exception whilst reading the biospecimen data file");
    } catch (Exception ex) {
        uploadReport.append("Unexpected exception whilst reading the biospecimen data file\n");
        log.error("processMatrixBiospecimenFile Exception stacktrace:", ex);
        throw new ArkSystemException(
                "Unexpected exception occurred when trying to process biospecimen data file");
    } finally {
        // Clean up the IO objects
        timer.stop();
        uploadReport.append("\n");
        uploadReport.append("Total elapsed time: ");
        uploadReport.append(timer.getTime());
        uploadReport.append(" ms or ");
        uploadReport.append(decimalFormat.format(timer.getTime() / 1000.0));
        uploadReport.append(" s");
        uploadReport.append("\n");
        uploadReport.append("Total file size: ");
        uploadReport.append(inLength);
        uploadReport.append(" B or ");
        uploadReport.append(decimalFormat.format(inLength / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");

        timer = null;

        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvRdr.close()", ex);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: isr.close()", ex);
            }
        }
        // Restore the state of variables
        srcLength = -1;
    }
    uploadReport.append("Processed ");
    uploadReport.append(recordCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");
    uploadReport.append("Inserted ");
    uploadReport.append(insertCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");

    // Batch insert/update
    iLimsService.batchInsertBiospecimensAndUpdateInventoryCell(insertBiospecimens);
    return uploadReport;
}

From source file:au.org.theark.lims.util.BioCollectionSpecimenUploader.java

/**
 * Upload the biospecimen inventory location file.
 * 
 * The file is a delimited matrix, where N is any number of columns.
 * 
 * @param fileInputStream
 *           the input stream of the file
 * @param inLength
 *           the length of the file
 * @param inFileFormat
 *           the file format ("XLS" files are first converted to CSV)
 * @param inDelimChr
 *           the delimiter character
 * @return the upload report detailing the upload process
 * @throws FileFormatException
 *            file format exception
 * @throws ArkSystemException
 *            general ARK exception
 */
public StringBuffer uploadAndReportMatrixBiospecimenInventoryFile(InputStream fileInputStream, long inLength,
        String inFileFormat, char inDelimChr) throws FileFormatException, ArkSystemException {
    delimiterCharacter = inDelimChr;
    uploadReport = new StringBuffer();
    curPos = 0;
    List<InvCell> cellsToUpdate = new ArrayList<InvCell>();

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    // If Excel, convert to CSV for validation
    if (inFileFormat.equalsIgnoreCase("XLS")) {
        Workbook w;
        try {
            w = Workbook.getWorkbook(fileInputStream);
            delimiterCharacter = ',';
            XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter);
            fileInputStream = xlsToCsv.convertXlsToCsv(w);
            fileInputStream.reset();
        } catch (BiffException e) {
            log.error(e.getMessage());
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    try {
        inputStreamReader = new InputStreamReader(fileInputStream);
        csvReader = new CsvReader(inputStreamReader, delimiterCharacter);

        srcLength = inLength;
        if (srcLength <= 0) {
            uploadReport.append("The input size was not greater than 0. Actual length reported: ");
            uploadReport.append(srcLength);
            uploadReport.append("\n");
            throw new FileFormatException(
                    "The input size was not greater than 0. Actual length reported: " + srcLength);
        }

        timer = new StopWatch();
        timer.start();
        csvReader.readHeaders();

        srcLength = inLength - csvReader.getHeaders().toString().length();
        log.debug("Header length: " + csvReader.getHeaders().toString().length());

        // Loop through all rows in file
        while (csvReader.readRecord()) {
            log.info("At record: " + recordCount);
            String biospecimenUID = csvReader.get("BIOSPECIMENUID");
            Biospecimen biospecimen = iLimsService.getBiospecimenByUid(biospecimenUID, study);
            if (biospecimen == null) {
                log.error("We should NEVER have a null biospecimen here; this should have been validated in the prior step");
                break;
            }
            // Allocation details
            InvCell invCell;
            String siteName = null;
            String freezerName = null;
            String rackName = null;
            String boxName = null;
            String row = null;
            String column = null;

            if (csvReader.getIndex("SITE") > 0) {
                siteName = csvReader.get("SITE");
            }

            if (csvReader.getIndex("FREEZER") > 0) {
                freezerName = csvReader.get("FREEZER");
            }

            if (csvReader.getIndex("RACK") > 0) {
                rackName = csvReader.get("RACK");
            }

            if (csvReader.getIndex("BOX") > 0) {
                boxName = csvReader.get("BOX");
            }

            if (csvReader.getIndex("ROW") > 0) {
                row = csvReader.get("ROW");
            }

            if (csvReader.getIndex("COLUMN") > 0) {
                column = csvReader.get("COLUMN");
            }

            invCell = iInventoryService.getInvCellByLocationNames(siteName, freezerName, rackName, boxName, row,
                    column);

            if (invCell != null && invCell.getId() != null) {
                if (invCell.getBiospecimen() != null) {
                    log.error("This should NEVER happen as validation should ensure no cell overwrites another");
                    break;
                }
                invCell.setBiospecimen(biospecimen);
                cellsToUpdate.add(invCell);
                updateCount++;
            } else {
                log.error("This should NEVER happen as validation should ensure all cells valid");
                break;
            }

            recordCount++;
        }
    } catch (IOException ioe) {
        uploadReport.append("Unexpected I/O exception whilst reading the biospecimen data file\n");
        log.error("processMatrixBiospecimenFile IOException stacktrace:", ioe);
        throw new ArkSystemException("Unexpected I/O exception whilst reading the biospecimen data file");
    } catch (Exception ex) {
        uploadReport.append("Unexpected exception whilst reading the biospecimen data file\n");
        log.error("processMatrixBiospecimenFile Exception stacktrace:", ex);
        throw new ArkSystemException(
                "Unexpected exception occurred when trying to process biospecimen data file");
    } finally {
        // Clean up the IO objects
        timer.stop();
        uploadReport.append("\n");
        uploadReport.append("Total elapsed time: ");
        uploadReport.append(timer.getTime());
        uploadReport.append(" ms or ");
        uploadReport.append(decimalFormat.format(timer.getTime() / 1000.0));
        uploadReport.append(" s");
        uploadReport.append("\n");
        uploadReport.append("Total file size: ");
        uploadReport.append(inLength);
        uploadReport.append(" B or ");
        uploadReport.append(decimalFormat.format(inLength / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");

        timer = null;

        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvRdr.close()", ex);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: isr.close()", ex);
            }
        }
        // Restore the state of variables
        srcLength = -1;
    }
    uploadReport.append("Processed ");
    uploadReport.append(recordCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");
    uploadReport.append("Updated ");
    uploadReport.append(updateCount);
    uploadReport.append(" records.");
    uploadReport.append("\n");

    iLimsService.batchUpdateInvCells(cellsToUpdate);

    return uploadReport;
}

From source file:com.ksc.http.KSCHttpClient.java

/**
 * Reset the input stream of the request before a retry.
 *
 * @param request Request containing input stream to reset
 * @throws ResetException If Input Stream can't be reset which means the request can't be retried
 */
private void resetRequestInputStream(final Request<?> request) throws ResetException {
    InputStream requestInputStream = request.getContent();
    if (requestInputStream != null) {
        if (requestInputStream.markSupported()) {
            try {
                requestInputStream.reset();
            } catch (IOException ex) {
                throw new ResetException("Failed to reset the request input stream", ex);
            }
        }
    }
}
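
To round out the retry picture, a self-contained sketch of marking a request body before the first attempt and resetting it before each retry; the send method is a hypothetical stand-in for the real transport call, not KSC API:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class RetryWithReset {
    public static void main(String[] args) throws IOException {
        InputStream body = new ByteArrayInputStream("request body".getBytes());
        body.mark(Integer.MAX_VALUE); // remember the start before the first attempt
        for (int attempt = 1; attempt <= 3; attempt++) {
            try {
                send(body, attempt); // hypothetical single-attempt transport call
                break;               // success
            } catch (IOException e) {
                if (attempt == 3 || !body.markSupported()) {
                    throw e;         // out of retries, or the body cannot be rewound
                }
                body.reset();        // rewind the body so the retry re-sends it in full
            }
        }
    }

    // Hypothetical transport: consumes the stream, then fails on the first attempt.
    private static void send(InputStream in, int attempt) throws IOException {
        while (in.read() != -1) { /* pretend to transmit */ }
        if (attempt == 1) {
            throw new IOException("simulated transient failure");
        }
    }
}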