Example usage for java.io DataInputStream close

List of usage examples for java.io DataInputStream close

Introduction

On this page you can find example usages of java.io.DataInputStream.close().

Prototype

public void close() throws IOException 

Document

Closes this input stream and releases any system resources associated with the stream.
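
All of the examples below call close() explicitly, most of them inside a finally block. As a point of reference, here is a minimal, self-contained sketch (the file name data.bin is a placeholder) that achieves the same thing with try-with-resources, which calls close() automatically even when an exception is thrown:

import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class CloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources calls close() automatically, which is the
        // equivalent of the explicit close() calls in the examples below.
        try (DataInputStream in = new DataInputStream(new FileInputStream("data.bin"))) {
            int value = in.readInt();
            System.out.println("First int: " + value);
        } // in.close() is invoked here, even if readInt() throws
    }
}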

Usage

From source file:org.carbondata.lcm.status.SegmentStatusManager.java

/**
 * Get the valid segments for the given table.
 * @return
 * @throws IOException
 */
public ValidSegmentsInfo getValidSegments() throws IOException {

    // @TODO: move reading LoadStatus file to separate class
    List<String> listOfValidSegments = new ArrayList<String>(10);
    List<String> listOfValidUpdatedSegments = new ArrayList<String>(10);
    CarbonTablePath carbonTablePath = CarbonStorePath.getCarbonTablePath(absoluteTableIdentifier.getStorePath(),
            absoluteTableIdentifier.getCarbonTableIdentifier());
    String dataPath = carbonTablePath.getTableStatusFilePath();

    DataInputStream dataInputStream = null;
    Gson gsonObjectToRead = new Gson();
    AtomicFileOperations fileOperation = new AtomicFileOperationsImpl(dataPath,
            FileFactory.getFileType(dataPath));
    LoadMetadataDetails[] loadFolderDetailsArray;
    try {
        if (FileFactory.isFileExist(dataPath, FileFactory.getFileType(dataPath))) {

            dataInputStream = fileOperation.openForRead();

            BufferedReader buffReader = new BufferedReader(new InputStreamReader(dataInputStream, "UTF-8"));

            loadFolderDetailsArray = gsonObjectToRead.fromJson(buffReader, LoadMetadataDetails[].class);
            //just directly iterate Array
            List<LoadMetadataDetails> loadFolderDetails = Arrays.asList(loadFolderDetailsArray);

            for (LoadMetadataDetails loadMetadataDetails : loadFolderDetails) {
                if (CarbonCommonConstants.STORE_LOADSTATUS_SUCCESS
                        .equalsIgnoreCase(loadMetadataDetails.getLoadStatus())
                        || CarbonCommonConstants.MARKED_FOR_UPDATE
                                .equalsIgnoreCase(loadMetadataDetails.getLoadStatus())
                        || CarbonCommonConstants.STORE_LOADSTATUS_PARTIAL_SUCCESS
                                .equalsIgnoreCase(loadMetadataDetails.getLoadStatus())) {
                    // check for merged loads.
                    if (null != loadMetadataDetails.getMergedLoadName()) {

                        if (!listOfValidSegments.contains(loadMetadataDetails.getMergedLoadName())) {
                            listOfValidSegments.add(loadMetadataDetails.getMergedLoadName());
                        }
                        // if merged load is updated then put it in updated list
                        if (CarbonCommonConstants.MARKED_FOR_UPDATE
                                .equalsIgnoreCase(loadMetadataDetails.getLoadStatus())) {
                            listOfValidUpdatedSegments.add(loadMetadataDetails.getMergedLoadName());
                        }
                        continue;
                    }

                    if (CarbonCommonConstants.MARKED_FOR_UPDATE
                            .equalsIgnoreCase(loadMetadataDetails.getLoadStatus())) {

                        listOfValidUpdatedSegments.add(loadMetadataDetails.getLoadName());
                    }
                    listOfValidSegments.add(loadMetadataDetails.getLoadName());

                }
            }
        } else {
            loadFolderDetailsArray = new LoadMetadataDetails[0];
        }
    } catch (IOException e) {
        LOG.error(e);
        throw e;
    } finally {
        try {

            if (null != dataInputStream) {
                dataInputStream.close();
            }
        } catch (Exception e) {
            LOG.error(e);
            throw e;
        }

    }
    return new ValidSegmentsInfo(listOfValidSegments, listOfValidUpdatedSegments);
}
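
In the example above, the finally block exists only to call dataInputStream.close(). The same read-parse-close flow can be written more compactly with try-with-resources. The sketch below is a simplified stand-in, not the actual CarbonData code; it reuses the AtomicFileOperations and LoadMetadataDetails types from the listing:

import com.google.gson.Gson;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

// Hypothetical helper illustrating the same pattern with try-with-resources.
static LoadMetadataDetails[] readTableStatus(AtomicFileOperations fileOperation) throws IOException {
    try (DataInputStream dataInputStream = fileOperation.openForRead();
            BufferedReader buffReader = new BufferedReader(
                    new InputStreamReader(dataInputStream, StandardCharsets.UTF_8))) {
        return new Gson().fromJson(buffReader, LoadMetadataDetails[].class);
    } // both the reader and the underlying DataInputStream are closed here
}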

From source file:net.timewalker.ffmq4.storage.data.impl.AbstractBlockBasedDataStore.java

/**
 * Run an integrity check on the store files and fix them as necessary
 * @throws DataStoreException if the files could not be fixed
 */
protected void integrityCheck() throws DataStoreException {
    try {
        //========================
        // 1 - Check files sizes
        //========================
        // -- Allocation table
        long atFileSize = allocationTableRandomAccessFile.length();
        if (atFileSize < AT_HEADER_SIZE + AT_BLOCK_SIZE) /* Should have at least one entry */
            throw new DataStoreException(
                    "Allocation table is truncated : " + allocationTableFile.getAbsolutePath());

        // Read some header fields
        FileInputStream inFile = new FileInputStream(allocationTableFile);
        DataInputStream in = new DataInputStream(new BufferedInputStream(inFile, 16384));
        int blockCount = in.readInt();
        int blockSize = in.readInt();
        int firstBlock = in.readInt();
        // Fix AT size
        long expectedATFileSize = AT_HEADER_SIZE + AT_BLOCK_SIZE * (long) blockCount;
        if (atFileSize != expectedATFileSize) {
            log.error("[" + descriptor.getName() + "] Allocation table has an invalid size (actual:"
                    + atFileSize + ",expected:" + expectedATFileSize + "), fixing.");
            allocationTableRandomAccessFile.setLength(expectedATFileSize);
        }
        // Fix data size
        long dataFileSize = dataRandomAccessFile.length();
        long expectedDataFileSize = (long) blockSize * blockCount;
        if (dataFileSize != expectedDataFileSize) {
            log.error("[" + descriptor.getName() + "] Data file has an invalid size (actual:" + dataFileSize
                    + ",expected:" + expectedDataFileSize + "), fixing.");
            dataRandomAccessFile.setLength(expectedDataFileSize);
        }

        //============================
        // 2 - Check allocation table
        //============================
        // Read the AT into memory
        byte[] flags = new byte[blockCount];
        int[] allocatedSize = new int[blockCount];
        int[] previousBlock = new int[blockCount];
        int[] nextBlock = new int[blockCount];
        int blocksInUse = 0;
        int msgCount = 0;
        for (int n = 0; n < blockCount; n++) {
            flags[n] = in.readByte();
            allocatedSize[n] = in.readInt();
            previousBlock[n] = in.readInt();
            nextBlock[n] = in.readInt();
            if (allocatedSize[n] != -1) {
                blocksInUse++;
                if ((flags[n] & FLAG_START_BLOCK) > 0)
                    msgCount++;
            }
        }
        in.close();
        log.debug("[" + descriptor.getName() + "] Blocks in use before fix : " + blocksInUse);
        log.debug("[" + descriptor.getName() + "] Messages count before fix : " + msgCount);

        // Fix first block index
        boolean changed = false;
        if (firstBlock < -1 || firstBlock >= blockCount) {
            log.error("[" + descriptor.getName() + "] Invalid allocation table first block index (" + firstBlock
                    + "), guessing new one ...");
            firstBlock = guessFirstBlockIndex(blockCount, allocatedSize, nextBlock);
            log.debug("[" + descriptor.getName() + "] Guessed first block index : " + firstBlock);
            changed = true;
        }

        // Recover table
        if (msgCount == 0) {
            if (firstBlock == -1) {
                // Table is empty, cleanup dirty entries
                changed = changed
                        || cleanupEmptyBlocks(blockCount, flags, allocatedSize, previousBlock, nextBlock);
            } else {
                log.error("[" + descriptor.getName() + "] First block index should be -1, clearing ...");
                firstBlock = -1;
                changed = true;
            }
        } else {
            if (firstBlock == -1) {
                log.error("[" + descriptor.getName() + "] Invalid first block index, guessing value ...");
                firstBlock = guessFirstBlockIndex(blockCount, allocatedSize, nextBlock);
                log.debug("[" + descriptor.getName() + "] Guessed first block index : " + firstBlock);
                changed = true;
            }

            changed = changed || fixBlocks(blockCount, blockSize, firstBlock, flags, allocatedSize,
                    previousBlock, nextBlock);
            changed = changed || cleanupEmptyBlocks(blockCount, flags, allocatedSize, previousBlock, nextBlock);
        }

        // Update the allocation file table
        if (changed) {
            // Re-compute size
            msgCount = 0;
            blocksInUse = 0;
            for (int n = 0; n < blockCount; n++) {
                if (allocatedSize[n] != -1) {
                    blocksInUse++;
                    if ((flags[n] & FLAG_START_BLOCK) > 0)
                        msgCount++;
                }
            }
            log.debug("[" + descriptor.getName() + "] Blocks in use after fix : " + blocksInUse);
            log.debug("[" + descriptor.getName() + "] Messages count after fix : " + msgCount);

            log.debug("[" + descriptor.getName() + "] Allocation table was altered, saving ...");
            allocationTableRandomAccessFile.seek(AT_HEADER_FIRSTBLOCK_OFFSET);
            allocationTableRandomAccessFile.writeInt(firstBlock);
            for (int n = 0; n < blockCount; n++) {
                byte[] allocationBlock = new byte[AT_BLOCK_SIZE];

                // Regroup I/O to improve performance
                allocationBlock[AB_FLAGS_OFFSET] = flags[n];
                allocationBlock[AB_ALLOCSIZE_OFFSET] = (byte) ((allocatedSize[n] >>> 24) & 0xFF);
                allocationBlock[AB_ALLOCSIZE_OFFSET + 1] = (byte) ((allocatedSize[n] >>> 16) & 0xFF);
                allocationBlock[AB_ALLOCSIZE_OFFSET + 2] = (byte) ((allocatedSize[n] >>> 8) & 0xFF);
                allocationBlock[AB_ALLOCSIZE_OFFSET + 3] = (byte) ((allocatedSize[n] >>> 0) & 0xFF);
                allocationBlock[AB_PREVBLOCK_OFFSET] = (byte) ((previousBlock[n] >>> 24) & 0xFF);
                allocationBlock[AB_PREVBLOCK_OFFSET + 1] = (byte) ((previousBlock[n] >>> 16) & 0xFF);
                allocationBlock[AB_PREVBLOCK_OFFSET + 2] = (byte) ((previousBlock[n] >>> 8) & 0xFF);
                allocationBlock[AB_PREVBLOCK_OFFSET + 3] = (byte) ((previousBlock[n] >>> 0) & 0xFF);
                allocationBlock[AB_NEXTBLOCK_OFFSET] = (byte) ((nextBlock[n] >>> 24) & 0xFF);
                allocationBlock[AB_NEXTBLOCK_OFFSET + 1] = (byte) ((nextBlock[n] >>> 16) & 0xFF);
                allocationBlock[AB_NEXTBLOCK_OFFSET + 2] = (byte) ((nextBlock[n] >>> 8) & 0xFF);
                allocationBlock[AB_NEXTBLOCK_OFFSET + 3] = (byte) ((nextBlock[n] >>> 0) & 0xFF);

                allocationTableRandomAccessFile.seek(AT_HEADER_SIZE + n * AT_BLOCK_SIZE);
                allocationTableRandomAccessFile.write(allocationBlock);
            }
            allocationTableRandomAccessFile.getFD().sync();
        } else
            log.debug("[" + descriptor.getName() + "] Allocation table was not altered");
    } catch (IOException e) {
        throw new DataStoreException("Cannot check/fix store integrity : " + e);
    }
}
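
Note that in.close() above sits on the success path only; if one of the intermediate reads throws an IOException, the stream stays open until it is garbage collected. A hedged sketch of just the header read, closing the stream in all cases (AllocationTableHeader is a hypothetical holder type, not part of FFMQ):

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class AllocationTableHeaderReader {

    // Hypothetical value holder for the three header fields read above.
    public static final class AllocationTableHeader {
        public final int blockCount;
        public final int blockSize;
        public final int firstBlock;

        AllocationTableHeader(int blockCount, int blockSize, int firstBlock) {
            this.blockCount = blockCount;
            this.blockSize = blockSize;
            this.firstBlock = firstBlock;
        }
    }

    public static AllocationTableHeader readHeader(File allocationTableFile) throws IOException {
        try (DataInputStream in = new DataInputStream(
                new BufferedInputStream(new FileInputStream(allocationTableFile), 16384))) {
            return new AllocationTableHeader(in.readInt(), in.readInt(), in.readInt());
        } // closed here even if one of the readInt() calls fails
    }
}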

From source file:eu.cloud4soa.soa.jaxrs.test.Initializer.java

public void deployApplication() throws FileNotFoundException {
    final String RS_URI = BASE_URI + "ApplicationDeploymentRS/deployApplication";

    WebClient client = WebClient.create(RS_URI);
    client.type("multipart/mixed").accept(MediaType.TEXT_PLAIN);

    URL fileURL = this.getClass().getClassLoader().getResource("SimpleWar.war");
    if (fileURL == null) {
        throw new FileNotFoundException("SimpleWar.war");
    }

    ByteArrayOutputStream bas = new ByteArrayOutputStream();

    File file = new File(fileURL.getPath());
    file.length();
    FileInputStream fis = new FileInputStream(file);
    BufferedInputStream bis = new BufferedInputStream(fis);
    DataInputStream dis = new DataInputStream(bis);

    //Calculate digest from InputStream
    //        InputStream tempIs = new FileInputStream(file);
    String tempFileDigest = null;
    try {
        FileInputStream tempFis = new FileInputStream(file);
        tempFileDigest = DigestUtils.sha256Hex(tempFis);
    } catch (IOException ex) {
        logger.error(ex.getMessage());
    }

    JSONProvider jsonProvider = new JSONProvider();
    jsonProvider.setSupportUnwrapped(false);
    jsonProvider.setSerializeAsArray(true);
    ProviderFactory.getSharedInstance().registerUserProvider(jsonProvider);

    // POST the request
    //        Response response = applicationDeploymentRS.deployApplication(dis, applicationInstanceJsonObj, paaSInstanceJsonObj);
    List<Attachment> atts = new LinkedList<Attachment>();
    atts.add(new Attachment("applicationInstanceUriId", "text/plain", applicationInstanceUriId));
    atts.add(new Attachment("paaSInstanceUriId", "text/plain", paaSInstanceUriIds.get(selectedPaaS)));
    atts.add(new Attachment("publicKey", "text/plain", publicKey));
    atts.add(new Attachment("secretKey", "text/plain", secretKey));
    atts.add(new Attachment("applicationArchive", "application/octet-stream", dis));

    Response response = client.post(new MultipartBody(atts));
    if (Response.Status.fromStatusCode(response.getStatus()) == Response.Status.ACCEPTED) {
        try {
            System.out.println(
                    "Response Status : " + IOUtils.readStringFromStream((InputStream) response.getEntity()));
        } catch (IOException ex) {
            logger.error(ex.getMessage());
        }
    }

    try {
        fis.close();
        bis.close();
        dis.close();
    } catch (IOException ex) {
        logger.error(ex.getMessage());
    }
}
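
The example closes fis, bis and dis individually, but closing the outermost DataInputStream is enough: FilterInputStream.close() delegates to the stream it wraps. A minimal sketch of the same stream chain with a single close handled by try-with-resources (the file path taken from the command line is only a placeholder):

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

public class StreamChainClose {
    public static void main(String[] args) throws IOException {
        File file = new File(args[0]);
        try (DataInputStream dis = new DataInputStream(
                new BufferedInputStream(new FileInputStream(file)))) {
            byte[] header = new byte[4];
            dis.readFully(header); // read from the stream as needed
        } // closing dis also closes the BufferedInputStream and FileInputStream it wraps
    }
}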

From source file:org.apache.giraph.graph.BspServiceMaster.java

/**
 * Read the finalized checkpoint file and associated metadata files for the
 * checkpoint.  Modifies the {@link PartitionOwner} objects to get the
 * checkpoint prefixes.  It is an optimization to prevent all workers from
 * searching all the files.  Also read in the aggregator data from the
 * finalized checkpoint file and set it.
 *
 * @param superstep Checkpoint set to examine.
 * @param partitionOwners Partition owners to modify with checkpoint
 *        prefixes
 * @throws IOException
 * @throws InterruptedException
 * @throws KeeperException
 */
private void prepareCheckpointRestart(long superstep, Collection<PartitionOwner> partitionOwners)
        throws IOException, KeeperException, InterruptedException {
    FileSystem fs = getFs();
    List<Path> validMetadataPathList = new ArrayList<Path>();
    String finalizedCheckpointPath = getCheckpointBasePath(superstep) + CHECKPOINT_FINALIZED_POSTFIX;
    DataInputStream finalizedStream = fs.open(new Path(finalizedCheckpointPath));
    int prefixFileCount = finalizedStream.readInt();
    for (int i = 0; i < prefixFileCount; ++i) {
        String metadataFilePath = finalizedStream.readUTF() + CHECKPOINT_METADATA_POSTFIX;
        validMetadataPathList.add(new Path(metadataFilePath));
    }

    // Set the merged aggregator data if it exists.
    int aggregatorDataSize = finalizedStream.readInt();
    if (aggregatorDataSize > 0) {
        byte[] aggregatorZkData = new byte[aggregatorDataSize];
        int actualDataRead = finalizedStream.read(aggregatorZkData, 0, aggregatorDataSize);
        if (actualDataRead != aggregatorDataSize) {
            throw new RuntimeException("prepareCheckpointRestart: Only read " + actualDataRead + " of "
                    + aggregatorDataSize + " aggregator bytes from " + finalizedCheckpointPath);
        }
        String mergedAggregatorPath = getMergedAggregatorPath(getApplicationAttempt(), superstep - 1);
        if (LOG.isInfoEnabled()) {
            LOG.info("prepareCheckpointRestart: Reloading merged " + "aggregator " + "data '"
                    + Arrays.toString(aggregatorZkData) + "' to previous checkpoint in path "
                    + mergedAggregatorPath);
        }
        if (getZkExt().exists(mergedAggregatorPath, false) == null) {
            getZkExt().createExt(mergedAggregatorPath, aggregatorZkData, Ids.OPEN_ACL_UNSAFE,
                    CreateMode.PERSISTENT, true);
        } else {
            getZkExt().setData(mergedAggregatorPath, aggregatorZkData, -1);
        }
    }
    masterCompute.readFields(finalizedStream);
    finalizedStream.close();

    Map<Integer, PartitionOwner> idOwnerMap = new HashMap<Integer, PartitionOwner>();
    for (PartitionOwner partitionOwner : partitionOwners) {
        if (idOwnerMap.put(partitionOwner.getPartitionId(), partitionOwner) != null) {
            throw new IllegalStateException("prepareCheckpointRestart: Duplicate partition " + partitionOwner);
        }
    }
    // Reading the metadata files.  Simply assign each partition owner
    // the correct file prefix based on the partition id.
    for (Path metadataPath : validMetadataPathList) {
        String checkpointFilePrefix = metadataPath.toString();
        checkpointFilePrefix = checkpointFilePrefix.substring(0,
                checkpointFilePrefix.length() - CHECKPOINT_METADATA_POSTFIX.length());
        DataInputStream metadataStream = fs.open(metadataPath);
        long partitions = metadataStream.readInt();
        for (long i = 0; i < partitions; ++i) {
            long dataPos = metadataStream.readLong();
            int partitionId = metadataStream.readInt();
            PartitionOwner partitionOwner = idOwnerMap.get(partitionId);
            if (LOG.isInfoEnabled()) {
                LOG.info("prepareSuperstepRestart: File " + metadataPath + " with position " + dataPos
                        + ", partition id = " + partitionId + " assigned to " + partitionOwner);
            }
            partitionOwner.setCheckpointFilesPrefix(checkpointFilePrefix);
        }
        metadataStream.close();
    }
}

From source file:com.android.launcher2.Launcher.java

private static void readConfiguration(Context context, LocaleConfiguration configuration) {
    DataInputStream in = null;
    try {
        in = new DataInputStream(context.openFileInput(PREFERENCES));
        configuration.locale = in.readUTF();
        configuration.mcc = in.readInt();
        configuration.mnc = in.readInt();
    } catch (FileNotFoundException e) {
        // Ignore
    } catch (IOException e) {
        // Ignore
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException e) {
                // Ignore
            }
        }
    }
}
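
The nested try/catch inside the finally block above is the classic pre-Java-7 idiom. A sketch of the same logic with try-with-resources, assuming the Context, LocaleConfiguration and PREFERENCES names from the listing (FileNotFoundException is a subclass of IOException, so a single catch clause covers both cases):

// Sketch only: same behavior as readConfiguration() above, with close()
// handled by try-with-resources.
private static void readConfiguration(Context context, LocaleConfiguration configuration) {
    try (DataInputStream in = new DataInputStream(context.openFileInput(PREFERENCES))) {
        configuration.locale = in.readUTF();
        configuration.mcc = in.readInt();
        configuration.mnc = in.readInt();
    } catch (IOException e) {
        // Ignore: a missing or unreadable file simply leaves the defaults in place
    }
}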

From source file:com.salesmanager.core.module.impl.integration.payment.PaypalTransactionImpl.java

public Map httpcall(IntegrationProperties keys, CoreModuleService cms, String methodName, String nvpStr)
        throws Exception {

    // return null;

    boolean bSandbox = false;
    if (keys.getProperties5().equals("2")) {// sandbox
        bSandbox = true;
    }

    String gv_APIEndpoint = "";
    String PAYPAL_URL = "";
    String gv_Version = "3.3";

    if (bSandbox == true) {
        gv_APIEndpoint = "https://api-3t.sandbox.paypal.com/nvp";
        PAYPAL_URL = new StringBuffer().append(cms.getCoreModuleServiceDevProtocol()).append("://")
                .append(cms.getCoreModuleServiceDevDomain()).append(cms.getCoreModuleServiceDevEnv())
                .append("?cmd=_express-checkout&token=").toString();
        // PAYPAL_URL =
        // "https://www.sandbox.paypal.com/webscr?cmd=_express-checkout&token=";
    } else {
        gv_APIEndpoint = "https://api-3t.paypal.com/nvp";
        PAYPAL_URL = new StringBuffer().append(cms.getCoreModuleServiceProdProtocol()).append("://")
                .append(cms.getCoreModuleServiceProdDomain()).append(cms.getCoreModuleServiceProdEnv())
                .append("?cmd=_express-checkout&token=").toString();
        // PAYPAL_URL =
        // "https://www.paypal.com/cgi-bin/webscr?cmd=_express-checkout&token=";
    }

    String agent = "Mozilla/4.0";
    String respText = "";
    Map nvp = null;

    // deformatNVP( nvpStr );
    String encodedData = "METHOD=" + methodName + "&VERSION=" + gv_Version + "&PWD=" + keys.getProperties2()
            + "&USER=" + keys.getProperties1() + "&SIGNATURE=" + keys.getProperties3() + nvpStr
            + "&BUTTONSOURCE=" + "PP-ECWizard";
    log.debug("REQUEST SENT TO PAYPAL -> " + encodedData);

    HttpURLConnection conn = null;
    DataOutputStream output = null;
    DataInputStream in = null;
    BufferedReader is = null;
    try {
        URL postURL = new URL(gv_APIEndpoint);
        conn = (HttpURLConnection) postURL.openConnection();

        // Set connection parameters. We need to perform input and output,
        // so set both as true.
        conn.setDoInput(true);
        conn.setDoOutput(true);

        // Set the content type we are POSTing. We impersonate it as
        // encoded form data
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        conn.setRequestProperty("User-Agent", agent);

        // conn.setRequestProperty( "Content-Type", type );
        conn.setRequestProperty("Content-Length", String.valueOf(encodedData.length()));
        conn.setRequestMethod("POST");

        // get the output stream to POST to.
        output = new DataOutputStream(conn.getOutputStream());
        output.writeBytes(encodedData);
        output.flush();
        // output.close ();

        // Read input from the input stream.
        in = new DataInputStream(conn.getInputStream());
        int rc = conn.getResponseCode();
        if (rc != -1) {
            is = new BufferedReader(new InputStreamReader(conn.getInputStream()));
            String _line = null;
            while (((_line = is.readLine()) != null)) {
                log.debug("Response line from Paypal -> " + _line);
                respText = respText + _line;
            }
            nvp = StringUtil.deformatUrlResponse(respText);
        } else {
            throw new Exception("Invalid response from paypal, return code is " + rc);
        }

        nvp.put("PAYPAL_URL", PAYPAL_URL);
        nvp.put("NVP_URL", gv_APIEndpoint);

        return nvp;

    } finally {
        if (is != null) {
            try {
                is.close();
            } catch (Exception ignore) {
                // TODO: handle exception
            }
        }

        if (in != null) {
            try {
                in.close();
            } catch (Exception ignore) {
                // TODO: handle exception
            }
        }

        if (output != null) {
            try {
                output.close();
            } catch (Exception ignore) {
                // TODO: handle exception
            }
        }

        if (conn != null) {
            try {
                conn.disconnect();
            } catch (Exception ignore) {
                // TODO: handle exception
            }
        }
    }
}
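
Note that the DataInputStream in above is opened but never read from; the response is consumed through the BufferedReader wrapping the same connection input stream, and in is only closed in the finally block. A hedged sketch of the response-reading part on its own, with a single reader closed by try-with-resources (the UTF-8 charset is an assumption; the original relies on the platform default):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.nio.charset.StandardCharsets;

public class ResponseReader {
    // Reads the full response body line by line; closing the reader also
    // closes the connection's underlying input stream.
    public static String readResponse(HttpURLConnection conn) throws IOException {
        StringBuilder respText = new StringBuilder();
        try (BufferedReader is = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = is.readLine()) != null) {
                respText.append(line);
            }
        }
        return respText.toString();
    }
}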

From source file:edu.kit.dama.dataworkflow.util.DataWorkflowHelper.java

/**
 * Helper method to perform the actual substitution.
 *
 * @param pTask The task whose working directory should be checked for
 * substitution.
 * @param pDirectory The directory whose files should be processed.
 *
 * @throws IOException If the replacement operation fails for some reason.
 * @throws URISyntaxException If any of the URLs in the task (input, output,
 * temp or working dir URL) is invalid.
 */
private static void performSubstitution(DataWorkflowTask pTask, File pDirectory)
        throws IOException, URISyntaxException {
    File[] relevantFileList = pDirectory.listFiles(VAR_FILTER);
    LOGGER.info("Substituting variables in " + relevantFileList.length
            + ((relevantFileList.length == 1) ? " file" : " files"));

    for (File f : relevantFileList) {
        if (f.length() > 10 * FileUtils.ONE_MB) {
            LOGGER.warn(
                    "File {} has a size of {} bytes. Variable substitution is only supported for files with less than 10MB. File is skipped.",
                    f, f.length());
            continue;
        }
        //perform replacement
        LOGGER.info(" * Substituting variables in file '" + f.getPath() + "'");
        DataInputStream din = null;
        FileOutputStream fout = null;
        try {
            LOGGER.info("   - Reading input file");
            byte[] data = new byte[(int) f.length()];
            din = new DataInputStream(new FileInputStream(f));
            din.readFully(data);

            LOGGER.info("   - Substituting variables");
            String dataString = new String(data);

            String accessPointId = pTask.getExecutionEnvironment().getStagingAccessPointId();
            AbstractStagingAccessPoint accessPoint = StagingConfigurationManager.getSingleton()
                    .getAccessPointById(accessPointId);
            LOGGER.debug("  - Obtaining local path for input dir URL {}", pTask.getInputDirectoryUrl());
            File localPath = accessPoint.getLocalPathForUrl(new URL(pTask.getInputDirectoryUrl()),
                    getTaskContext(pTask));
            LOGGER.debug("  - Local path is: {}", localPath);
            String inputDirReplacement = localPath.getCanonicalPath();

            LOGGER.debug("  - Obtaining local path for output dir URL {}", pTask.getOutputDirectoryUrl());
            localPath = accessPoint.getLocalPathForUrl(new URL(pTask.getOutputDirectoryUrl()),
                    getTaskContext(pTask));
            String outputDirReplacement = localPath.getCanonicalPath();

            LOGGER.debug("  - Obtaining local path for working dir URL {}", pTask.getWorkingDirectoryUrl());
            localPath = accessPoint.getLocalPathForUrl(new URL(pTask.getWorkingDirectoryUrl()),
                    getTaskContext(pTask));
            String workingDirReplacement = localPath.getCanonicalPath();

            LOGGER.debug("  - Obtaining local path for temp dir URL {}", pTask.getTempDirectoryUrl());
            localPath = accessPoint.getLocalPathForUrl(new URL(pTask.getTempDirectoryUrl()),
                    getTaskContext(pTask));
            String tempDirReplacement = localPath.getCanonicalPath();

            LOGGER.info("     " + DATA_IN_DIR + ": " + inputDirReplacement);
            LOGGER.info("     " + DATA_OUT_DIR + ": " + outputDirReplacement);
            LOGGER.info("     " + TEMP_DIR + ": " + tempDirReplacement);
            LOGGER.info("     " + WORKING_DIR + ": " + workingDirReplacement);
            //Replace all variables.
            //To obtain a proper path format, the input paths are wrapped in a File object and the URI path is used for replacement, so differences
            //between the source and destination platform are not relevant. Because URI.toPath() returns the path with a leading slash, we use the
            //path beginning at the second index to avoid problems with other programming languages that cannot handle the leading slash.
            dataString = dataString.replaceAll(Pattern.quote(DATA_IN_DIR_VARIABLE), inputDirReplacement)
                    .replaceAll(Pattern.quote(DATA_OUT_DIR_VARIABLE), outputDirReplacement)
                    .replaceAll(Pattern.quote(TEMP_DIR_VARIABLE), tempDirReplacement)
                    .replaceAll(Pattern.quote(WORKING_DIR_VARIABLE), workingDirReplacement);
            LOGGER.info("   - Writing output file");
            fout = new FileOutputStream(f);
            fout.write(dataString.getBytes());
            fout.flush();
            LOGGER.info(" * Substituting operations finished successfully");
        } finally {
            try {
                if (din != null) {
                    din.close();
                }
            } catch (IOException ioe) {
            }
            try {
                if (fout != null) {
                    fout.close();
                }
            } catch (IOException ioe) {
            }
        }
    }
    LOGGER.info("Directory {} processed successfully", pDirectory);
}
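
The readFully-into-a-byte-array pattern above, with both streams closed in a finally block that swallows IOException, can be condensed. A minimal sketch of the read side (the explicit character set is an assumption; the original relies on the platform default):

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class SmallFileReader {
    // Reads a small file completely into memory; the stream is closed when
    // the try block exits, so no explicit finally is needed.
    public static String readFileToString(File f) throws IOException {
        byte[] data = new byte[(int) f.length()];
        try (DataInputStream din = new DataInputStream(new FileInputStream(f))) {
            din.readFully(data);
        }
        return new String(data, StandardCharsets.UTF_8);
    }
}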

From source file:de.huxhorn.lilith.swing.ApplicationPreferences.java

private void initIfNecessary(File file, String resourcePath, String historyBasePath, boolean overwriteAlways) {
    boolean delete = false;
    if (overwriteAlways) {
        delete = true;
    } else if (file.isFile()) {
        byte[] available = null;

        try {
            FileInputStream availableFile = new FileInputStream(file);
            available = getMD5(availableFile);
        } catch (FileNotFoundException e) {
            // ignore
        }

        byte[] current = getMD5(ApplicationPreferences.class.getResourceAsStream(resourcePath));
        if (Arrays.equals(available, current)) {
            // we are done already. The current version is the latest version.
            if (logger.isDebugEnabled())
                logger.debug("The current version of {} is also the latest version.", file.getAbsolutePath());
            return;
        }

        if (available != null) {
            // check older versions if available
            URL historyUrl = getClass().getResource(historyBasePath + "history.txt");
            if (historyUrl != null) {
                List<String> historyList = readLines(historyUrl);

                for (String currentLine : historyList) {
                    InputStream is = getClass().getResourceAsStream(historyBasePath + currentLine + ".md5");
                    if (is != null) {
                        DataInputStream dis = new DataInputStream(is);
                        byte[] checksum = new byte[16];
                        try {
                            dis.readFully(checksum);
                            if (Arrays.equals(available, checksum)) {
                                if (logger.isInfoEnabled())
                                    logger.info("Found old version of {}: {}", file.getAbsolutePath(),
                                            currentLine);
                                delete = true;
                                break;
                            }
                        } catch (IOException e) {
                            if (logger.isWarnEnabled())
                                logger.warn("Exception while reading checksum of {}!", currentLine, e);
                        } finally {
                            try {
                                dis.close();
                            } catch (IOException e) {
                                // ignore
                            }
                        }
                    }
                }
            }
        } else {
            // we couldn't calculate the checksum. Try to delete it...
            delete = true;
        }
    }

    URL resourceUrl = ApplicationPreferences.class.getResource(resourcePath);
    if (resourceUrl == null) {
        if (logger.isErrorEnabled())
            logger.error("Couldn't find resource {}!", resourcePath);
        return;
    }
    copy(resourceUrl, file, delete);
}
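
The checksum comparison above reads exactly 16 bytes with readFully() and closes the DataInputStream in a finally block. A hedged sketch of that step in isolation, returning null when the resource stream is missing:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ChecksumReader {
    // Reads a fixed-length MD5 checksum (16 bytes) from a resource stream.
    public static byte[] readChecksum(InputStream is) throws IOException {
        if (is == null) {
            return null; // resource not present
        }
        try (DataInputStream dis = new DataInputStream(is)) {
            byte[] checksum = new byte[16];
            dis.readFully(checksum);
            return checksum;
        } // dis.close() also closes the wrapped resource stream
    }
}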

From source file:com.example.google.play.apkx.SampleDownloaderActivity.java

/**
 * Go through each of the Expansion APK files and open each as a zip file.
 * Calculate the CRC for each file and return false if any fail to match.
 *
 * @return true if XAPKZipFile is successful
 */
void validateXAPKZipFiles() {
    AsyncTask<Object, DownloadProgressInfo, Boolean> validationTask = new AsyncTask<Object, DownloadProgressInfo, Boolean>() {

        @Override
        protected void onPreExecute() {
            mDashboard.setVisibility(View.VISIBLE);
            mCellMessage.setVisibility(View.GONE);
            mStatusText.setText(R.string.text_verifying_download);
            mPauseButton.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    mCancelValidation = true;
                }
            });
            mPauseButton.setText(R.string.text_button_cancel_verify);
            super.onPreExecute();
        }

        @Override
        protected Boolean doInBackground(Object... params) {
            for (XAPKFile xf : xAPKS) {
                String fileName = Helpers.getExpansionAPKFileName(SampleDownloaderActivity.this, xf.mIsMain,
                        xf.mFileVersion);
                if (!Helpers.doesFileExist(SampleDownloaderActivity.this, fileName, xf.mFileSize, false))
                    return false;
                fileName = Helpers.generateSaveFileName(SampleDownloaderActivity.this, fileName);
                ZipResourceFile zrf;
                byte[] buf = new byte[1024 * 256];
                try {
                    zrf = new ZipResourceFile(fileName);
                    ZipEntryRO[] entries = zrf.getAllEntries();
                    /**
                     * First calculate the total compressed length
                     */
                    long totalCompressedLength = 0;
                    for (ZipEntryRO entry : entries) {
                        totalCompressedLength += entry.mCompressedLength;
                    }
                    float averageVerifySpeed = 0;
                    long totalBytesRemaining = totalCompressedLength;
                    long timeRemaining;
                    /**
                     * Then calculate a CRC for every file in the
                     * Zip file, comparing it to what is stored in
                     * the Zip directory. Note that for compressed
                     * Zip files we must extract the contents to do
                     * this comparison.
                     */
                    for (ZipEntryRO entry : entries) {
                        if (-1 != entry.mCRC32) {
                            long length = entry.mUncompressedLength;
                            CRC32 crc = new CRC32();
                            DataInputStream dis = null;
                            try {
                                dis = new DataInputStream(zrf.getInputStream(entry.mFileName));

                                long startTime = SystemClock.uptimeMillis();
                                while (length > 0) {
                                    int seek = (int) (length > buf.length ? buf.length : length);
                                    dis.readFully(buf, 0, seek);
                                    crc.update(buf, 0, seek);
                                    length -= seek;
                                    long currentTime = SystemClock.uptimeMillis();
                                    long timePassed = currentTime - startTime;
                                    if (timePassed > 0) {
                                        float currentSpeedSample = (float) seek / (float) timePassed;
                                        if (0 != averageVerifySpeed) {
                                            averageVerifySpeed = SMOOTHING_FACTOR * currentSpeedSample
                                                    + (1 - SMOOTHING_FACTOR) * averageVerifySpeed;
                                        } else {
                                            averageVerifySpeed = currentSpeedSample;
                                        }
                                        totalBytesRemaining -= seek;
                                        timeRemaining = (long) (totalBytesRemaining / averageVerifySpeed);
                                        this.publishProgress(new DownloadProgressInfo(totalCompressedLength,
                                                totalCompressedLength - totalBytesRemaining, timeRemaining,
                                                averageVerifySpeed));
                                    }
                                    startTime = currentTime;
                                    if (mCancelValidation)
                                        return true;
                                }
                                if (crc.getValue() != entry.mCRC32) {
                                    Log.e(Constants.TAG, "CRC does not match for entry: " + entry.mFileName);
                                    Log.e(Constants.TAG, "In file: " + entry.getZipFileName());
                                    return false;
                                }
                            } finally {
                                if (null != dis) {
                                    dis.close();
                                }
                            }
                        }
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                    return false;
                }
            }
            return true;
        }

        @Override
        protected void onProgressUpdate(DownloadProgressInfo... values) {
            onDownloadProgress(values[0]);
            super.onProgressUpdate(values);
        }

        @Override
        protected void onPostExecute(Boolean result) {
            if (result) {
                mDashboard.setVisibility(View.VISIBLE);
                mCellMessage.setVisibility(View.GONE);
                mStatusText.setText(R.string.text_validation_complete);
                mPauseButton.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        startMovie();
                    }
                });
                mPauseButton.setText(android.R.string.ok);
            } else {
                mDashboard.setVisibility(View.VISIBLE);
                mCellMessage.setVisibility(View.GONE);
                mStatusText.setText(R.string.text_validation_failed);
                mPauseButton.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        finish();
                    }
                });
                mPauseButton.setText(android.R.string.cancel);
            }
            super.onPostExecute(result);
        }

    };
    validationTask.execute(new Object());
}
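
The inner loop above closes dis in a finally block once each zip entry has been verified. The core of that loop, pulled out as a hedged sketch with try-with-resources (buffer size and method name are illustrative only, and the progress reporting is omitted):

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.CRC32;

public class CrcCheck {
    // Computes a CRC32 over exactly 'length' bytes of the given stream; the
    // DataInputStream is closed when the try block exits, mirroring the
    // finally block in the listing above.
    public static long crcOf(InputStream source, long length) throws IOException {
        byte[] buf = new byte[256 * 1024];
        CRC32 crc = new CRC32();
        try (DataInputStream dis = new DataInputStream(source)) {
            while (length > 0) {
                int chunk = (int) Math.min(length, buf.length);
                dis.readFully(buf, 0, chunk);
                crc.update(buf, 0, chunk);
                length -= chunk;
            }
        }
        return crc.getValue();
    }
}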