Example usage for org.apache.commons.io FileUtils copyURLToFile

Introduction

On this page you can find example usage for org.apache.commons.io FileUtils copyURLToFile.

Prototype

public static void copyURLToFile(URL source, File destination) throws IOException 
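
In recent versions of Commons IO (2.0 and later) there is also an overload that accepts connection and read timeouts, which is worth preferring when downloading from slow or untrusted hosts:

public static void copyURLToFile(URL source, File destination, int connectionTimeoutMillis, int readTimeoutMillis) throws IOException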

Document

Copies bytes from the URL source to a file destination.
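
Below is a minimal, self-contained sketch of the basic call. The URL and destination path are placeholders for illustration; note that copyURLToFile creates any missing parent directories of the destination and overwrites the file if it already exists.

import java.io.File;
import java.io.IOException;
import java.net.URL;

import org.apache.commons.io.FileUtils;

public class CopyUrlToFileExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical source URL and destination file, for illustration only
        URL source = new URL("https://example.com/data.csv");
        File destination = new File("downloads/data.csv");

        // Copies the bytes at the URL to the file, creating the
        // "downloads" directory if it does not already exist
        FileUtils.copyURLToFile(source, destination);
    }
}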

Usage

From source file:org.dataconservancy.dcs.util.droid.DroidSignatureFileManager.java

/**
 * Checks if the url of the file exists and then downloads it to the provided file if it does.
 * @param fileUrl The url of the file to retrieve.
 * @param signatureFile The file object that should be used to store the contents of the file at the url.
 * @return True if the url was found and the contents were able to be stored in the file, false otherwise.
 */
private boolean downloadLatestFile(URL fileUrl, File signatureFile) {
    boolean fileRetrieved = true;
    log.info("Attempting to download droid file: " + fileUrl);
    String contentType = "";
    try {
        HttpURLConnection connection = (HttpURLConnection) fileUrl.openConnection();
        connection.setInstanceFollowRedirects(false);
        connection.setRequestMethod("GET");
        connection.connect();
        contentType = connection.getHeaderField("Content-Type");
    } catch (IOException e) {
        fileRetrieved = false;
        log.error("Error connection to file url: " + fileUrl + " Exception: " + e.getMessage());
    }

    if (fileRetrieved) {
        //National Archives website returns 200 even if the url doesn't exist, so check to make sure the content type is xml and not html
        if ("text/xml".equalsIgnoreCase(contentType)) { // constant-first comparison avoids an NPE if the header is missing
            try {
                FileUtils.copyURLToFile(fileUrl, signatureFile);
                log.info("Successfully downloaded droid file: " + fileUrl);
            } catch (IOException e) {
                fileRetrieved = false;
                log.error("Error downloading file from url: " + fileUrl + " Exception: " + e.getMessage());
            }
        } else {
            fileRetrieved = false;
        }
    }
    return fileRetrieved;
}

From source file:org.datahack.data.uploader.UploadTables.java

public static void main(String[] args) {

    EntityManager em = ParkingDBConnector.getEntityManager();

    try {
        File output = File.createTempFile("WCC_ParkingStreets", "csv");
        output.deleteOnExit();
        String csvName = "WCC_ParkingStreets.csv";
        URL resource = UploadTables.class.getResource("/" + csvName);
        FileUtils.copyURLToFile(resource, output);
        CSVReader reader = new CSVReader(new FileReader(output.getPath()));

        //Headings
        String[] str = reader.readNext();
        em.getTransaction().begin();
        int row = 0;
        while (str != null) {
            row++;
            str = reader.readNext();
            if (str != null) {

                if (str.length == 4) {

                    ParkingStreet pS = new ParkingStreet();

                    try {

                        Integer id = Integer.parseInt(str[0]);
                        Integer usrn = Integer.parseInt(str[1]);
                        String streetName = str[2];
                        Integer parkingZoneKey = Integer.parseInt(str[3]);
                        pS.setId(id);
                        pS.setStreetName(streetName);
                        pS.setuSRN(usrn);

                        ParkingZone pZ = em.find(ParkingZone.class, parkingZoneKey);

                        if (pZ == null) {
                            pZ = new ParkingZone();
                            pZ.setId(parkingZoneKey);

                        }

                        pS.setParkingZone(pZ);
                        em.merge(pS);

                    } catch (NumberFormatException e) {
                        System.out.println("Failed to merge row " + row);
                    }

                    if (row % 1000 == 0) {
                        em.getTransaction().commit();
                        em.clear();
                        em.getTransaction().begin();
                    }

                }

            }

        }

        em.getTransaction().commit();

    } catch (IOException ex) { // FileNotFoundException is an IOException, so one handler suffices
        Logger.getLogger(UploadTables.class.getName()).log(Level.SEVERE, null, ex);
    }

    //Upload bay data
    String csvName = "bay_table.csv";

    try {
        File output = File.createTempFile("bay_table", "csv");
        output.deleteOnExit();
        URL resource = UploadTables.class.getResource("/" + csvName);
        FileUtils.copyURLToFile(resource, output);
        CSVReader reader = new CSVReader(new FileReader(output.getPath()));

        //Headings
        String[] str = reader.readNext();
        em.getTransaction().begin();
        int row = 0;
        while (str != null) {
            row++;
            str = reader.readNext();
            if (str != null) {

                if (str.length == 5) {

                    Bay b = new Bay();
                    try {

                        Integer id = Integer.parseInt(str[0]);
                        Double lat = Double.parseDouble(str[1]);
                        Double lon = Double.parseDouble(str[2]);
                        Integer spaces = Integer.parseInt(str[3]);
                        Integer parkingStreetKey = Integer.parseInt(str[4]);

                        b.setId(id);
                        b.setLatitude(lat);
                        b.setLongitude(lon);
                        b.setTotalSpaces(spaces);

                        ParkingStreet find = em.find(ParkingStreet.class, parkingStreetKey);

                        if (find != null) {
                            ParkingZone pZ = find.getParkingZone();

                            if (pZ != null) {
                                Set<Bay> bays = pZ.getBays();

                                if (bays == null) {
                                    bays = new LinkedHashSet<>();
                                    pZ.setBays(bays);
                                }

                                bays.add(b);
                                b.setParkingZone(pZ);

                                //Calculate parking zone location as mean of lats and lons
                                Iterator<Bay> iterator = bays.iterator();
                                Integer numBays = bays.size();

                                Double latitude = 0.0;
                                Double longitude = 0.0;

                                while (iterator.hasNext()) {
                                    Bay next = iterator.next();
                                    latitude += next.getLatitude() / numBays;
                                    longitude += next.getLongitude() / numBays;

                                }

                                pZ.setLatitude(latitude);
                                pZ.setLongitude(longitude);
                            }

                            em.merge(pZ);

                            System.out.println("Bay added to zone");
                        }

                        em.merge(b);

                    } catch (NumberFormatException e) {
                        System.out.println("Failed to merge row " + row);
                    }

                    if (row % 1000 == 0) {
                        em.getTransaction().commit();
                        em.clear();
                        em.getTransaction().begin();
                    }

                }

            }

        }

        em.getTransaction().commit();

    } catch (IOException ex) { // FileNotFoundException is an IOException, so one handler suffices
        Logger.getLogger(UploadTables.class.getName()).log(Level.SEVERE, null, ex);
    }

    try {
        File output = File.createTempFile("bay_event_table", "csv");
        output.deleteOnExit();
        csvName = "bay_event_table.csv";
        URL resource = UploadTables.class.getResource("/" + csvName);
        FileUtils.copyURLToFile(resource, output);
        CSVReader reader = new CSVReader(new FileReader(output.getPath()));

        //Headings
        String[] str = reader.readNext();
        em.getTransaction().begin();
        int row = 0;
        while (str != null) {
            row++;
            str = reader.readNext();
            if (str != null) {

                if (str.length == 4) {

                    BayEvent bE = new BayEvent();

                    try {

                        Integer id = Integer.parseInt(str[0]);
                        Integer bayId = Integer.parseInt(str[1]);
                        String eventTimeString = str[2];
                        Double estimatedSpaces = Double.parseDouble(str[3]);

                        bE.setId(id);

                        Bay find = em.find(Bay.class, bayId);
                        bE.setBay(find);

                        DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.ENGLISH);
                        java.util.Date d;

                        d = df.parse(eventTimeString);

                        bE.setEventTime(d);

                        bE.setEstimatedSpaces(estimatedSpaces);

                        em.merge(bE);

                    } catch (NumberFormatException e) {
                        System.out.println("Failed to merge row " + row);
                    } catch (ParseException ex) {
                        Logger.getLogger(UploadTables.class.getName()).log(Level.SEVERE, null, ex);
                    }

                    if (row % 1000 == 0) {
                        em.getTransaction().commit();
                        em.clear();
                        em.getTransaction().begin();
                    }

                }

            }

        }

        em.getTransaction().commit();

    } catch (IOException ex) { // FileNotFoundException is an IOException, so one handler suffices
        Logger.getLogger(UploadTables.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:org.datahack.forecast.BayExample.java

public static void main(String[] args) {
    try {
        // path to the Australian wine data included with the time series forecasting
        // package

        File output = File.createTempFile("tempWineData", "arff");
        output.deleteOnExit();
        String dataFileName = "sample-data/wine.arff";
        URL resource = BayExample.class.getResource("/" + dataFileName);
        FileUtils.copyURLToFile(resource, output);

        String pathToWineData = output.getPath();

        // load the wine data
        Instances wine = new Instances(new BufferedReader(new FileReader(pathToWineData)));

        // new forecaster
        WekaForecaster forecaster = new WekaForecaster();

        // set the targets we want to forecast. This method calls
        // setFieldsToLag() on the lag maker object for us
        forecaster.setFieldsToForecast("Fortified,Dry-white");

        // default underlying classifier is SMOreg (SVM) - we'll use
        // gaussian processes for regression instead
        forecaster.setBaseForecaster(new GaussianProcesses());

        forecaster.getTSLagMaker().setTimeStampField("Date"); // date time stamp
        forecaster.getTSLagMaker().setMinLag(1);
        forecaster.getTSLagMaker().setMaxLag(12); // monthly data

        // add a month of the year indicator field
        forecaster.getTSLagMaker().setAddMonthOfYear(true);

        // add a quarter of the year indicator field
        forecaster.getTSLagMaker().setAddQuarterOfYear(true);

        // build the model
        forecaster.buildForecaster(wine, System.out);

        // prime the forecaster with enough recent historical data
        // to cover up to the maximum lag. In our case, we could just supply
        // the 12 most recent historical instances, as this covers our maximum
        // lag period
        forecaster.primeForecaster(wine);

        // forecast for 12 units (months) beyond the end of the
        // training data
        List<List<NumericPrediction>> forecast = forecaster.forecast(12, System.out);

        // output the predictions. Outer list is over the steps; inner list is over
        // the targets
        for (int i = 0; i < 12; i++) {
            List<NumericPrediction> predsAtStep = forecast.get(i);
            for (int j = 0; j < 2; j++) {
                NumericPrediction predForTarget = predsAtStep.get(j);
                System.out.print("" + predForTarget.predicted() + " ");
            }
            System.out.println();
        }

        // we can continue to use the trained forecaster for further forecasting
        // by priming with the most recent historical data (as it becomes available).
        // At some stage it becomes prudent to re-build the model using current
        // historical data.

    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:org.datahack.forecast.TimeSeriesExample.java

public static void main(String[] args) {
    try {
        // path to the parking occupancy sample data (this example follows the
        // structure of the Australian wine forecasting demo)

        File output = File.createTempFile("tempWineData", "arff");
        output.deleteOnExit();
        String dataFileName = "sample-data/parking344.arff";
        URL resource = TimeSeriesExample.class.getResource("/" + dataFileName);
        FileUtils.copyURLToFile(resource, output);

        String pathToWineData = output.getPath();

        // load the parking data (variable names below are kept from the wine example)
        Instances wine = new Instances(new BufferedReader(new FileReader(pathToWineData)));

        // new forecaster
        WekaForecaster forecaster = new WekaForecaster();

        // set the targets we want to forecast. This method calls
        // setFieldsToLag() on the lag maker object for us
        forecaster.setFieldsToForecast("occupiedSpaces");

        // default underlying classifier is SMOreg (SVM) - we'll use
        // gaussian processes for regression instead
        forecaster.setBaseForecaster(new GaussianProcesses());

        forecaster.getTSLagMaker().setTimeStampField("eventTime"); // date time stamp
        forecaster.getTSLagMaker().setMinLag(1);
        forecaster.getTSLagMaker().setMaxLag(12); // monthly data

        // add a month of the year indicator field
        forecaster.getTSLagMaker().setAddMonthOfYear(true);

        // add a quarter of the year indicator field
        forecaster.getTSLagMaker().setAddQuarterOfYear(true);

        // build the model
        forecaster.buildForecaster(wine, System.out);

        // prime the forecaster with enough recent historical data
        // to cover up to the maximum lag. In our case, we could just supply
        // the 12 most recent historical instances, as this covers our maximum
        // lag period
        forecaster.primeForecaster(wine);

        // forecast for 12 units (months) beyond the end of the
        // training data
        List<List<NumericPrediction>> forecast = forecaster.forecast(12, System.out);

        // output the predictions. Outer list is over the steps; inner list is over
        // the targets
        for (int i = 0; i < 12; i++) {
            List<NumericPrediction> predsAtStep = forecast.get(i);
            // only one target ("occupiedSpaces") is forecast here, so iterate
            // over the list size rather than a hard-coded target count
            for (int j = 0; j < predsAtStep.size(); j++) {
                NumericPrediction predForTarget = predsAtStep.get(j);
                System.out.print(predForTarget.predicted() + " ");
            }
            System.out.println();
        }

        // we can continue to use the trained forecaster for further forecasting
        // by priming with the most recent historical data (as it becomes available).
        // At some stage it becomes prudent to re-build the model using current
        // historical data.

    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:org.datavec.api.transform.transform.geo.GeoIPFetcher.java

public static synchronized File fetchCityDB() throws IOException {
    File cityFile = new File(GEOIP_DIR, CITY_DB);
    if (cityFile.isFile()) {
        return cityFile;
    }
    cityFile = new File(GEOIP_DIR, CITY_LITE_DB);
    if (cityFile.isFile()) {
        return cityFile;
    }
    cityFile = new File(GEOIP_DIR2, CITY_LITE_DB);
    if (cityFile.isFile()) {
        return cityFile;
    }

    log.info("Downloading GeoLite2 City database...");
    File archive = new File(GEOIP_DIR2, CITY_LITE_DB + ".gz");
    File dir = new File(GEOIP_DIR2);
    dir.mkdirs();
    FileUtils.copyURLToFile(new URL(CITY_LITE_URL), archive);
    ArchiveUtils.unzipFileTo(archive.getAbsolutePath(), dir.getAbsolutePath());
    assert cityFile.isFile();

    return cityFile;
}

From source file:org.datavec.image.loader.BaseImageLoader.java

public static void downloadAndUntar(Map urlMap, File fullDir) {
    try {
        File file = new File(fullDir, urlMap.get("filesFilename").toString());
        if (!file.isFile()) {
            FileUtils.copyURLToFile(new URL(urlMap.get("filesURL").toString()), file);
        }

        String fileName = file.toString();
        if (fileName.endsWith(".tgz") || fileName.endsWith(".tar.gz") || fileName.endsWith(".gz")
                || fileName.endsWith(".zip"))
            ArchiveUtils.unzipFileTo(file.getAbsolutePath(), fullDir.getAbsolutePath());
    } catch (IOException e) {
        throw new IllegalStateException("Unable to fetch images", e);
    }
}

From source file:org.datavec.transform.basic.BasicTransformExample.java

public static void main(String[] args) throws Exception {

    String filename = "iris.data";
    URL url = new URL("https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data");

    File irisText = new File(filename);
    if (!irisText.exists()) {
        FileUtils.copyURLToFile(url, irisText);
    }

    SparkConf conf = new SparkConf();
    conf.setMaster("local[*]");
    conf.setAppName("DataVec Example");

    JavaSparkContext sc = new JavaSparkContext(conf);
    JavaRDD<String> stringData = sc.textFile(filename);

    //Take out empty lines.
    RecordReader rr = new CSVRecordReader();
    JavaRDD<List<Writable>> parsedInputData = stringData.filter((x) -> !x.isEmpty())
            .map(new StringToWritablesFunction(rr));

    // Print the original text file. Note the empty line at the bottom.
    List<String> inputDataCollected = stringData.collect();
    System.out.println("\n\n---- Original Data ----");
    for (String s : inputDataCollected)
        System.out.println("'" + s + "'");

    // Convert each parsed record back to a comma-separated string.
    JavaRDD<String> processedAsString = parsedInputData.map(new WritablesToStringFunction(","));
    List<String> inputDataParsed = processedAsString.collect();
    System.out.println("\n\n---- Parsed Data ----");
    for (String s : inputDataParsed)
        System.out.println("'" + s + "'");

    // the String to label conversion. Define schema and transform:
    Schema schema = new Schema.Builder()
            .addColumnsDouble("Sepal length", "Sepal width", "Petal length", "Petal width")
            .addColumnCategorical("Species", "Iris-setosa", "Iris-versicolor", "Iris-virginica").build();

    TransformProcess tp = new TransformProcess.Builder(schema).categoricalToInteger("Species").build();

    // do the transformation.
    JavaRDD<List<Writable>> processedData = SparkTransformExecutor.execute(parsedInputData, tp);

    // This is where we print the final result (which you would normally save to a text file).
    processedAsString = processedData.map(new WritablesToStringFunction(","));
    inputDataParsed = processedAsString.collect();
    System.out.println("\n\n---- Parsed and filtered data ----");
    for (String s : inputDataParsed)
        System.out.println(s);

}

From source file:org.dbflute.intro.app.logic.intro.IntroUpgradeLogic.java

public void upgrade() {
    File jarPathFile = new File("./dbflute-intro.jar");

    Class<?> clazz = this.getClass();
    URL location = clazz.getResource("/" + clazz.getName().replaceAll("\\.", "/") + ".class");
    String path = location.getPath();

    if (path.lastIndexOf("!") != -1) {
        try {
            jarPathFile = new File(URLDecoder.decode(path.substring("file:/".length(), path.lastIndexOf("!")),
                    StandardCharsets.UTF_8.name()));
        } catch (UnsupportedEncodingException e) {
            throw new IllegalStateException(e);
        }
    }

    try {
        FileUtils.copyURLToFile(
                new URL("http://p1us2er0.github.io/dbflute-intro/download/" + jarPathFile.getName()),
                jarPathFile);
    } catch (IOException e) { // MalformedURLException is an IOException, so one handler suffices
        throw new IllegalStateException(e);
    }
}

From source file:org.deeplearning4j.base.MnistFetcher.java

private void tryDownloadingAFewTimes(int attempt, URL url, File f, String targetMD5) throws IOException {
    int maxTries = 3;
    boolean isCorrectFile = f.isFile();
    if (attempt < maxTries && !isCorrectFile) {
        FileUtils.copyURLToFile(url, f);
        if (!checkMD5OfFile(targetMD5, f))
            tryDownloadingAFewTimes(attempt + 1, url, f, targetMD5);
    } else if (isCorrectFile) {
        // do nothing, file downloaded
    } else {
        throw new IOException("Could not download " + url.getPath() + "\n properly despite trying " + maxTries
                + " times, check your connection. File info:" + "\nTarget MD5: " + targetMD5
                + "\nHash matches: " + checkMD5OfFile(targetMD5, f) + "\nIs valid file: " + f.isFile());
    }
}

From source file:org.deeplearning4j.datasets.fetchers.CurvesDataFetcher.java

private void download() throws IOException {
    // Mac gives a unique tmp dir on each run, so store the data under the
    // user home directory to persist it across restarts
    File tmpDir = new File(System.getProperty("user.home"));

    File baseDir = new File(tmpDir, LOCAL_DIR_NAME);
    if (!(baseDir.isDirectory() || baseDir.mkdir())) {
        throw new IOException("Could not mkdir " + baseDir);
    }

    File dataFile = new File(baseDir, CURVES_FILE_NAME);

    if (!dataFile.isFile()) { // isFile() already implies exists()
        log.info("Downloading curves dataset...");
        FileUtils.copyURLToFile(new URL(CURVES_URL), dataFile);
    }

    data = SerializationUtils.readObject(dataFile);

}