List of usage examples for org.apache.hadoop.fs FileSystem copyToLocalFile
public void copyToLocalFile(boolean delSrc, Path src, Path dst, boolean useRawLocalFileSystem) throws IOException
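Before the examples below, here is a minimal sketch of the call itself; the paths and class name are hypothetical, chosen only to illustrate the four parameters. Passing useRawLocalFileSystem = true writes through RawLocalFileSystem, so no local .crc checksum sidecar file is produced.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyToLocalExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // Hypothetical source (HDFS) and destination (local) paths.
        Path src = new Path("/data/input/part-00000");
        Path dst = new Path("/tmp/part-00000");

        // delSrc = false: keep the source file on HDFS.
        // useRawLocalFileSystem = true: skip the checksummed local FS, so no .crc file is written.
        fs.copyToLocalFile(false, src, dst, true);
    }
}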
From source file: edu.uci.ics.asterix.aoya.HDFSBackup.java
License: Apache License
private void performRestore(List<Path[]> paths) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    for (Path[] p : paths) {
        LOG.info("Restoring " + p[0] + " to " + p[1] + ".");
        File f = new File(p[1].toString() + File.separator + p[0].getName());
        LOG.info(f.getAbsolutePath());
        if (f.exists()) {
            FileUtils.deleteDirectory(f);
        }
        LOG.info(f.exists());
        fs.copyToLocalFile(false, p[0], p[1], true);
    }
}
From source file: ml.shifu.shifu.util.ModelSpecLoaderUtils.java
License: Apache License
/**
 * Load generic model from local or HDFS storage and initialize.
 *
 * @param modelConfig
 *            model config
 * @param genericModelConfigs
 *            generic model files
 * @param sourceType
 *            source type
 * @param models
 *            models list to hold the result
 * @throws IOException
 *             Exception when failing to load generic models
 */
public static void loadGenericModels(ModelConfig modelConfig, List<FileStatus> genericModelConfigs,
        SourceType sourceType, List<BasicML> models) throws IOException {
    FileSystem hdfs = HDFSUtils.getFS();
    PathFinder pathFinder = new PathFinder(modelConfig);
    String src = pathFinder.getModelsPath(sourceType);
    File f = new File(System.getProperty(Constants.USER_DIR) + "/models");
    // check if model dir exists
    if (!f.exists()) {
        hdfs.copyToLocalFile(false, new Path(src), // source
                new Path(System.getProperty(Constants.USER_DIR)), true);
    }

    for (FileStatus fst : genericModelConfigs) {
        GenericModelConfig gmc = CommonUtils.loadJSON( // loading as GenericModelConfig
                fst.getPath().toString(), sourceType, GenericModelConfig.class);
        String alg = (String) gmc.getProperties().get(Constants.GENERIC_ALGORITHM);
        String genericModelPath = System.getProperty(Constants.USER_DIR) // <user.dir>
                + File.separator + Constants.MODELS; // + /models
        // + File.separator + modelConfig.getBasic().getName(); // + /ModelName
        gmc.getProperties().put(Constants.GENERIC_MODEL_PATH, genericModelPath);
        log.info("Generic model path is : {}.", gmc.getProperties().get(Constants.GENERIC_MODEL_PATH));
        if (Constants.TENSORFLOW.equals(alg)) {
            try {
                // Instantiate an evaluator class instance, used for evaluation
                Class<?> clazz = Class.forName(ComputeImplClass.Tensorflow.getClassName());
                Computable computable = (Computable) clazz.newInstance();
                computable.init(gmc);
                GenericModel genericModel = new GenericModel(computable, gmc.getProperties());
                models.add(genericModel);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } else {
            throw new RuntimeException("Algorithm: " + alg + " is not supported in generic model yet.");
        }
    }
}
From source file: org.apache.tinkerpop.gremlin.hadoop.process.computer.AbstractHadoopGraphComputer.java
License: Apache License
public static File copyDirectoryIfNonExistent(final FileSystem fileSystem, final String directory) {
    try {
        final String hadoopGremlinLibsRemote = "hadoop-gremlin-" + Gremlin.version() + "-libs";
        final Path path = new Path(directory);
        if (Boolean.valueOf(System.getProperty("is.testing", "false"))
                || (fileSystem.exists(path) && fileSystem.isDirectory(path))) {
            final File tempDirectory = new File(
                    System.getProperty("java.io.tmpdir") + File.separator + hadoopGremlinLibsRemote);
            assert tempDirectory.exists() || tempDirectory.mkdirs();
            final String tempPath = tempDirectory.getAbsolutePath() + File.separator + path.getName();
            final RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(path, false);
            while (files.hasNext()) {
                final LocatedFileStatus f = files.next();
                fileSystem.copyToLocalFile(false, f.getPath(),
                        new Path(tempPath + System.getProperty("file.separator") + f.getPath().getName()),
                        true);
            }
            return new File(tempPath);
        } else
            return new File(directory);
    } catch (final IOException e) {
        throw new IllegalStateException(e.getMessage(), e);
    }
}
From source file: org.deeplearning4j.hadoop.nlp.uima.AnalysisEngineHdfs.java
License: Apache License
/**
 * Reads the configuration from the specified location
 * @param from the file system to read from
 * @param to the analysis engine descriptor
 * @param extraArgs any extra arguments to specify
 * @return the created analysis engine
 * @throws Exception if one occurs
 */
public static AnalysisEngine readConfFrom(FileSystem from, Path to, Object... extraArgs) throws Exception {
    File local = new File(to.getName());
    from.copyToLocalFile(false, to, new Path(local.getPath()), true);
    AnalysisEngineDescription desc = createEngineDescriptionFromPath(local.getAbsolutePath(), extraArgs);
    AnalysisEngine ret = createEngine(desc);
    local.delete();
    return ret;
}