List of usage examples for java.io.File.toPath()
public Path toPath()
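Before the examples from real projects, a minimal sketch of the conversion itself (the file name is hypothetical): toPath() bridges the legacy java.io.File API to NIO.2, and Path.toFile() converts back.

import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;

public class ToPathDemo {
    public static void main(String[] args) throws Exception {
        File file = new File("example.txt");    // hypothetical file name
        Path path = file.toPath();              // bridge from java.io to java.nio.file
        System.out.println(Files.exists(path)); // NIO.2 operations are now available
        File back = path.toFile();              // round-trip back to java.io.File
        System.out.println(file.equals(back));  // true: same abstract pathname
    }
}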
From source file:de.teamgrit.grit.report.PdfConcatenator.java
/**
 * Concatenates the PDFs generated by {@link TexGenerator}.
 *
 * @param folderWithPdfs
 *            the folder with PDFs
 * @param outPath
 *            the out path
 * @param exerciseName
 *            the context
 * @param studentsWithoutSubmissions
 *            list of students who did not submit any solution
 * @return the path to the created PDF
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
protected static Path concatPDFS(Path folderWithPdfs, Path outPath, String exerciseName,
        List<Student> studentsWithoutSubmissions) throws IOException {
    if ((folderWithPdfs == null) || !Files.isDirectory(folderWithPdfs)) {
        throw new IOException("The Path doesn't point to a Folder");
    }

    File file = new File(outPath.toFile(), "report.tex");
    if (Files.exists(file.toPath(), LinkOption.NOFOLLOW_LINKS)) {
        Files.delete(file.toPath());
    }
    file.createNewFile();

    writePreamble(file, exerciseName);
    writeMissingStudents(file, studentsWithoutSubmissions);
    writeFiles(file, folderWithPdfs);
    writeClosing(file);

    PdfCreator.createPdfFromPath(file.toPath(), outPath);
    return file.toPath();
}
From source file:com.amazonaws.codepipeline.jenkinsplugin.CompressionTools.java
private static void compressArchive(final Path pathToCompress, final ArchiveOutputStream archiveOutputStream,
        final ArchiveEntryFactory archiveEntryFactory, final CompressionType compressionType,
        final BuildListener listener) throws IOException {
    final List<File> files = addFilesToCompress(pathToCompress, listener);
    LoggingHelper.log(listener, "Compressing directory '%s' as a '%s' archive",
            pathToCompress.toString(), compressionType.name());

    for (final File file : files) {
        // Store each entry under its path relative to the directory being compressed.
        final String newTarFileName = pathToCompress.relativize(file.toPath()).toString();
        final ArchiveEntry archiveEntry = archiveEntryFactory.create(file, newTarFileName);
        archiveOutputStream.putArchiveEntry(archiveEntry);
        try (final FileInputStream fileInputStream = new FileInputStream(file)) {
            IOUtils.copy(fileInputStream, archiveOutputStream);
        }
        archiveOutputStream.closeArchiveEntry();
    }
}
From source file:it.serverSystem.ClusterTest.java
private static void expectLog(Orchestrator orchestrator, String expectedLog) throws IOException {
    File logFile = orchestrator.getServer().getWebLogs();
    try (Stream<String> lines = Files.lines(logFile.toPath())) {
        assertThat(lines.anyMatch(s -> StringUtils.containsIgnoreCase(s, expectedLog))).isTrue();
    }
}
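The try-with-resources in that example matters: Files.lines streams the file lazily and keeps it open until the stream is closed. A stripped-down sketch of the same pattern, with a hypothetical log file name:

import java.io.File;
import java.nio.file.Files;
import java.util.stream.Stream;

public class LogScan {
    public static void main(String[] args) throws Exception {
        File logFile = new File("server.log"); // hypothetical log file
        // Close the stream so the underlying file handle is released.
        try (Stream<String> lines = Files.lines(logFile.toPath())) {
            System.out.println(lines.anyMatch(s -> s.contains("Started")));
        }
    }
}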
From source file:com.fizzed.blaze.util.Streamables.java
static public StreamableInput input(File file) {
    Objects.requireNonNull(file, "file cannot be null");
    return input(file.toPath());
}
From source file:com.fizzed.blaze.util.Streamables.java
static public StreamableOutput output(File file) {
    Objects.requireNonNull(file, "file cannot be null");
    return output(file.toPath());
}
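A hypothetical call site for the two overloads above. The import locations of StreamableInput and StreamableOutput are assumptions (the snippets only confirm the Streamables class itself); both File overloads simply delegate to the Path-based variants via file.toPath().

import com.fizzed.blaze.util.StreamableInput;  // assumed package
import com.fizzed.blaze.util.StreamableOutput; // assumed package
import com.fizzed.blaze.util.Streamables;
import java.io.File;

public class StreamablesDemo {
    public static void main(String[] args) {
        File in = new File("input.txt");   // hypothetical files
        File out = new File("output.txt");
        StreamableInput input = Streamables.input(in);
        StreamableOutput output = Streamables.output(out);
    }
}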
From source file:de.alpharogroup.crypto.key.reader.PrivateKeyReader.java
/**
 * Reads the private key from a PEM file as a base64-encoded {@link String} value.
 *
 * @param file
 *            the file in PEM format that contains the private key.
 * @return the base64-encoded {@link String} value.
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
public static String readPemFileAsBase64(final File file) throws IOException {
    final byte[] keyBytes = Files.readAllBytes(file.toPath());
    final String privateKeyAsBase64String = new String(keyBytes).replace(BEGIN_RSA_PRIVATE_KEY_PREFIX, "")
            .replace(END_RSA_PRIVATE_KEY_SUFFIX, "").trim();
    return privateKeyAsBase64String;
}
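A hedged usage sketch for the reader above; the file name is illustrative, and the decoding step is an assumption about what a caller might do next (the MIME decoder tolerates the line breaks a PEM body usually contains):

import de.alpharogroup.crypto.key.reader.PrivateKeyReader;
import java.io.File;
import java.util.Base64;

public class PemDemo {
    public static void main(String[] args) throws Exception {
        File pem = new File("private.pem"); // hypothetical PEM file
        String base64 = PrivateKeyReader.readPemFileAsBase64(pem);
        // The returned string keeps the PEM body's inner line breaks, so use the MIME decoder.
        byte[] der = Base64.getMimeDecoder().decode(base64);
        System.out.println("Key is " + der.length + " bytes of DER.");
    }
}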
From source file:JMeterProcessing.JMeterPropertiesGenerator.java
private static void generatePropertiesFile(Map<String, Long> idValuesMap) throws IOException {
    String propertiesOutputDir = System.getProperty("properties.output.dir");

    File original = new File(propertiesOutputDir + "/testray.jmeter.full.depth.properties");
    File generated = new File(propertiesOutputDir + "/testray.jmeter.properties");

    Files.copy(original.toPath(), generated.toPath(), StandardCopyOption.REPLACE_EXISTING);

    // Append the generated properties to the copied file.
    try (BufferedWriter writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(generated, true), "utf-8"))) {
        printProperties(writer, idValuesMap);
        writer.flush();
    }
}
From source file:com.streamsets.datacollector.util.ClusterUtil.java
public static void setupCluster(String testName, String pipelineJson, YarnConfiguration yarnConfiguration)
        throws Exception {
    System.setProperty("sdc.testing-mode", "true");
    System.setProperty(MiniSDCTestingUtility.PRESERVE_TEST_DIR, "true");
    yarnConfiguration.set("yarn.nodemanager.delete.debug-delay-sec", "600");
    miniSDCTestingUtility = new MiniSDCTestingUtility();
    File dataTestDir = miniSDCTestingUtility.getDataTestDir();

    // Copy spark files under the test data directory into a dir called "spark".
    File sparkHome = ClusterUtil.createSparkHome(dataTestDir);

    // Start mini YARN cluster.
    miniYarnCluster = miniSDCTestingUtility.startMiniYarnCluster(testName, 1, 1, 1, yarnConfiguration);
    Configuration config = miniYarnCluster.getConfig();

    // Wait until the RM has been assigned a real port (port "0" means not yet bound).
    long deadline = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(10);
    while ("0".equals(config.get(YarnConfiguration.RM_ADDRESS).split(":")[1])) {
        if (System.currentTimeMillis() > deadline) {
            throw new IllegalStateException("Timed out waiting for RM to come up.");
        }
        LOG.debug("RM address still not set in configuration, waiting...");
        TimeUnit.MILLISECONDS.sleep(100);
    }
    LOG.debug("RM at " + config.get(YarnConfiguration.RM_ADDRESS));

    Properties sparkHadoopProps = new Properties();
    for (Map.Entry<String, String> entry : config) {
        sparkHadoopProps.setProperty("spark.hadoop." + entry.getKey(), entry.getValue());
    }

    LOG.debug("Creating spark properties file at " + dataTestDir);
    File propertiesFile = new File(dataTestDir, "spark.properties");
    propertiesFile.createNewFile();
    try (FileOutputStream sdcOutStream = new FileOutputStream(propertiesFile)) {
        sparkHadoopProps.store(sdcOutStream, null);
        sdcOutStream.flush();
    }

    // Pass this property file to spark-submit so it picks up the YARN configs.
    System.setProperty(SPARK_PROPERTY_FILE, propertiesFile.getAbsolutePath());

    File sparkBin = new File(sparkHome, "bin");
    for (File file : sparkBin.listFiles()) {
        MiniSDCTestingUtility.setExecutePermission(file.toPath());
    }

    miniSDC = miniSDCTestingUtility.createMiniSDC(MiniSDC.ExecutionMode.CLUSTER);
    miniSDC.startSDC();
    serverURI = miniSDC.getServerURI();
    miniSDC.createPipeline(pipelineJson);
    miniSDC.startPipeline();

    // Hard wait of up to 2 minutes for the slave SDCs to come up.
    int attempt = 0;
    while (miniSDC.getListOfSlaveSDCURI().size() == 0 && attempt < 24) {
        Thread.sleep(5000);
        attempt++;
        LOG.debug("Attempt no: " + attempt + " to retrieve list of slaves");
    }
    if (miniSDC.getListOfSlaveSDCURI().size() == 0) {
        throw new IllegalStateException("Timed out waiting for slaves to come up.");
    }
}
From source file:BluemixUtils.java
private static void copyPhotosInTempDir(List<ClassifierUnit> all, int minSize) throws IOException {
    for (ClassifierUnit unit : all) {
        File dir = new File(TMP_DIR + File.separator + unit.getName());
        dir.mkdir();
        System.out.println("Copying files to " + dir.getAbsolutePath());

        File photosDir = new File(unit.getFolderWithImages());
        File[] photos = photosDir.listFiles(filter);
        for (int i = 0; i < minSize; i++) {
            File source = photos[i];
            System.out.println("Copying file " + photos[i].getName());
            // Rename each copy to a 1-based index, keeping the original file extension.
            Files.copy(source.toPath(),
                    new File(dir.getAbsolutePath() + File.separator + (i + 1)
                            + source.getName().substring(source.getName().indexOf("."))).toPath(),
                    StandardCopyOption.REPLACE_EXISTING);
        }
    }
}
From source file:muffinc.yafdivj.helper.FeretHandler.java
public static void move(File file) {
    try {
        String newFolder = NEW_FOLDER + file.getName().substring(1, 5) + "/";
        File file1 = new File(newFolder);
        if (!file1.exists()) {
            file1.mkdirs();
        }
        // Note: despite the method name, this copies the file rather than moving it.
        Files.copy(file.toPath(), new File(newFolder + file.getName()).toPath());
    } catch (IOException e) {
        e.printStackTrace();
    }
}