List of usage examples for java.nio.file.Files.createTempFile
public static Path createTempFile(String prefix, String suffix, FileAttribute<?>... attrs) throws IOException
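A minimal, self-contained sketch of the basic call before the collected examples (the prefix, suffix, and file contents here are illustrative, not taken from any of the sources below):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class CreateTempFileExample {
    public static void main(String[] args) throws IOException {
        // Creates an empty file such as "report-1234567890.txt" in the default temp directory
        Path tempFile = Files.createTempFile("report-", ".txt");
        System.out.println("Created: " + tempFile);

        // Write to the file, then delete it once it is no longer needed
        Files.write(tempFile, "hello".getBytes(StandardCharsets.UTF_8));
        Files.delete(tempFile);
    }
}

Most of the examples that follow use the sibling overload Files.createTempFile(Path dir, String prefix, String suffix, FileAttribute<?>... attrs), which places the file in a caller-supplied directory instead of the default temporary-file directory.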
From source file:Main.java
public static void main(String[] args) throws Exception {
    Path rootDirectory = FileSystems.getDefault().getPath("C:/home/docs");
    Path tempDirectory = Files.createTempDirectory(rootDirectory, "");
    System.out.println("Temporary directory created successfully!");
    String dirPath = tempDirectory.toString();
    System.out.println(dirPath);
    Path tempFile = Files.createTempFile(tempDirectory, "", "");
    System.out.println("Temporary file created successfully!");
    String filePath = tempFile.toString();
    System.out.println(filePath);
}
From source file:Main.java
public static void main(String[] args) {
    Path basedir = FileSystems.getDefault().getPath("C:/tutorial/tmp");
    String tmp_file_prefix = "Swing_";
    String tmp_file_sufix = ".txt";
    // get the default temporary folder's path
    String default_tmp = System.getProperty("java.io.tmpdir");
    System.out.println(default_tmp);
    try {
        // create a tmp file in the base dir
        Path tmp_3 = Files.createTempFile(basedir, tmp_file_prefix, tmp_file_sufix);
        System.out.println("TMP: " + tmp_3.toString());
    } catch (IOException e) {
        System.err.println(e);
    }
}
From source file:de.tudarmstadt.ukp.dkpro.discourse.pdtbparser.PDTBParserWrapper.java
public static void main(String[] args) throws Exception {
    final PDTBParserWrapper pdtbParserWrapper = new PDTBParserWrapper();
    Path out = Files.createTempFile(pdtbParserWrapper.tempDirectory, "out", ".xml");
    pdtbParserWrapper.run(new File("src/main/resources/test302.txt"), out.toFile());
    pdtbParserWrapper.clean();
}
From source file:hdfs.MiniHDFS.java
public static void main(String[] args) throws Exception {
    if (args.length != 1 && args.length != 3) {
        throw new IllegalArgumentException("Expected: MiniHDFS <baseDirectory> [<kerberosPrincipal> <kerberosKeytab>], "
                + "got: " + Arrays.toString(args));
    }
    boolean secure = args.length == 3;

    // configure Paths
    Path baseDir = Paths.get(args[0]);
    // hadoop-home/, so logs will not complain
    if (System.getenv("HADOOP_HOME") == null) {
        Path hadoopHome = baseDir.resolve("hadoop-home");
        Files.createDirectories(hadoopHome);
        System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString());
    }
    // hdfs-data/, where any data is going
    Path hdfsHome = baseDir.resolve("hdfs-data");

    // configure cluster
    Configuration cfg = new Configuration();
    cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString());
    // lower default permission: TODO: needed?
    cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766");

    // optionally configure security
    if (secure) {
        String kerberosPrincipal = args[1];
        String keytabFile = args[2];
        cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true");
        cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
        cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
        cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal);
        cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile);
        cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile);
        cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true");
        cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true");
        cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true");
    }
    UserGroupInformation.setConfiguration(cfg);

    // TODO: remove hardcoded port!
    MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg);
    if (secure) {
        builder.nameNodePort(9998);
    } else {
        builder.nameNodePort(9999);
    }
    MiniDFSCluster dfs = builder.build();

    // Configure contents of the filesystem
    org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch");
    try (FileSystem fs = dfs.getFileSystem()) {
        // Set the elasticsearch user directory up
        fs.mkdirs(esUserPath);
        if (UserGroupInformation.isSecurityEnabled()) {
            List<AclEntry> acls = new ArrayList<>();
            acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch")
                    .setPermission(FsAction.ALL).build());
            fs.modifyAclEntries(esUserPath, acls);
        }

        // Install a pre-existing repository into HDFS
        String directoryName = "readonly-repository";
        String archiveName = directoryName + ".tar.gz";
        URL readOnlyRepositoryArchiveURL = MiniHDFS.class.getClassLoader().getResource(archiveName);
        if (readOnlyRepositoryArchiveURL != null) {
            Path tempDirectory = Files.createTempDirectory(MiniHDFS.class.getName());
            File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile();
            FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive);
            FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile());

            fs.copyFromLocalFile(true, true,
                    new org.apache.hadoop.fs.Path(tempDirectory.resolve(directoryName).toAbsolutePath().toUri()),
                    esUserPath.suffix("/existing/" + directoryName));

            FileUtils.deleteDirectory(tempDirectory.toFile());
        }
    }

    // write our PID file
    Path tmp = Files.createTempFile(baseDir, null, null);
    String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
    Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8));
    Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE);

    // write our port file
    tmp = Files.createTempFile(baseDir, null, null);
    Files.write(tmp, Integer.toString(dfs.getNameNodePort()).getBytes(StandardCharsets.UTF_8));
    Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), StandardCopyOption.ATOMIC_MOVE);
}
From source file:org.schedulesdirect.api.utils.HttpUtils.java
static private void setupAudit() {
    try {
        Path root = Paths.get(Config.get().captureRoot().getAbsolutePath(), "http");
        Files.createDirectories(root);
        AUDIT_LOG = Files.createTempFile(root,
                String.format("%s_", new SimpleDateFormat("yyyyMMddHHmmss").format(new Date())), ".log");
        auditSetup = true;
    } catch (IOException e) {
        LOG.error("Unable to create HTTP audit log!", e);
        AUDIT_LOG = null;
    }
}
From source file:org.opendatakit.briefcase.reused.UncheckedFiles.java
public static Path createTempFile(String prefix, String suffix, FileAttribute<?>... attrs) {
    try {
        return Files.createTempFile(prefix, suffix, attrs);
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
From source file:org.sonar.api.utils.internal.DefaultTempFolder.java
private static Path createTempFile(Path baseDir, String prefix, String suffix) {
    try {
        return Files.createTempFile(baseDir, prefix, suffix);
    } catch (IOException e) {
        throw new IllegalStateException("Failed to create temp file", e);
    }
}
From source file:org.roda_project.commons_ip.utils.Utils.java
public static Path copyResourceFromClasspathToDir(Class<?> resourceClass, Path dir, String resourceTempSuffix,
        String resourcePath) throws IOException, InterruptedException {
    try {
        Path resource = Files.createTempFile(dir, "", resourceTempSuffix);
        InputStream inputStream = resourceClass.getResourceAsStream(resourcePath);
        OutputStream outputStream = Files.newOutputStream(resource);
        IOUtils.copy(inputStream, outputStream);
        inputStream.close();
        outputStream.close();
        return resource;
    } catch (ClosedByInterruptException e) {
        throw new InterruptedException();
    }
}
From source file:org.hara.sodra.utils.SodraUtilsTest.java
@Before
public void setUp() throws Exception {
    this.environmentVariables.set(SodraConstants.SODRA_DATA_DIR, "/tmp/sodra/data");
    Path solrHome = SodraUtils.getSolrHome();
    Files.createDirectories(solrHome);
    Path solrTemplateConfDir = Paths.get(solrHome.getParent().toString(), "index_template_config", "conf");
    Files.createDirectories(solrTemplateConfDir);
    Files.createTempFile(solrTemplateConfDir, "prefix.", ".suffix");
}
From source file:org.forgerock.openidm.maintenance.upgrade.FileStateCheckerTest.java
@BeforeMethod
public void setupTempChecksumFile() throws IOException, NoSuchAlgorithmException {
    tempFile = Files.createTempFile(tempPath, null, null);
}