Example usage for java.io File setReadable

List of usage examples for java.io File setReadable

Introduction

On this page you can find example usage for java.io File setReadable.

Prototype

public boolean setReadable(boolean readable) 

Document

A convenience method to set the owner's read permission for this abstract pathname.
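
For orientation, here is a minimal, self-contained sketch of both overloads (the file name is hypothetical). Both return false when the operation fails, for example on file systems that do not support per-user permissions, so the results are worth checking:

import java.io.File;
import java.io.IOException;

public class SetReadableSketch {
    public static void main(String[] args) throws IOException {
        File file = new File("example.txt"); // hypothetical path
        file.createNewFile();

        // One-arg overload: sets the owner's read permission.
        // setReadable(true) is equivalent to setReadable(true, true).
        boolean ownerRead = file.setReadable(true);

        // Two-arg overload: ownerOnly = false applies the change to everybody,
        // where the underlying file system can distinguish users.
        boolean everyoneRead = file.setReadable(true, false);

        if (!ownerRead || !everyoneRead) {
            System.err.println("Could not change read permission for " + file);
        }
    }
}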

Usage

From source file:com.joeyturczak.jtscanner.ui.MainActivity.java

public void makeFileDirs() {
    File externalDir = Environment.getExternalStorageDirectory();
    String externalDirPath = externalDir.getPath();

    File scannerDir = new File(externalDirPath + getString(R.string.file_directory));
    scannerDir.mkdirs();
    scannerDir.setReadable(true);
}
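
Note that mkdirs() and setReadable() both return booleans that the snippet above ignores. A defensive variant, as a sketch reusing the snippet's names and assuming the same Activity context:

public void makeFileDirs() {
    File externalDir = Environment.getExternalStorageDirectory();
    File scannerDir = new File(externalDir.getPath() + getString(R.string.file_directory));

    // mkdirs() returns false when nothing was created, including the case
    // where the directory already exists, so check isDirectory() as well.
    if (!scannerDir.mkdirs() && !scannerDir.isDirectory()) {
        Log.w("MainActivity", "Could not create " + scannerDir);
    } else if (!scannerDir.setReadable(true)) {
        Log.w("MainActivity", "Could not make " + scannerDir + " readable");
    }
}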

From source file:com.gamerking195.dev.lametric.RestfulWriter.java

@SuppressWarnings("ResultOfMethodCallIgnored")
private void createFiles() {
    if (debug) {
        System.out.println("");
        System.out.println("=================BEGIN DEBUG=================");
        System.out.println("");
    }
    File subDirectories = new File(filePath);
    subDirectories.mkdirs();

    //MINESWINE APP
    File msFile = new File(filePath + "/mineswineapp.json");

    if (msFile.exists())
        msFile.delete();

    try {
        msFile.createNewFile();

        msFile.setWritable(true);
        msFile.setReadable(true);
        FileWriter writer = new FileWriter(msFile);

        String mineswineStatus = getMineswineApp();

        if (debug) {
            System.out.println("MINESWINE: ");
            System.out.println(mineswineStatus);
        }

        writer.write(mineswineStatus);
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
    }

    if (debug) {
        System.out.println("");
    }

    //MOJANG APP
    File mcFile = new File(filePath + "/mojangapp.json");

    if (mcFile.exists())
        mcFile.delete();

    try {
        mcFile.createNewFile();

        mcFile.setWritable(true);
        mcFile.setReadable(true);

        FileWriter writer = new FileWriter(mcFile);

        if (debug)
            System.out.println("SERVICES: ");

        String mojangStatus = getMojangApp();

        if (debug) {
            System.out.println("MOJANG: ");
            System.out.println(mojangStatus);
        }

        writer.write(mojangStatus);
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
    }

    if (debug) {
        System.out.println("");
        System.out.println("=================END DEBUG=================");
    }
}
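
The two try/catch blocks above repeat the same delete, create, set-permissions, and write sequence. A sketch of a shared helper that consolidates it (the helper name is hypothetical, and it assumes the same filePath field):

// Hypothetical helper consolidating the duplicated sequence above.
private void writeJsonFile(String fileName, String contents) throws IOException {
    File file = new File(filePath + "/" + fileName);
    if (file.exists() && !file.delete()) {
        throw new IOException("Could not delete stale file: " + file);
    }
    if (!file.createNewFile() || !file.setWritable(true) || !file.setReadable(true)) {
        throw new IOException("Could not prepare file: " + file);
    }
    // try-with-resources closes the writer even if write() throws
    try (FileWriter writer = new FileWriter(file)) {
        writer.write(contents);
    }
}

createFiles() could then reduce to writeJsonFile("mineswineapp.json", getMineswineApp()) and writeJsonFile("mojangapp.json", getMojangApp()), with the debug printing kept at the call sites.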

From source file:com.emarsys.dyson.storage.DefaultDysonStorage.java

/**
 * Creates the directory at the passed path if it is not
 * already present.
 * 
 * @param pathToDir - the path to the directory
 * @throws DysonException - if it was not possible to create a
 *       writable directory with the passed path.
 */
protected void createDirsIfNotPresent(String pathToDir) throws DysonException {
    File dir = new File(pathToDir);
    if (!dir.exists()) {
        log.debug("creating dir(s) \'{}\'", pathToDir);
        dir.mkdirs();
        dir.setReadable(true);
        dir.setWritable(true);
    }

    boolean isWritableDirectoryPresent = dir.exists() && dir.isDirectory() && dir.canRead() && dir.canWrite();

    if (!isWritableDirectoryPresent) {
        throw new DysonException("Was not able to create directory \'" + pathToDir + "\'");
    }
}

From source file:org.ngrinder.common.util.CompressionUtils.java

/**
 * Untars an input file into an output directory.
 * The output files are created in the output folder, under the
 * same names as the entries of the input .tar file.
 *
 * @param inFile    the input .tar file
 * @param outputDir the output directory
 * @return the {@link List} of {@link File}s with the untarred content
 */
@SuppressWarnings("resource")
public static List<File> untar(final File inFile, final File outputDir) {
    final List<File> untaredFiles = new LinkedList<File>();
    InputStream is = null;
    TarArchiveInputStream debInputStream = null;
    try {
        is = new FileInputStream(inFile);
        debInputStream = (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream("tar", is);
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                if (!outputFile.exists()) {
                    if (!outputFile.mkdirs()) {
                        throw new IllegalStateException(
                                String.format("Couldn't create directory %s.", outputFile.getAbsolutePath()));
                    }
                }
            } else {
                File parentFile = outputFile.getParentFile();
                if (!parentFile.exists()) {
                    parentFile.mkdirs();
                }
                final OutputStream outputFileStream = new FileOutputStream(outputFile);
                try {
                    IOUtils.copy(debInputStream, outputFileStream);
                } finally {
                    IOUtils.closeQuietly(outputFileStream);
                }

                if (FilenameUtils.isExtension(outputFile.getName(), EXECUTABLE_EXTENSION)) {
                    outputFile.setExecutable(true, true);
                }
                outputFile.setReadable(true);
                outputFile.setWritable(true, true);
            }
            untaredFiles.add(outputFile);
        }
    } catch (Exception e) {
        throw processException("Error while untar file", e);
    } finally {
        IOUtils.closeQuietly(is);
        IOUtils.closeQuietly(debInputStream);
    }
    return untaredFiles;
}

From source file:sce.ProcessExecutor.java

/**
 * File Permissions using File and PosixFilePermission
 *
 * @throws IOException
 */
public void setFilePermissions() throws IOException {
    File file = new File("/Users/temp.txt");

    // owner-only changes (default ownerOnly = true): remove execute and read, grant write
    file.setExecutable(false);
    file.setReadable(false);
    file.setWritable(true);

    // change permissions to 777 for all users:
    // the second argument false applies the change to everyone, not just the owner
    file.setExecutable(true, false);
    file.setReadable(true, false);
    file.setWritable(true, false);

    //using PosixFilePermission to set file permissions 777
    Set<PosixFilePermission> perms = new HashSet<>();
    //add owners permission
    perms.add(PosixFilePermission.OWNER_READ);
    perms.add(PosixFilePermission.OWNER_WRITE);
    perms.add(PosixFilePermission.OWNER_EXECUTE);
    //add group permissions
    perms.add(PosixFilePermission.GROUP_READ);
    perms.add(PosixFilePermission.GROUP_WRITE);
    perms.add(PosixFilePermission.GROUP_EXECUTE);
    //add others permissions
    perms.add(PosixFilePermission.OTHERS_READ);
    perms.add(PosixFilePermission.OTHERS_WRITE);
    perms.add(PosixFilePermission.OTHERS_EXECUTE);

    // apply the permission set to the same file as above
    Files.setPosixFilePermissions(file.toPath(), perms);
}
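
Where NIO is available, the nine perms.add(...) calls above can be replaced with PosixFilePermissions.fromString. A minimal sketch, assuming a hypothetical target path:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;

public class PosixPermsSketch {
    public static void main(String[] args) throws IOException {
        Path target = Paths.get("run.sh"); // hypothetical path
        // "rwxrwxrwx" expands to the same nine OWNER/GROUP/OTHERS constants.
        Set<PosixFilePermission> perms = PosixFilePermissions.fromString("rwxrwxrwx");
        Files.setPosixFilePermissions(target, perms);
    }
}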

From source file:com.streamsets.datacollector.cluster.TestShellClusterProvider.java

@Test
public void testCopyDirectory() throws Exception {
    File copyTempDir = new File(tempDir, "copy");
    File srcDir = new File(copyTempDir, "somedir");
    File dstDir = new File(copyTempDir, "dst");
    Assert.assertTrue(srcDir.mkdirs());
    Assert.assertTrue(dstDir.mkdirs());
    File link1 = new File(copyTempDir, "link1");
    File link2 = new File(copyTempDir, "link2");
    File dir1 = new File(copyTempDir, "dir1");
    File file1 = new File(dir1, "f1");
    File file2 = new File(dir1, "f2");
    Assert.assertTrue(dir1.mkdirs());
    Assert.assertTrue(file1.createNewFile());
    Assert.assertTrue(file2.createNewFile());
    file2.setReadable(false);
    file2.setWritable(false);
    file2.setExecutable(false);
    Files.createSymbolicLink(link1.toPath(), dir1.toPath());
    Files.createSymbolicLink(link2.toPath(), link1.toPath());
    Files.createSymbolicLink(new File(srcDir, "dir1").toPath(), link2.toPath());
    File clone = ShellClusterProvider.createDirectoryClone(srcDir, srcDir.getName(), dstDir);
    File cloneF1 = new File(new File(clone, "dir1"), "f1");
    Assert.assertTrue(cloneF1.isFile());
}

From source file:com.blackducksoftware.tools.vuln_collector.VCProcessor.java

/**
 * Creates a directory using the project name. Parses the name to escape
 * offensive characters.
 * 
 * @param reportLocation
 * @param project
 * @return
 * @throws Exception
 */
private File prepareSubDirectory(File reportLocation, String project) throws Exception {
    project = formatProjectPath(project);
    File reportLocationSubDir = new File(reportLocation.toString() + File.separator + project);
    if (!reportLocationSubDir.exists()) {
        boolean dirsMade = reportLocationSubDir.mkdirs();
        if (!dirsMade) {
            throw new Exception("Unable to create report sub-directory for project: " + project);
        }
    }

    // Copy the web resources into this new location
    ClassLoader classLoader = getClass().getClassLoader();
    File webresources = new File(classLoader.getResource(WEB_RESOURCE).getFile());

    if (!webresources.exists()) {
        throw new Exception("Fatal exception, internal web resources are missing!");
    }

    File[] webSubDirs = webresources.listFiles();
    if (webSubDirs.length == 0) {
        throw new Exception(
                "Fatal exception, internal web resources sub directories are missing!  Corrupt archive.");
    }

    boolean readable = webresources.setReadable(true);
    if (!readable) {
        throw new Exception("Fatal. Cannot read internal web resource directory!");
    }

    try {
        for (File webSubDir : webSubDirs) {
            if (webSubDir.isDirectory()) {
                FileUtils.copyDirectoryToDirectory(webSubDir, reportLocationSubDir);
            } else {
                FileUtils.copyFileToDirectory(webSubDir, reportLocationSubDir);
            }
        }
    } catch (IOException ioe) {
        throw new Exception("Error during creation of report directory", ioe);
    }

    return reportLocationSubDir;
}

From source file:org.jboss.as.test.integration.management.cli.ModuleTestCase.java

@Test
public void addModuleWithDirectoryError() throws Exception {
    if (Util.isWindows()) {
        return;
    }
    String dir = System.currentTimeMillis() + "dir";
    File dirFile = new File(TestSuiteEnvironment.getTmpDir() + File.separator + dir);
    dirFile.mkdir();
    try {
        String content = "HELLO WORLD";
        String fileName = System.currentTimeMillis() + "file.txt";
        File f = new File(dirFile, fileName);
        f.createNewFile();
        Files.write(content.getBytes(StandardCharsets.UTF_8), f);
        f.setReadable(false);
        boolean ret = cli.sendLine(
                "module add --name=" + MODULE_NAME + " --resources=" + dirFile.getAbsolutePath(), true);
        Assert.assertFalse(ret);
        String output = cli.readOutput();
        Assert.assertTrue(output.contains("Module not added"));
        File testModuleRoot = new File(getModulePath(), MODULE_NAME.replace('.', File.separatorChar));
        Assert.assertFalse(testModuleRoot.exists());
    } finally {
        FileUtils.deleteDirectory(dirFile);
    }
}

From source file:alluxio.shell.command.CpCommandIntegrationTest.java

@Test
public void copyDirectoryFromLocalAtomic() throws Exception {
    File localDir = new File(mLocalAlluxioCluster.getAlluxioHome() + "/localDir");
    localDir.mkdir();
    File testFile = generateFileContent("/localDir/testFile", BufferUtils.getIncreasingByteArray(10));
    File testDir = testFile.getParentFile();
    AlluxioURI alluxioDirPath = new AlluxioURI("/testDir");
    testFile.setReadable(false);

    String[] cmd = { "cp", "file://" + testDir.getPath(), alluxioDirPath.getPath() };
    Assert.assertEquals(-1, mFsShell.run(cmd));
    Assert.assertEquals(testFile.getPath() + " (Permission denied)\n", mOutput.toString());
    Assert.assertFalse(mFileSystem.exists(alluxioDirPath));
    mOutput.reset();

    // If we put a copyable file in the directory, we should be able to copy just that file
    generateFileContent("/localDir/testFile2", BufferUtils.getIncreasingByteArray(20));
    Assert.assertEquals(-1, mFsShell.run(cmd));
    Assert.assertEquals(testFile.getPath() + " (Permission denied)\n", mOutput.toString());
    Assert.assertTrue(mFileSystem.exists(alluxioDirPath));
    Assert.assertTrue(mFileSystem.exists(new AlluxioURI("/testDir/testFile2")));
    Assert.assertFalse(mFileSystem.exists(new AlluxioURI("/testDir/testFile")));

    // The directory should also be deleted from the Alluxio filesystem when all files
    // in the directory fail to copy.
    File innerDir = new File(mLocalAlluxioCluster.getAlluxioHome() + "/localDir/innerDir");
    innerDir.mkdir();
    File innerFile = generateFileContent("/localDir/innerDir/innerFile1",
            BufferUtils.getIncreasingByteArray(30));
    innerFile.setReadable(false);
    Assert.assertEquals(-1, mFsShell.run(cmd));
    Assert.assertTrue(mFileSystem.exists(alluxioDirPath));
    Assert.assertTrue(mFileSystem.exists(new AlluxioURI("/testDir/testFile2")));
    Assert.assertFalse(mFileSystem.exists(new AlluxioURI("/testDir/testFile")));
    Assert.assertFalse(mFileSystem.exists(new AlluxioURI("/testDir/innerDir")));
    Assert.assertFalse(mFileSystem.exists(new AlluxioURI("/testDir/innerDir/innerFile1")));
}

From source file:org.apache.hive.spark.client.AbstractSparkClient.java

private Future<Void> startDriver(final RpcServer rpcServer, final String clientId, final String secret)
        throws IOException {
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());

    String sparkHome = getSparkHome();

    String sparkLogDir = conf.get("hive.spark.log.dir");
    if (sparkLogDir == null) {
        if (sparkHome == null) {
            sparkLogDir = "./target/";
        } else {
            sparkLogDir = sparkHome + "/logs/";
        }
    }

    String osxTestOpts = "";
    if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
        osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
    }

    String driverJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir, osxTestOpts,
            conf.get(DRIVER_OPTS_KEY));
    String executorJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir,
            osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));

    // Create a file with all the job properties to be read by spark-submit. Change the
    // file's permissions so that only the owner can read it. This avoids having the
    // connection secret show up in the child process's command line.
    File properties = File.createTempFile("spark-submit.", ".properties");
    if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
        throw new IOException("Cannot change permissions of job properties file.");
    }
    properties.deleteOnExit();

    Properties allProps = new Properties();
    // first load the defaults from spark-defaults.conf if available
    try {
        URL sparkDefaultsUrl = Thread.currentThread().getContextClassLoader()
                .getResource("spark-defaults.conf");
        if (sparkDefaultsUrl != null) {
            LOG.info("Loading spark defaults configs from: " + sparkDefaultsUrl);
            allProps.load(new ByteArrayInputStream(Resources.toByteArray(sparkDefaultsUrl)));
        }
    } catch (Exception e) {
        String msg = "Exception trying to load spark-defaults.conf: " + e;
        throw new IOException(msg, e);
    }
    // then load the SparkClientImpl config
    for (Map.Entry<String, String> e : conf.entrySet()) {
        allProps.put(e.getKey(), conf.get(e.getKey()));
    }
    allProps.put(SparkClientFactory.CONF_CLIENT_ID, clientId);
    allProps.put(SparkClientFactory.CONF_KEY_SECRET, secret);
    allProps.put(DRIVER_OPTS_KEY, driverJavaOpts);
    allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts);

    String isTesting = conf.get("spark.testing");
    if (isTesting != null && isTesting.equalsIgnoreCase("true")) {
        String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH"));
        if (!hiveHadoopTestClasspath.isEmpty()) {
            String extraDriverClasspath = Strings.nullToEmpty((String) allProps.get(DRIVER_EXTRA_CLASSPATH));
            if (extraDriverClasspath.isEmpty()) {
                allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
            } else {
                extraDriverClasspath = extraDriverClasspath.endsWith(File.pathSeparator) ? extraDriverClasspath
                        : extraDriverClasspath + File.pathSeparator;
                allProps.put(DRIVER_EXTRA_CLASSPATH, extraDriverClasspath + hiveHadoopTestClasspath);
            }

            String extraExecutorClasspath = Strings
                    .nullToEmpty((String) allProps.get(EXECUTOR_EXTRA_CLASSPATH));
            if (extraExecutorClasspath.isEmpty()) {
                allProps.put(EXECUTOR_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
            } else {
                extraExecutorClasspath = extraExecutorClasspath.endsWith(File.pathSeparator)
                        ? extraExecutorClasspath
                        : extraExecutorClasspath + File.pathSeparator;
                allProps.put(EXECUTOR_EXTRA_CLASSPATH, extraExecutorClasspath + hiveHadoopTestClasspath);
            }
        }
    }

    Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8);
    try {
        allProps.store(writer, "Spark Context configuration");
    } finally {
        writer.close();
    }

    // Define how to pass options to the child process. If launching in client (or local)
    // mode, the driver options need to be passed directly on the command line. Otherwise,
    // SparkSubmit will take care of that for us.
    String master = conf.get("spark.master");
    Preconditions.checkArgument(master != null, "spark.master is not defined.");
    String deployMode = conf.get(SPARK_DEPLOY_MODE);

    if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) {
        String executorCores = conf.get("spark.executor.cores");
        if (executorCores != null) {
            addExecutorCores(executorCores);
        }

        String executorMemory = conf.get("spark.executor.memory");
        if (executorMemory != null) {
            addExecutorMemory(executorMemory);
        }

        String numOfExecutors = conf.get("spark.executor.instances");
        if (numOfExecutors != null) {
            addNumExecutors(numOfExecutors);
        }
    }
    // The options --principal/--keytab do not work with --proxy-user in spark-submit.sh
    // (see HIVE-15485, SPARK-5493, SPARK-19143), so Hive can only support doAs or
    // delegation token renewal, but not both. Since doAs is the more common case, if both
    // are needed we choose to favor doAs. So when doAs is enabled we use the kinit command;
    // otherwise, we pass the principal/keytab to Spark to support token renewal for
    // long-running applications.
    if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) {
        String principal = SecurityUtil
                .getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), "0.0.0.0");
        String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
        boolean isDoAsEnabled = hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS);
        if (StringUtils.isNotBlank(principal) && StringUtils.isNotBlank(keyTabFile)) {
            addKeytabAndPrincipal(isDoAsEnabled, keyTabFile, principal);
        }
    }
    if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
        try {
            String currentUser = Utils.getUGI().getShortUserName();
            // do not do impersonation in CLI mode
            if (!currentUser.equals(System.getProperty("user.name"))) {
                LOG.info("Attempting impersonation of " + currentUser);
                addProxyUser(currentUser);
            }
        } catch (Exception e) {
            String msg = "Cannot obtain username: " + e;
            throw new IllegalStateException(msg, e);
        }
    }

    String regStr = conf.get("spark.kryo.registrator");
    if (HIVE_KRYO_REG_NAME.equals(regStr)) {
        addJars(SparkClientUtilities.findKryoRegistratorJar(hiveConf));
    }

    addPropertiesFile(properties.getAbsolutePath());
    addClass(RemoteDriver.class.getName());

    String jar = "spark-internal";
    if (SparkContext.jarOfClass(this.getClass()).isDefined()) {
        jar = SparkContext.jarOfClass(this.getClass()).get();
    }
    addExecutableJar(jar);

    addAppArg(RemoteDriver.REMOTE_DRIVER_HOST_CONF);
    addAppArg(serverAddress);
    addAppArg(RemoteDriver.REMOTE_DRIVER_PORT_CONF);
    addAppArg(serverPort);

    // hive.spark.* keys are passed down to the RemoteDriver via REMOTE_DRIVER_CONF
    // so that they are used only in the remote driver, not in the SparkContext,
    // as --properties-file contains the spark.* keys that are meant for the SparkConf object.
    for (String hiveSparkConfKey : RpcConfiguration.HIVE_SPARK_RSC_CONFIGS) {
        String value = RpcConfiguration.getValue(hiveConf, hiveSparkConfKey);
        addAppArg(RemoteDriver.REMOTE_DRIVER_CONF);
        addAppArg(String.format("%s=%s", hiveSparkConfKey, value));
    }

    return launchDriver(isTesting, rpcServer, clientId);
}