Example usage for java.nio.file Path getFileName

List of usage examples for java.nio.file Path getFileName

Introduction

On this page you can find usage examples for java.nio.file Path getFileName.

Prototype

Path getFileName();

Document

Returns the name of the file or directory denoted by this path as a Path object.
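
Before the project examples below, here is a minimal, self-contained sketch of the call; the class name and file path are purely illustrative. Note that getFileName() returns null for a root path such as "/", since a root has no name element.

import java.nio.file.Path;
import java.nio.file.Paths;

public class GetFileNameExample {

    public static void main(String[] args) {
        // Illustrative path; any absolute or relative path behaves the same way.
        Path path = Paths.get("/tmp/reports/summary.txt");

        // getFileName() returns the last element of the path as a Path object.
        Path fileName = path.getFileName();
        System.out.println(fileName);            // prints: summary.txt
        System.out.println(fileName.toString()); // same value as a String

        // A root path has no name element, so getFileName() returns null.
        Path root = Paths.get("/");
        System.out.println(root.getFileName());  // prints: null
    }
}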

Usage

From source file:com.liferay.sync.engine.session.Session.java

private HttpEntity _getEntity(Map<String, Object> parameters) throws Exception {

    Path deltaFilePath = (Path) parameters.get("deltaFilePath");
    Path filePath = (Path) parameters.get("filePath");
    String zipFileIds = (String) parameters.get("zipFileIds");
    Path zipFilePath = (Path) parameters.get("zipFilePath");

    MultipartEntityBuilder multipartEntityBuilder = _getMultipartEntityBuilder(parameters);

    if (deltaFilePath != null) {
        multipartEntityBuilder.addPart("deltaFile", _getFileBody(deltaFilePath,
                (String) parameters.get("mimeType"), (String) parameters.get("title")));
    } else if (filePath != null) {
        multipartEntityBuilder.addPart("file",
                _getFileBody(filePath, (String) parameters.get("mimeType"), (String) parameters.get("title")));
    } else if (zipFileIds != null) {
        return _getURLEncodedFormEntity(parameters);
    } else if (zipFilePath != null) {
        multipartEntityBuilder.addPart("zipFile",
                _getFileBody(zipFilePath, "application/zip", String.valueOf(zipFilePath.getFileName())));
    }

    return multipartEntityBuilder.build();
}

From source file:net.sf.jabref.gui.journals.ManageJournalsPanel.java

private boolean readyToClose() {
    Path filePath;
    if (newFile.isSelected()) {
        if (newNameTf.getText().isEmpty()) {
            if (tableModel.getRowCount() > 0) {
                JOptionPane.showMessageDialog(this,
                        Localization.lang("You must choose a filename to store journal abbreviations"),
                        Localization.lang("Store journal abbreviations"), JOptionPane.ERROR_MESSAGE);
                return false;
            } else {
                return true;
            }
        } else {
            filePath = Paths.get(newNameTf.getText());
            return !Files.exists(filePath) || (JOptionPane.showConfirmDialog(this,
                    Localization.lang("'%0' exists. Overwrite file?", filePath.getFileName().toString()),
                    Localization.lang("Store journal abbreviations"),
                    JOptionPane.OK_CANCEL_OPTION) == JOptionPane.OK_OPTION);
        }
    }
    return true;
}

From source file:com.searchcode.app.jobs.IndexSvnRepoJob.java

/**
 * Indexes all the documents in the path provided. Will also remove anything from the index if it is not on disk.
 * Generally this is a slow update used only for the initial clone of a repository.
 * NB this can be used for updates but it will be much slower as it needs to walk the contents of the disk.
 */
public void indexDocsByPath(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        boolean existingRepo) {
    SearchcodeLib scl = Singleton.getSearchCodeLib(); // Should have data object by this point
    List<String> fileLocations = new ArrayList<>();
    Queue<CodeIndexDocument> codeIndexDocumentQueue = Singleton.getCodeIndexQueue();

    // Convert once outside the main loop
    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);
    boolean lowMemory = this.LOWMEMORY;

    try {
        Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {

                while (CodeIndexer.shouldPauseAdding()) {
                    Singleton.getLogger().info("Pausing parser.");
                    try {
                        Thread.sleep(SLEEPTIME);
                    } catch (InterruptedException ex) {
                        // Interrupted while paused; keep looping and re-check the pause state
                    }
                }

                // Convert Path file to unix style that way everything is easier to reason about
                String fileParent = FilenameUtils.separatorsToUnix(file.getParent().toString());
                String fileToString = FilenameUtils.separatorsToUnix(file.toString());
                String fileName = file.getFileName().toString();
                String md5Hash = Values.EMPTYSTRING;

                if (fileParent.endsWith("/.svn") || fileParent.contains("/.svn/")) {
                    return FileVisitResult.CONTINUE;
                }

                List<String> codeLines;
                try {
                    codeLines = Helpers.readFileLines(fileToString, MAXFILELINEDEPTH);
                } catch (IOException ex) {
                    return FileVisitResult.CONTINUE;
                }

                // Use try-with-resources so the stream is closed even if hashing fails
                try (FileInputStream fis = new FileInputStream(fileToString)) {
                    md5Hash = org.apache.commons.codec.digest.DigestUtils.md5Hex(fis);
                } catch (IOException ex) {
                    Singleton.getLogger().warning("Unable to generate MD5 for " + fileToString);
                }

                // is the file minified?
                if (scl.isMinified(codeLines)) {
                    Singleton.getLogger().info("Appears to be minified will not index  " + fileToString);
                    return FileVisitResult.CONTINUE;
                }

                String languageName = scl.languageGuesser(fileName, codeLines);
                String fileLocation = fileToString.replace(fileRepoLocations, Values.EMPTYSTRING)
                        .replace(fileName, Values.EMPTYSTRING);
                String fileLocationFilename = fileToString.replace(fileRepoLocations, Values.EMPTYSTRING);
                String repoLocationRepoNameLocationFilename = fileToString;

                String newString = getBlameFilePath(fileLocationFilename);
                String codeOwner = getInfoExternal(codeLines.size(), repoName, fileRepoLocations, newString)
                        .getName();

                // If low memory don't add to the queue, just index it directly
                if (lowMemory) {
                    CodeIndexer.indexDocument(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                } else {
                    Singleton.incrementCodeIndexLinesCount(codeLines.size());
                    codeIndexDocumentQueue.add(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                }

                fileLocations.add(fileLocationFilename);
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                + "\n with message: " + ex.getMessage());
    }

    if (existingRepo) {
        CodeSearcher cs = new CodeSearcher();
        List<String> indexLocations = cs.getRepoDocuments(repoName);

        for (String file : indexLocations) {
            if (!fileLocations.contains(file)) {
                Singleton.getLogger().info("Missing from disk, removing from index " + file);
                try {
                    CodeIndexer.deleteByFileLocationFilename(file);
                } catch (IOException ex) {
                    Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                            + "\n with message: " + ex.getMessage());
                }
            }
        }
    }
}

From source file:org.dawnsci.marketplace.ui.editors.OverviewPage.java

protected void uploadFile(CloseableHttpClient client, Path path, String segment)
        throws ClientProtocolException, IOException {
    String token = getCsrfToken(client);
    String url = getMarketplaceUrl();

    // upload the p2-repository file
    HttpPost httpPost = new HttpPost(url + "/" + segment);
    httpPost.addHeader(X_CSRF_TOKEN, token);
    HttpEntity file = MultipartEntityBuilder.create().addBinaryBody("file", path.toFile())
            .addTextBody("id", solution.getId().toString()).build();
    httpPost.setEntity(file);
    HttpResponse response = client.execute(httpPost);
    int statusCode = response.getStatusLine().getStatusCode();
    if (statusCode == 200) {
        String result = EntityUtils.toString(response.getEntity());
        // obtain some key values from the server version
        // and update the local instance
        Node node = MarketplaceSerializer.deSerializeSolution(result);
        solution.setChanged(node.getChanged());
        addMessage(IMessage.INFORMATION, "File " + path.getFileName() + " uploaded");
    } else {
        String reasonPhrase = response.getStatusLine().getReasonPhrase();
        addMessage(IMessage.ERROR, reasonPhrase);
    }
}

From source file:com.upplication.s3fs.util.AmazonS3ClientMock.java

@Override
public List<Bucket> listBuckets() throws AmazonClientException {
    List<Bucket> result = new ArrayList<>();
    try {
        for (Path path : Files.newDirectoryStream(base)) {
            String bucketName = path.getFileName().toString();
            Bucket bucket = new Bucket(bucketName);
            bucket.setOwner(getOwner(bucketName));
            bucket.setCreationDate(
                    new Date(Files.readAttributes(path, BasicFileAttributes.class).creationTime().toMillis()));
            result.add(bucket);
        }
    } catch (IOException e) {
        throw new AmazonClientException(e);
    }
    return result;
}

From source file:com.searchcode.app.jobs.IndexGitRepoJob.java

/**
 * Indexes all the documents in the path provided. Will also remove anything from the index if it is not on disk.
 * Generally this is a slow update used only for the initial clone of a repository.
 * NB this can be used for updates but it will be much slower as it needs to walk the contents of the disk.
 */
public void indexDocsByPath(Path path, String repoName, String repoLocations, String repoRemoteLocation,
        boolean existingRepo) {
    SearchcodeLib scl = Singleton.getSearchCodeLib(); // Should have data object by this point
    List<String> fileLocations = new ArrayList<>();
    Queue<CodeIndexDocument> codeIndexDocumentQueue = Singleton.getCodeIndexQueue();

    // Convert once outside the main loop
    String fileRepoLocations = FilenameUtils.separatorsToUnix(repoLocations);
    boolean lowMemory = this.LOWMEMORY;
    boolean useSystemGit = this.USESYSTEMGIT;

    try {
        Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {

                while (CodeIndexer.shouldPauseAdding()) {
                    Singleton.getLogger().info("Pausing parser.");
                    try {
                        Thread.sleep(SLEEPTIME);
                    } catch (InterruptedException ex) {
                        // Interrupted while paused; keep looping and re-check the pause state
                    }
                }

                // Convert Path file to unix style that way everything is easier to reason about
                String fileParent = FilenameUtils.separatorsToUnix(file.getParent().toString());
                String fileToString = FilenameUtils.separatorsToUnix(file.toString());
                String fileName = file.getFileName().toString();
                String md5Hash = Values.EMPTYSTRING;

                if (fileParent.endsWith("/.git") || fileParent.contains("/.git/")) {
                    return FileVisitResult.CONTINUE;
                }

                List<String> codeLines;
                try {
                    codeLines = Helpers.readFileLines(fileToString, MAXFILELINEDEPTH);
                } catch (IOException ex) {
                    return FileVisitResult.CONTINUE;
                }

                // Use try-with-resources so the stream is closed even if hashing fails
                try (FileInputStream fis = new FileInputStream(fileToString)) {
                    md5Hash = org.apache.commons.codec.digest.DigestUtils.md5Hex(fis);
                } catch (IOException ex) {
                    Singleton.getLogger().warning("Unable to generate MD5 for " + fileToString);
                }

                // is the file minified?
                if (scl.isMinified(codeLines)) {
                    Singleton.getLogger().info("Appears to be minified will not index  " + fileToString);
                    return FileVisitResult.CONTINUE;
                }

                String languageName = scl.languageGuesser(fileName, codeLines);
                String fileLocation = fileToString.replace(fileRepoLocations, Values.EMPTYSTRING)
                        .replace(fileName, Values.EMPTYSTRING);
                String fileLocationFilename = fileToString.replace(fileRepoLocations, Values.EMPTYSTRING);
                String repoLocationRepoNameLocationFilename = fileToString;

                String newString = getBlameFilePath(fileLocationFilename);
                List<CodeOwner> owners;
                if (useSystemGit) {
                    owners = getBlameInfoExternal(codeLines.size(), repoName, fileRepoLocations, newString);
                } else {
                    owners = getBlameInfo(codeLines.size(), repoName, fileRepoLocations, newString);
                }

                String codeOwner = scl.codeOwner(owners);

                // If low memory don't add to the queue, just index it directly
                if (lowMemory) {
                    CodeIndexer.indexDocument(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                } else {
                    Singleton.incrementCodeIndexLinesCount(codeLines.size());
                    codeIndexDocumentQueue.add(new CodeIndexDocument(repoLocationRepoNameLocationFilename,
                            repoName, fileName, fileLocation, fileLocationFilename, md5Hash, languageName,
                            codeLines.size(), StringUtils.join(codeLines, " "), repoRemoteLocation, codeOwner));
                }

                fileLocations.add(fileLocationFilename);
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                + "\n with message: " + ex.getMessage());
    }

    if (existingRepo) {
        CodeSearcher cs = new CodeSearcher();
        List<String> indexLocations = cs.getRepoDocuments(repoName);

        for (String file : indexLocations) {
            if (!fileLocations.contains(file)) {
                Singleton.getLogger().info("Missing from disk, removing from index " + file);
                try {
                    CodeIndexer.deleteByFileLocationFilename(file);
                } catch (IOException ex) {
                    Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass()
                            + "\n with message: " + ex.getMessage());
                }
            }
        }
    }
}

From source file:fr.ortolang.diffusion.client.cmd.CopyCommand.java

private void copy(Path localPath, String workspace, String remotePath) {
    try {
        Files.walkFileTree(localPath, new FileVisitor<Path>() {

            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                switch (mode) {
                case "objects":
                    String remoteDir = remotePath + localPath.getParent().relativize(dir).toString();
                    System.out.println("Copying dir " + dir + " to " + workspace + ":" + remoteDir);
                    try {
                        client.writeCollection(workspace, remoteDir, "");
                    } catch (OrtolangClientException | OrtolangClientAccountException e) {
                        e.printStackTrace();
                        errors.append("-> Unable to copy dir ").append(dir).append(" to ").append(remoteDir)
                                .append("\r\n");
                        return FileVisitResult.TERMINATE;
                    }
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                switch (mode) {
                case "objects":
                    String remoteFile = remotePath + localPath.getParent().relativize(file).toString();
                    System.out.println("Copying file " + file + " to " + workspace + ":" + remoteFile);
                    try {
                        client.writeDataObject(workspace, remoteFile, "", file.toFile(), null);
                    } catch (OrtolangClientException | OrtolangClientAccountException e) {
                        e.printStackTrace();
                        errors.append("-> Unable to copy file ").append(file).append(" to ").append(remoteFile)
                                .append("\r\n");
                        return FileVisitResult.TERMINATE;
                    }
                    break;
                case "metadata":
                    String remoteDir = remotePath
                            + localPath.getParent().relativize(file).getParent().toString();
                    System.out.println("Creating metadata file " + file + " to " + workspace + ":" + remoteDir);
                    String name = file.getFileName().toString();
                    try {
                        client.writeMetaData(workspace, remoteDir, name, null, file.toFile());
                    } catch (OrtolangClientException | OrtolangClientAccountException e) {
                        e.printStackTrace();
                        errors.append("-> Unable to copy file ").append(file).append(" to ").append(remoteDir)
                                .append("\r\n");
                        return FileVisitResult.TERMINATE;
                    }
                    break;
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                return FileVisitResult.CONTINUE;
            }

        });
    } catch (IOException e) {
        System.out.println("Unable to walk file tree: " + e.getMessage());
    }
}

From source file:com.upplication.s3fs.util.AmazonS3ClientMock.java

private S3Element parse(Path elem, Path bucket) throws IOException {
    S3Object object = new S3Object();

    String bucketName = bucket.getFileName().toString();
    object.setBucketName(bucketName);

    String key = bucket.relativize(elem).toString().replaceAll("%2F", "/");
    boolean dir = key.endsWith("/") || key.isEmpty();
    object.setKey(key);

    ObjectMetadata metadata = new ObjectMetadata();
    BasicFileAttributes attr = Files.readAttributes(elem, BasicFileAttributes.class);
    metadata.setLastModified(new Date(attr.lastAccessTime().toMillis()));
    if (dir) {
        metadata.setContentLength(0);
        object.setObjectContent(null);
    } else {
        metadata.setContentLength(attr.size());
        object.setObjectContent(new ByteArrayInputStream(Files.readAllBytes(elem)));
    }

    object.setObjectMetadata(metadata);
    AccessControlList permission = createAclPermission(elem, bucketName);

    return new S3Element(object, permission, dir);
}

From source file:de.tiqsolutions.hdfs.HadoopFileSystemProvider.java

private void remoteCopy(Path source, Path target, CopyOption... options) throws IOException {
    Configuration configuration = getConfiguration();
    Path tmp = target.getParent();
    Path dest = null;
    do {
        dest = tmp.resolve(String.format("tmp%s/", System.currentTimeMillis()));
    } while (Files.exists(dest));
    try {
        DistCpOptions distCpOptions = new DistCpOptions(
                Arrays.asList(((HadoopFileSystemPath) source).getPath()),
                ((HadoopFileSystemPath) dest).getPath());
        List<CopyOption> optionList = Arrays.asList(options);

        distCpOptions.setOverwrite(optionList.contains(StandardCopyOption.REPLACE_EXISTING));
        try {
            DistCp distCp = new DistCp(configuration, distCpOptions);
            Job job = distCp.execute();
            job.waitForCompletion(true);
        } catch (Exception e) {
            throw new IOException(e.getLocalizedMessage(), e);
        }
        move(dest.resolve(source.getFileName()), target, options);
    } finally {
        delete(dest, false);
    }

}