Example usage for org.apache.commons.io FilenameUtils removeExtension

Introduction

This page lists example usages of org.apache.commons.io.FilenameUtils.removeExtension, collected from open source projects.

Prototype

public static String removeExtension(String filename) 

Document

Removes the extension from a filename.
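Before reading the project examples below, it helps to pin down the method's behavior on a few representative inputs. A minimal sketch (the expected outputs follow the Commons IO Javadoc; the class name RemoveExtensionDemo is illustrative):

import org.apache.commons.io.FilenameUtils;

public class RemoveExtensionDemo {
    public static void main(String[] args) {
        // "foo.txt" -> "foo": only the last extension is removed
        System.out.println(FilenameUtils.removeExtension("foo.txt"));
        // "a/b/c.jpg" -> "a/b/c": the directory part is untouched
        System.out.println(FilenameUtils.removeExtension("a/b/c.jpg"));
        // "a/b/c" -> "a/b/c": no extension, so the input is returned unchanged
        System.out.println(FilenameUtils.removeExtension("a/b/c"));
        // "a.b/c" -> "a.b/c": a dot in a directory name is not an extension
        System.out.println(FilenameUtils.removeExtension("a.b/c"));
    }
}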

Usage

From source file:org.alfresco.web.bean.wcm.CreateFormWizard.java

/**
 * @return the human friendly name for this form.
 */
public String getFormName() {
    return (this.formName == null && this.getSchemaFileName() != null
            ? FilenameUtils.removeExtension(this.getSchemaFileName())
            : this.formName);
}

From source file:org.alfresco.web.bean.wcm.CreateFormWizard.java

/**
 * @return the title for this form.
 */
public String getFormTitle() {
    return (this.formTitle == null && this.getSchemaFileName() != null
            ? FilenameUtils.removeExtension(this.getSchemaFileName())
            : this.formTitle);
}

From source file:org.alfresco.web.bean.wcm.CreateFormWizard.java

/**
 * @return the title for this renderingEngineTemplate.
 */
public String getRenderingEngineTemplateTitle() {
    return (this.renderingEngineTemplateTitle == null && this.getRenderingEngineTemplateFileName() != null
            ? FilenameUtils.removeExtension(this.getRenderingEngineTemplateFileName())
            : this.renderingEngineTemplateTitle);
}

From source file:org.alfresco.web.bean.wcm.CreateWebContentWizard.java

/** Overrides in order to strip an xml extension if the user entered it */
@Override
public String getFileName() {
    final String result = super.getFileName();
    return (result != null && MimetypeMap.MIMETYPE_XML.equals(this.mimeType) && this.getFormName() != null
            && "xml".equals(FilenameUtils.getExtension(result).toLowerCase())
                    ? FilenameUtils.removeExtension(result)
                    : result);
}

From source file:org.ambraproject.filestore.impl.RepoObjectIDMapper.java

public String zipToFSID(String doi, String fileName) {
    doi = doi.toLowerCase();

    Matcher m1 = p1.matcher(doi);
    if (m1.matches()) {
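        // Split the name from its extension so the extension alone can be upper-cased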
        String name = FilenameUtils.removeExtension(fileName);
        String ext = FilenameUtils.getExtension(fileName).toUpperCase();
        fileName = name + (ext != null && !ext.isEmpty() ? "." + ext : "");
        return m1.group(2) + "/journal." + fileName;
    }

    // TODO: handle other cases, like annotation
    return doi + "/" + fileName;
}

From source file:org.apache.ambari.server.stack.StackDirectory.java

/**
 * Parse all stack upgrade files for the stack.
 *
 * @param subDirs  stack sub directories
 * @throws AmbariException if unable to parse stack upgrade file
 */
private void parseUpgradePacks(Collection<String> subDirs) throws AmbariException {
    Map<String, UpgradePack> upgradeMap = new HashMap<String, UpgradePack>();
    if (subDirs.contains(UPGRADE_PACK_FOLDER_NAME)) {
        File f = new File(getAbsolutePath() + File.separator + UPGRADE_PACK_FOLDER_NAME);
        if (f.isDirectory()) {
            upgradesDir = f.getAbsolutePath();
            for (File upgradeFile : f.listFiles(XML_FILENAME_FILTER)) {
                try {
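                    // Key the pack by the upgrade file's name without its .xml extension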
                    upgradeMap.put(FilenameUtils.removeExtension(upgradeFile.getName()),
                            unmarshaller.unmarshal(UpgradePack.class, upgradeFile));
                } catch (JAXBException e) {
                    throw new AmbariException(
                            "Unable to parse stack upgrade file at location: " + upgradeFile.getAbsolutePath(),
                            e);
                }
            }
        }
    }

    if (upgradesDir == null) {
        LOG.info("Stack '{}' doesn't contain an upgrade directory ", getPath());
    }

    if (!upgradeMap.isEmpty()) {
        upgradePacks = upgradeMap;
    }
}

From source file:org.apache.archiva.rest.services.DefaultRepositoriesService.java

@Override
public Boolean copyArtifact(ArtifactTransferRequest artifactTransferRequest)
        throws ArchivaRestServiceException {
    // check parameters
    String userName = getAuditInformation().getUser().getUsername();
    if (StringUtils.isBlank(userName)) {
        throw new ArchivaRestServiceException("copyArtifact call: userName not found", null);
    }

    if (StringUtils.isBlank(artifactTransferRequest.getRepositoryId())) {
        throw new ArchivaRestServiceException("copyArtifact call: sourceRepositoryId cannot be null", null);
    }

    if (StringUtils.isBlank(artifactTransferRequest.getTargetRepositoryId())) {
        throw new ArchivaRestServiceException("copyArtifact call: targetRepositoryId cannot be null", null);
    }

    ManagedRepository source = null;
    try {
        source = managedRepositoryAdmin.getManagedRepository(artifactTransferRequest.getRepositoryId());
    } catch (RepositoryAdminException e) {
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }

    if (source == null) {
        throw new ArchivaRestServiceException(
                "cannot find repository with id " + artifactTransferRequest.getRepositoryId(), null);
    }

    ManagedRepository target = null;
    try {
        target = managedRepositoryAdmin.getManagedRepository(artifactTransferRequest.getTargetRepositoryId());
    } catch (RepositoryAdminException e) {
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }

    if (target == null) {
        throw new ArchivaRestServiceException(
                "cannot find repository with id " + artifactTransferRequest.getTargetRepositoryId(), null);
    }

    if (StringUtils.isBlank(artifactTransferRequest.getGroupId())) {
        throw new ArchivaRestServiceException("groupId is mandatory", null);
    }

    if (StringUtils.isBlank(artifactTransferRequest.getArtifactId())) {
        throw new ArchivaRestServiceException("artifactId is mandatory", null);
    }

    if (StringUtils.isBlank(artifactTransferRequest.getVersion())) {
        throw new ArchivaRestServiceException("version is mandatory", null);
    }

    if (VersionUtil.isSnapshot(artifactTransferRequest.getVersion())) {
        throw new ArchivaRestServiceException("copy of SNAPSHOT not supported", null);
    }

    // end check parameters

    User user = null;
    try {
        user = securitySystem.getUserManager().findUser(userName);
    } catch (UserNotFoundException e) {
        throw new ArchivaRestServiceException("user " + userName + " not found", e);
    } catch (UserManagerException e) {
        throw new ArchivaRestServiceException("ArchivaRestServiceException:" + e.getMessage(), e);
    }

    // check karma on source : read
    AuthenticationResult authn = new AuthenticationResult(true, userName, null);
    SecuritySession securitySession = new DefaultSecuritySession(authn, user);
    try {
        boolean authz = securitySystem.isAuthorized(securitySession,
                ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS, artifactTransferRequest.getRepositoryId());
        if (!authz) {
            throw new ArchivaRestServiceException(
                    "not authorized to access repo:" + artifactTransferRequest.getRepositoryId(), null);
        }
    } catch (AuthorizationException e) {
        log.error("error reading permission: " + e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }

    // check karma on target: write
    try {
        boolean authz = securitySystem.isAuthorized(securitySession,
                ArchivaRoleConstants.OPERATION_REPOSITORY_UPLOAD,
                artifactTransferRequest.getTargetRepositoryId());
        if (!authz) {
            throw new ArchivaRestServiceException(
                    "not authorized to write to repo:" + artifactTransferRequest.getTargetRepositoryId(), null);
        }
    } catch (AuthorizationException e) {
        log.error("error reading permission: " + e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }

    // sounds good we can continue !

    ArtifactReference artifactReference = new ArtifactReference();
    artifactReference.setArtifactId(artifactTransferRequest.getArtifactId());
    artifactReference.setGroupId(artifactTransferRequest.getGroupId());
    artifactReference.setVersion(artifactTransferRequest.getVersion());
    artifactReference.setClassifier(artifactTransferRequest.getClassifier());
    String packaging = StringUtils.trim(artifactTransferRequest.getPackaging());
    artifactReference.setType(StringUtils.isEmpty(packaging) ? "jar" : packaging);

    try {

        ManagedRepositoryContent sourceRepository = repositoryFactory
                .getManagedRepositoryContent(artifactTransferRequest.getRepositoryId());

        String artifactSourcePath = sourceRepository.toPath(artifactReference);

        if (StringUtils.isEmpty(artifactSourcePath)) {
            log.error("cannot find artifact " + artifactTransferRequest.toString());
            throw new ArchivaRestServiceException("cannot find artifact " + artifactTransferRequest.toString(),
                    null);
        }

        File artifactFile = new File(source.getLocation(), artifactSourcePath);

        if (!artifactFile.exists()) {
            log.error("cannot find artifact " + artifactTransferRequest.toString());
            throw new ArchivaRestServiceException("cannot find artifact " + artifactTransferRequest.toString(),
                    null);
        }

        ManagedRepositoryContent targetRepository = repositoryFactory
                .getManagedRepositoryContent(artifactTransferRequest.getTargetRepositoryId());

        String artifactPath = targetRepository.toPath(artifactReference);

        int lastIndex = artifactPath.lastIndexOf('/');

        String path = artifactPath.substring(0, lastIndex);
        File targetPath = new File(target.getLocation(), path);

        Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
        int newBuildNumber = 1;
        String timestamp = null;

        File versionMetadataFile = new File(targetPath, MetadataTools.MAVEN_METADATA);
        /* unused */ getMetadata(versionMetadataFile);

        if (!targetPath.exists()) {
            targetPath.mkdirs();
        }

        String filename = artifactPath.substring(lastIndex + 1);

        boolean fixChecksums = !(archivaAdministration.getKnownContentConsumers()
                .contains("create-missing-checksums"));

        File targetFile = new File(targetPath, filename);
        if (targetFile.exists() && target.isBlockRedeployments()) {
            throw new ArchivaRestServiceException("artifact already exists in target repo: "
                    + artifactTransferRequest.getTargetRepositoryId() + " and redeployment blocked", null);
        } else {
            copyFile(artifactFile, targetPath, filename, fixChecksums);
            queueRepositoryTask(target.getId(), targetFile);
        }

        // copy source pom to target repo
        String pomFilename = filename;
        if (StringUtils.isNotBlank(artifactTransferRequest.getClassifier())) {
            pomFilename = StringUtils.remove(pomFilename, "-" + artifactTransferRequest.getClassifier());
        }
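        // Swap the artifact's extension for .pom to locate the sibling pom file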
        pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";

        File pomFile = new File(
                new File(source.getLocation(), artifactSourcePath.substring(0, artifactPath.lastIndexOf('/'))),
                pomFilename);

        if (pomFile != null && pomFile.length() > 0) {
            copyFile(pomFile, targetPath, pomFilename, fixChecksums);
            queueRepositoryTask(target.getId(), new File(targetPath, pomFilename));

        }

        // explicitly update only if metadata-updater consumer is not enabled!
        if (!archivaAdministration.getKnownContentConsumers().contains("metadata-updater")) {
            updateProjectMetadata(targetPath.getAbsolutePath(), lastUpdatedTimestamp, timestamp, newBuildNumber,
                    fixChecksums, artifactTransferRequest);

        }

        String msg = "Artifact \'" + artifactTransferRequest.getGroupId() + ":"
                + artifactTransferRequest.getArtifactId() + ":" + artifactTransferRequest.getVersion()
                + "\' was successfully deployed to repository \'"
                + artifactTransferRequest.getTargetRepositoryId() + "\'";
        log.debug("copyArtifact {}", msg);

    } catch (RepositoryException e) {
        log.error("RepositoryException: " + e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    } catch (RepositoryAdminException e) {
        log.error("RepositoryAdminException: " + e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    } catch (IOException e) {
        log.error("IOException: " + e.getMessage(), e);
        throw new ArchivaRestServiceException(e.getMessage(), e);
    }
    return true;
}

From source file:org.apache.archiva.web.api.DefaultFileUploadService.java

protected void savePomFile(String repositoryId, FileMetadata fileMetadata, String groupId, String artifactId,
        String version, String packaging) throws ArchivaRestServiceException {

    try {
        boolean fixChecksums = !(archivaAdministration.getKnownContentConsumers()
                .contains("create-missing-checksums"));

        ManagedRepository repoConfig = managedRepositoryAdmin.getManagedRepository(repositoryId);

        ArtifactReference artifactReference = new ArtifactReference();
        artifactReference.setArtifactId(artifactId);
        artifactReference.setGroupId(groupId);
        artifactReference.setVersion(version);
        artifactReference.setClassifier(fileMetadata.getClassifier());
        artifactReference.setType(packaging);

        ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent(repositoryId);

        String artifactPath = repository.toPath(artifactReference);

        int lastIndex = artifactPath.lastIndexOf('/');

        String path = artifactPath.substring(0, lastIndex);
        File targetPath = new File(repoConfig.getLocation(), path);

        String pomFilename = artifactPath.substring(lastIndex + 1);
        if (StringUtils.isNotEmpty(fileMetadata.getClassifier())) {
            pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier());
        }
        pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";

        copyFile(new File(fileMetadata.getServerFileName()), targetPath, pomFilename, fixChecksums);
        triggerAuditEvent(repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE);
        queueRepositoryTask(repoConfig.getId(), new File(targetPath, pomFilename));
    } catch (IOException ie) {
        throw new ArchivaRestServiceException("Error encountered while uploading pom file: " + ie.getMessage(),
                Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
    } catch (RepositoryException rep) {
        throw new ArchivaRestServiceException("Repository exception: " + rep.getMessage(),
                Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep);
    } catch (RepositoryAdminException e) {
        throw new ArchivaRestServiceException("RepositoryAdmin exception: " + e.getMessage(),
                Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
    }
}

From source file:org.apache.archiva.web.api.DefaultFileUploadService.java

protected void saveFile(String repositoryId, FileMetadata fileMetadata, boolean generatePom, String groupId,
        String artifactId, String version, String packaging) throws ArchivaRestServiceException {
    try {

        ManagedRepository repoConfig = managedRepositoryAdmin.getManagedRepository(repositoryId);

        ArtifactReference artifactReference = new ArtifactReference();
        artifactReference.setArtifactId(artifactId);
        artifactReference.setGroupId(groupId);
        artifactReference.setVersion(version);
        artifactReference.setClassifier(fileMetadata.getClassifier());
        artifactReference.setType(
                StringUtils.isEmpty(fileMetadata.getPackaging()) ? packaging : fileMetadata.getPackaging());

        ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent(repositoryId);

        String artifactPath = repository.toPath(artifactReference);

        int lastIndex = artifactPath.lastIndexOf('/');

        String path = artifactPath.substring(0, lastIndex);
        File targetPath = new File(repoConfig.getLocation(), path);

        log.debug("artifactPath: {} found targetPath: {}", artifactPath, targetPath);

        Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
        int newBuildNumber = -1;
        String timestamp = null;

        File versionMetadataFile = new File(targetPath, MetadataTools.MAVEN_METADATA);
        ArchivaRepositoryMetadata versionMetadata = getMetadata(versionMetadataFile);

        if (VersionUtil.isSnapshot(version)) {
            TimeZone timezone = TimeZone.getTimeZone("UTC");
            DateFormat fmt = new SimpleDateFormat("yyyyMMdd.HHmmss");
            fmt.setTimeZone(timezone);
            timestamp = fmt.format(lastUpdatedTimestamp);
            if (versionMetadata.getSnapshotVersion() != null) {
                newBuildNumber = versionMetadata.getSnapshotVersion().getBuildNumber() + 1;
            } else {
                newBuildNumber = 1;
            }
        }

        if (!targetPath.exists()) {
            targetPath.mkdirs();
        }

        String filename = artifactPath.substring(lastIndex + 1);
        if (VersionUtil.isSnapshot(version)) {
            filename = filename.replaceAll(VersionUtil.SNAPSHOT, timestamp + "-" + newBuildNumber);
        }

        boolean fixChecksums = !(archivaAdministration.getKnownContentConsumers()
                .contains("create-missing-checksums"));

        try {
            File targetFile = new File(targetPath, filename);
            if (targetFile.exists() && !VersionUtil.isSnapshot(version) && repoConfig.isBlockRedeployments()) {
                throw new ArchivaRestServiceException("Overwriting released artifacts in repository '"
                        + repoConfig.getId() + "' is not allowed.", Response.Status.BAD_REQUEST.getStatusCode(),
                        null);
            } else {
                copyFile(new File(fileMetadata.getServerFileName()), targetPath, filename, fixChecksums);
                triggerAuditEvent(repository.getId(), path + "/" + filename, AuditEvent.UPLOAD_FILE);
                queueRepositoryTask(repository.getId(), targetFile);
            }
        } catch (IOException ie) {
            log.error("IOException copying file: {}", ie.getMessage(), ie);
            throw new ArchivaRestServiceException(
                    "Overwriting released artifacts in repository '" + repoConfig.getId() + "' is not allowed.",
                    Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
        }

        if (generatePom) {
            String pomFilename = filename;
            if (StringUtils.isNotEmpty(fileMetadata.getClassifier())) {
                pomFilename = StringUtils.remove(pomFilename, "-" + fileMetadata.getClassifier());
            }
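            // The generated pom reuses the artifact's base name with a .pom extension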
            pomFilename = FilenameUtils.removeExtension(pomFilename) + ".pom";

            try {
                File generatedPomFile = createPom(targetPath, pomFilename, fileMetadata, groupId, artifactId,
                        version, packaging);
                triggerAuditEvent(repoConfig.getId(), path + "/" + pomFilename, AuditEvent.UPLOAD_FILE);
                if (fixChecksums) {
                    fixChecksums(generatedPomFile);
                }
                queueRepositoryTask(repoConfig.getId(), generatedPomFile);
            } catch (IOException ie) {
                throw new ArchivaRestServiceException(
                        "Error encountered while writing pom file: " + ie.getMessage(),
                        Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), ie);
            }
        }

        // explicitly update only if metadata-updater consumer is not enabled!
        if (!archivaAdministration.getKnownContentConsumers().contains("metadata-updater")) {
            updateProjectMetadata(targetPath.getAbsolutePath(), lastUpdatedTimestamp, timestamp, newBuildNumber,
                    fixChecksums, fileMetadata, groupId, artifactId, version, packaging);

            if (VersionUtil.isSnapshot(version)) {
                updateVersionMetadata(versionMetadata, versionMetadataFile, lastUpdatedTimestamp, timestamp,
                        newBuildNumber, fixChecksums, fileMetadata, groupId, artifactId, version, packaging);
            }
        }
    } catch (RepositoryNotFoundException re) {
        throw new ArchivaRestServiceException("Target repository cannot be found: " + re.getMessage(),
                Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), re);
    } catch (RepositoryException rep) {
        throw new ArchivaRestServiceException("Repository exception: " + rep.getMessage(),
                Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), rep);
    } catch (RepositoryAdminException e) {
        throw new ArchivaRestServiceException("RepositoryAdmin exception: " + e.getMessage(),
                Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), e);
    }
}

From source file:org.apache.brooklyn.entity.database.mysql.InitSlaveTaskBody.java

private void bootstrapSlaveAsync(final Future<ReplicationSnapshot> replicationInfoFuture,
        final MySqlNode slave) {
    DynamicTasks.queue("bootstrap slave replication", new Runnable() {
        @Override
        public void run() {
            ReplicationSnapshot replicationSnapshot;
            try {
                replicationSnapshot = replicationInfoFuture.get();
            } catch (InterruptedException | ExecutionException e) {
                throw Exceptions.propagate(e);
            }

            MySqlNode master = getMaster();
            String masterAddress = MySqlClusterUtils
                    .validateSqlParam(master.getAttribute(MySqlNode.SUBNET_ADDRESS));
            Integer masterPort = master.getAttribute(MySqlNode.MYSQL_PORT);
            String slaveAddress = MySqlClusterUtils
                    .validateSqlParam(slave.getAttribute(MySqlNode.SUBNET_ADDRESS));
            String username = MySqlClusterUtils
                    .validateSqlParam(cluster.getConfig(MySqlCluster.SLAVE_USERNAME));
            String password = MySqlClusterUtils
                    .validateSqlParam(cluster.getAttribute(MySqlCluster.SLAVE_PASSWORD));

            if (replicationSnapshot.getEntityId() != null) {
                Entity sourceEntity = Iterables.find(cluster.getMembers(),
                        EntityPredicates.idEqualTo(replicationSnapshot.getEntityId()));
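                // Use the snapshot path minus its extension as the dump id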
                String dumpId = FilenameUtils.removeExtension(replicationSnapshot.getSnapshotPath());
                copyDumpAsync(sourceEntity, slave, replicationSnapshot.getSnapshotPath(), dumpId);
                DynamicTasks.queue(Effectors.invocation(slave, MySqlNode.IMPORT_DUMP,
                        ImmutableMap.of("path", replicationSnapshot.getSnapshotPath())));
                //The dump resets the password to whatever is on the source instance, reset it back.
                //We are able to still login because privileges are not flushed, so we just set the password to the same value.
                DynamicTasks.queue(Effectors.invocation(slave, MySqlNode.CHANGE_PASSWORD,
                        ImmutableMap.of("password", slave.getAttribute(MySqlNode.PASSWORD)))); //
                //Flush privileges to load new users coming from the dump
                MySqlClusterUtils.executeSqlOnNodeAsync(slave, "FLUSH PRIVILEGES;");
            }

            MySqlClusterUtils.executeSqlOnNodeAsync(master,
                    String.format(
                            "CREATE USER '%s'@'%s' IDENTIFIED BY '%s';\n"
                                    + "GRANT REPLICATION SLAVE ON *.* TO '%s'@'%s';\n",
                            username, slaveAddress, password, username, slaveAddress));

            // Executing this will unblock SERVICE_UP wait in the start effector
            String slaveCmd = String.format(
                    "CHANGE MASTER TO " + "MASTER_HOST='%s', " + "MASTER_PORT=%d, " + "MASTER_USER='%s', "
                            + "MASTER_PASSWORD='%s', " + "MASTER_LOG_FILE='%s', " + "MASTER_LOG_POS=%d;\n"
                            + "START SLAVE;\n",
                    masterAddress, masterPort, username, password, replicationSnapshot.getBinLogName(),
                    replicationSnapshot.getBinLogPosition());
            MySqlClusterUtils.executeSqlOnNodeAsync(slave, slaveCmd);
        }
    });
}