List of usage examples for org.apache.maven.project.MavenProject.getArtifactId()
public String getArtifactId()
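Before the project-specific examples, a minimal sketch of the most common pattern: the current MavenProject is injected into a plugin Mojo and its coordinates are read with getGroupId(), getArtifactId() and getVersion(). The goal name and class name below are hypothetical and not taken from any of the projects listed.

import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;

// Hypothetical example; goal name and class name are illustrative only.
@Mojo(name = "print-coordinates")
public class PrintCoordinatesMojo extends AbstractMojo {

    // Maven injects the project currently being built.
    @Parameter(defaultValue = "${project}", readonly = true, required = true)
    private MavenProject project;

    @Override
    public void execute() {
        // groupId:artifactId:version is the usual textual form of the project's coordinates.
        getLog().info(project.getGroupId() + ":" + project.getArtifactId() + ":" + project.getVersion());
    }
}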
From source file:com.soebes.maven.extensions.BuildTimeProfiler.java
License:Apache License
private ProjectKey mavenProjectToProjectKey(MavenProject project) {
    return new ProjectKey(project.getGroupId(), project.getArtifactId(), project.getVersion());
}
From source file:com.soebes.maven.extensions.MojoTimer.java
License:Apache License
private ProjectKey createProjectKey(MavenProject project) {
    return new ProjectKey(project.getGroupId(), project.getArtifactId(), project.getVersion());
}
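Both extension classes above reduce a MavenProject to a groupId/artifactId/version key. The ProjectKey class itself is not part of this listing; the following is only a rough sketch of what such a value class might look like (field and method names are assumptions, not the extension's actual code).

// Hypothetical sketch of a GAV key class; not the actual ProjectKey from the extension.
final class ProjectKey {
    private final String groupId;
    private final String artifactId;
    private final String version;

    ProjectKey(String groupId, String artifactId, String version) {
        this.groupId = groupId;
        this.artifactId = artifactId;
        this.version = version;
    }

    String getId() {
        // groupId:artifactId:version, the usual textual form of Maven coordinates.
        return groupId + ":" + artifactId + ":" + version;
    }
}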
From source file:com.sun.enterprise.module.maven.OSGiPackager.java
License:Open Source License
/**
 * Reads information from the POM and the artifact archive to configure
 * the OSGi manifest entries. Returns a new set of entries if the archive
 * does not already have manifest entries, else it uses the existing entries
 * map. If any of the attributes already exists, its processing is skipped,
 * honoring the user's request.
 *
 * It uses the following rules:
 *
 * Bundle-SymbolicName is assumed to be "${groupId}.${artifactId}"
 * Bundle-Version is derived from "${pom.version}"
 * using {@link VersionTranslator#MavenToOSGi(String)}
 * Bundle-Description is assumed to be "${pom.description}".
 * Bundle-Vendor is assumed to be "${pom.organization.name}".
 * Require-Bundle is populated by values read from pom dependencies
 *
 * Note:
 * There is no support for Export-Package yet.
 * It sets Bundle-ManifestVersion to 2, which indicates an OSGi r4 bundle.
 *
 * @param pom The Maven project object
 * @param archive The archive that is being built
 * @param classesDirectory output for javac
 * @return Manifest entries
 * @throws java.io.IOException
 */
public Map<String, String> configureOSGiManifest(MavenProject pom, MavenArchiveConfiguration archive,
        File classesDirectory) throws IOException {
    Map<String, String> entries;
    if (archive != null)
        entries = archive.getManifestEntries();
    else
        entries = new HashMap<String, String>();

    if (entries.get(BUNDLE_MANIFESTVERSION) == null) {
        // 2 indicates compliance with r4; note: there is no value called 1
        entries.put(BUNDLE_MANIFESTVERSION, "2");
    }
    if (entries.get(BUNDLE_NAME) == null) {
        // Bundle-Name is a human-readable, localizable name that can contain spaces
        entries.put(BUNDLE_NAME, pom.getName());
    }
    if (entries.get(BUNDLE_SYMBOLICNAME) == null) {
        // OSGi convention is to use a reverse domain name for SymbolicName, hence use '.'
        entries.put(BUNDLE_SYMBOLICNAME, pom.getGroupId() + '.' + pom.getArtifactId());
    }
    if (entries.get(BUNDLE_VERSION) == null) {
        entries.put(BUNDLE_VERSION, VersionTranslator.MavenToOSGi(pom.getVersion()));
    }
    if (entries.get(BUNDLE_DESCRIPTION) == null) {
        if (pom.getDescription() != null)
            entries.put(BUNDLE_DESCRIPTION, pom.getDescription());
    }
    if (entries.get(BUNDLE_VENDOR) == null) {
        if (pom.getOrganization() != null && pom.getOrganization().getName() != null)
            entries.put(BUNDLE_VENDOR, pom.getOrganization().getName());
    }
    // Handle Require-Bundle.
    if (entries.get(REQUIRE_BUNDLE) == null) {
        String requiredBundles = generateRequireBundleHeader(discoverRequiredBundles(pom));
        if (requiredBundles.length() > 0) {
            entries.put(REQUIRE_BUNDLE, requiredBundles);
        }
    }
    // Handle Export-Package
    if (entries.get(EXPORT_PACKAGE) == null) {
        List<ExportedPackage> packages = discoverPackages(classesDirectory);
        // don't use version until we resolve split package issues in GF
        String exportPackages = generateExportPackageHeader(packages, null);
        if (exportPackages.length() > 0) {
            entries.put(EXPORT_PACKAGE, exportPackages);
        }
    }
    return entries;
}
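The Bundle-Version entry above goes through a Maven-to-OSGi version translation via the project's VersionTranslator, which is not shown in this listing. As a rough illustration only, and assuming nothing about the real implementation, such a translation typically turns "1.2-SNAPSHOT" into "1.2.0.SNAPSHOT":

// Rough illustration; not the actual VersionTranslator used by OSGiPackager.
static String mavenToOsgiVersion(String mavenVersion) {
    // Split "1.2.3-SNAPSHOT" into the numeric part and an optional qualifier.
    String[] parts = mavenVersion.split("-", 2);
    String[] numbers = parts[0].split("\\.");
    int major = numbers.length > 0 ? parseOrZero(numbers[0]) : 0;
    int minor = numbers.length > 1 ? parseOrZero(numbers[1]) : 0;
    int micro = numbers.length > 2 ? parseOrZero(numbers[2]) : 0;
    String osgi = major + "." + minor + "." + micro;
    if (parts.length > 1) {
        // OSGi qualifiers may only contain [A-Za-z0-9_-], so replace anything else.
        osgi += "." + parts[1].replaceAll("[^A-Za-z0-9_-]", "_");
    }
    return osgi;
}

static int parseOrZero(String s) {
    try {
        return Integer.parseInt(s);
    } catch (NumberFormatException e) {
        return 0;
    }
}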
From source file:com.sun.enterprise.module.maven.Packager.java
License:Open Source License
public Map<String, String> configureManifest(MavenProject pom, MavenArchiveConfiguration archive,
        File classesDirectory) throws IOException {
    Map<String, String> entries;
    if (archive != null)
        entries = archive.getManifestEntries();
    else
        entries = new HashMap<String, String>();

    entries.put(ManifestConstants.BUNDLE_NAME, pom.getGroupId() + '.' + pom.getArtifactId());

    // check META-INF/services/xxx.ImportPolicy to fill in Import-Policy
    configureImportPolicy(classesDirectory, entries, ImportPolicy.class, ManifestConstants.IMPORT_POLICY);
    configureImportPolicy(classesDirectory, entries, LifecyclePolicy.class, ManifestConstants.LIFECYLE_POLICY);

    // check direct dependencies to find out dependency modules.
    // we don't need to list transitive dependencies here, so use getDependencyArtifacts().
    TokenListBuilder dependencyModuleNames = new TokenListBuilder();
    // used to find transitive dependencies through other modules.
    Set<String> dependencyModules = new HashSet<String>();
    for (Artifact a : (Set<Artifact>) pom.getDependencyArtifacts()) {
        if (a.getScope() != null && a.getScope().equals("test"))
            continue;

        // http://www.nabble.com/V3-gf%3Arun-throws-NPE-tf4816802.html indicates
        // that some artifacts are not resolved at this point. Not sure when that could happen
        // so aborting with diagnostics if we find it. We need to better understand what this
        // means and work accordingly. - KK
        if (a.getFile() == null) {
            throw new AssertionError(a.getId() + " is not resolved. a=" + a);
        }
        Jar jar;
        try {
            jar = Jar.create(a.getFile());
        } catch (IOException ioe) {
            // not a jar file, so continue.
            continue;
        }
        Manifest manifest = jar.getManifest();
        String name = null;
        if (manifest != null) {
            Attributes attributes = manifest.getMainAttributes();
            name = attributes.getValue(ManifestConstants.BUNDLE_NAME);
        }
        if (name != null) {
            // this is a hk2 module
            if (!a.isOptional())
                dependencyModuleNames.add(name);
            // even optional modules need to be listed here
            dependencyModules.add(a.getGroupId() + '.' + a.getArtifactId() + ':' + a.getVersion());
        }
    }

    // find jar files to be listed in Class-Path. This needs to include transitive
    // dependencies, except when the path involves a hk2 module.
    TokenListBuilder classPathNames = new TokenListBuilder(" ");
    TokenListBuilder classPathIds = new TokenListBuilder(" ");
    for (Artifact a : (Set<Artifact>) pom.getArtifacts()) {
        // check the trail. does that include hk2 module in the path?
        boolean throughModule = false;
        for (String module : dependencyModules)
            throughModule |= a.getDependencyTrail().get(1).toString().startsWith(module);
        if (throughModule)
            continue; // yep
        if (a.getScope().equals("system") || a.getScope().equals("provided") || a.getScope().equals("test"))
            continue; // ignore tools.jar and such dependencies.
        if (a.isOptional())
            continue; // optional dependency

        classPathNames.add(stripVersion(a));
        classPathIds.add(a.getId());
    }
    if (!classPathNames.isEmpty()) {
        String existingClassPath = entries.get(ManifestConstants.CLASS_PATH);
        if (existingClassPath != null)
            entries.put(ManifestConstants.CLASS_PATH, existingClassPath + " " + classPathNames);
        else
            entries.put(ManifestConstants.CLASS_PATH, classPathNames.toString());
        entries.put(ManifestConstants.CLASS_PATH_ID, classPathIds.toString());
    }
    return entries;
}
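The Class-Path entries above are produced by a stripVersion(Artifact) helper that is not part of this listing; presumably it derives a version-less file name from the artifact coordinates. A hedged sketch of one way such a helper could look (not the project's actual code):

// Illustrative only; not the actual stripVersion used by Packager.
static String stripVersion(org.apache.maven.artifact.Artifact a) {
    // e.g. artifactId=foo, version=1.0, type=jar -> "foo.jar"
    String extension = a.getArtifactHandler() != null ? a.getArtifactHandler().getExtension() : "jar";
    String classifier = a.getClassifier();
    if (classifier != null && classifier.length() > 0) {
        return a.getArtifactId() + "-" + classifier + "." + extension;
    }
    return a.getArtifactId() + "." + extension;
}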
From source file:com.topclouders.releaseplugin.mojo.ReleaseSiteGeneratorMojo.java
License:Apache License
/**
 * Generate release notes for a MavenProject
 *
 * @param mavenProject
 * @throws TemplateException
 * @return Return the resolved thymeleaf template
 */
private String generateTemplate(MavenProject mavenProject) throws MojoExecutionException {
    try {
        // A null project cannot produce a release note.
        if (mavenProject == null) {
            return null;
        }
        File inputFile = this.getInputFile(mavenProject);
        if (!inputFile.exists()) {
            getLog().warn(String.format(
                    "Could not generate release note for artifact '%s' because the '%s' file does not exist",
                    mavenProject.getArtifactId(), inputFilePath));
            return null;
        }

        final ITemplateContext templateContext = this.prepareTempalteContext(mavenProject);
        String siteHtml = templateGenerator.generateSiteTemplate(templateContext);

        String fileName = String.format("%s.%s", mavenProject.getArtifactId(), this.ouputFileExtentsion);
        File outputFile = new File(this.outputDirectory.getPath() + "/" + fileName);
        FileUtils.writeStringToFile(outputFile, siteHtml, this.characterEncoding, false);

        return siteHtml;
    } catch (Exception e) {
        throw new MojoExecutionException("Failed to generate template", e);
    }
}
From source file:com.topclouders.releaseplugin.mojo.ReleaseSiteGeneratorMojo.java
License:Apache License
/**
 * Prepare template context for maven project
 *
 * @param project
 * @return
 * @throws MojoExecutionException
 */
private ITemplateContext prepareTempalteContext(MavenProject project) throws MojoExecutionException {
    if (project == null) {
        throw new IllegalArgumentException("Failed to prepare template context because 'project' is null");
    }

    File inputFile = this.getInputFile(project);

    try (FileInputStream fileInputStream = new FileInputStream(inputFile)) {
        Unmarshaller unmarshaller = JaxbFactory.newInstance(JAXB_CONTEXT_PATH).unmarshaller();
        ChangesDocument changesDocument = unmarshaller
                .unmarshal(new StreamSource(fileInputStream), ChangesDocument.class).getValue();

        // Prepare the evaluation context
        final ITemplateContext templateContext = new DefaultTemplateContext(new Locale(this.language));
        templateContext.add("rootProject", this.mavenProject);
        templateContext.add("changesDocument", changesDocument);
        templateContext.add("project", project);
        templateContext.add("extension", this.ouputFileExtentsion);

        return templateContext;
    } catch (Exception e) {
        throw new MojoExecutionException(
                String.format("Failed to prepare template context for artifact %s", project.getArtifactId()), e);
    }
}
From source file:com.topclouders.releaseplugin.mojo.ReleaseSiteGeneratorMojo.java
License:Apache License
/**
 * @param mavenProject
 * @return
 * @throws MojoExecutionException
 */
private File getInputFile(MavenProject mavenProject) throws MojoExecutionException {
    File inputFile = new File(mavenProject.getBasedir(), this.inputFilePath);
    if (this.failOnMissingInputFile && !inputFile.exists()) {
        throw new MojoExecutionException(String.format("Input file %s does not exist for maven project %s",
                this.inputFilePath, mavenProject.getArtifactId()));
    }
    return inputFile;
}
From source file:com.torchmind.maven.plugins.attribution.AttributionMojo.java
License:Apache License
/**
 * Creates an attribution object using a root artifact and its listed dependencies.
 *
 * @param artifact the maven project.
 * @param dependencies the dependencies.
 * @param plugins the plugins.
 * @return the attribution.
 */
@Nonnull
public static AttributionDocument createAttribution(@Nonnull MavenProject artifact,
        @Nonnull List<Artifact> dependencies, @Nonnull List<Artifact> plugins) {
    return new AttributionDocument(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
            artifact.getName(), artifact.getDescription(), artifact.getUrl(),
            artifact.getLicenses().stream().map(AttributionMojo::createLicense).collect(Collectors.toList()),
            artifact.getDevelopers().stream().map(AttributionMojo::createDeveloper)
                    .collect(Collectors.toList()),
            artifact.getContributors().stream().map(AttributionMojo::createDeveloper)
                    .collect(Collectors.toList()),
            dependencies, plugins);
}
From source file:com.torchmind.maven.plugins.attribution.AttributionMojo.java
License:Apache License
/**
 * Creates an artifact using a maven project.
 *
 * @param artifact the maven project.
 * @return the artifact.
 */
@Nonnull
public static Artifact createArtifact(@Nonnull MavenProject artifact) {
    return new Artifact(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(),
            artifact.getName(), artifact.getDescription(), artifact.getUrl(),
            artifact.getLicenses().stream().map(AttributionMojo::createLicense).collect(Collectors.toList()),
            artifact.getDevelopers().stream().map(AttributionMojo::createDeveloper)
                    .collect(Collectors.toList()),
            artifact.getContributors().stream().map(AttributionMojo::createDeveloper)
                    .collect(Collectors.toList()));
}
From source file:com.tvarit.plugin.TvaritTomcatDeployerMojo.java
License:Open Source License
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    final MavenProject project = (MavenProject) this.getPluginContext().getOrDefault("project", null);
    if (templateUrl == null)
        try {
            templateUrl = new TemplateUrlMaker().makeUrl(project, "newinstance.template").toString();
        } catch (MalformedURLException e) {
            throw new MojoExecutionException(
                    "Could not create default url for templates. Please open an issue on github.", e);
        }
    final BasicAWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
    AmazonS3Client s3Client = new AmazonS3Client(awsCredentials);
    final File warFile = project.getArtifact().getFile();
    final String key = "deployables/" + project.getGroupId() + "/" + project.getArtifactId() + "/"
            + project.getVersion() + "/" + warFile.getName();
    final PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName, key, warFile);
    final ObjectMetadata metadata = new ObjectMetadata();
    final HashMap<String, String> userMetadata = new HashMap<>();
    userMetadata.put("project_name", projectName);
    userMetadata.put("stack_template_url", templateUrl);
    userMetadata.put("private_key_name", sshKeyName);
    metadata.setUserMetadata(userMetadata);
    putObjectRequest.withMetadata(metadata);
    final PutObjectResult putObjectResult = s3Client.putObject(putObjectRequest);
    /*
    AmazonCloudFormationClient amazonCloudFormationClient = new AmazonCloudFormationClient(awsCredentials);
    final com.amazonaws.services.cloudformation.model.Parameter projectNameParameter =
            new com.amazonaws.services.cloudformation.model.Parameter()
                    .withParameterKey("projectName").withParameterValue(this.projectName);
    final com.amazonaws.services.cloudformation.model.Parameter publicSubnetsParameter =
            new com.amazonaws.services.cloudformation.model.Parameter()
                    .withParameterKey("publicSubnets").withParameterValue(commaSeparatedSubnetIds);
    final com.amazonaws.services.cloudformation.model.Parameter tvaritRoleParameter =
            new com.amazonaws.services.cloudformation.model.Parameter()
                    .withParameterKey("tvaritRole").withParameterValue(tvaritRole);
    final com.amazonaws.services.cloudformation.model.Parameter tvaritInstanceProfileParameter =
            new com.amazonaws.services.cloudformation.model.Parameter()
                    .withParameterKey("tvaritInstanceProfile").withParameterValue(this.tvaritInstanceProfile);
    final com.amazonaws.services.cloudformation.model.Parameter tvaritBucketNameParameter =
            new com.amazonaws.services.cloudformation.model.Parameter()
                    .withParameterKey("bucketName").withParameterValue(this.bucketName);
    final com.amazonaws.services.cloudformation.model.Parameter instanceSecurityGroupIdParameter =
            new com.amazonaws.services.cloudformation.model.Parameter()
                    .withParameterKey("sgId").withParameterValue(this.instanceSecurityGroupId);
    final com.amazonaws.services.cloudformation.model.Parameter sshKeyNameParameter =
            new com.amazonaws.services.cloudformation.model.Parameter()
                    .withParameterKey("keyName").withParameterValue(this.sshKeyName);
    final String warFileUrl = s3Client.getUrl(bucketName, key).toString();
    final com.amazonaws.services.cloudformation.model.Parameter warFileUrlParameter =
            new com.amazonaws.services.cloudformation.model.Parameter()
                    .withParameterKey("warFileUrl").withParameterValue(warFileUrl);
    final CreateStackRequest createStackRequest = new CreateStackRequest();
    if (templateUrl == null) {
        try {
            templateUrl = new TemplateUrlMaker().makeUrl(project, "newinstance.template").toString();
        } catch (MalformedURLException e) {
            throw new MojoExecutionException(
                    "Could not create default url for templates. Please open an issue on github.", e);
        }
    }
    createStackRequest
            .withStackName(projectName + "-instance-" + project.getVersion().replace(".", "-"))
            .withParameters(projectNameParameter, publicSubnetsParameter, tvaritInstanceProfileParameter,
                    tvaritRoleParameter, tvaritBucketNameParameter, instanceSecurityGroupIdParameter,
                    warFileUrlParameter, sshKeyNameParameter)
            .withDisableRollback(true)
            .withTemplateURL(templateUrl);
    createStackRequest.withDisableRollback(true);
    final Stack stack = new StackMaker().makeStack(createStackRequest, amazonCloudFormationClient, getLog());
    AmazonAutoScalingClient amazonAutoScalingClient = new AmazonAutoScalingClient(awsCredentials);
    final AttachInstancesRequest attachInstancesRequest = new AttachInstancesRequest();
    attachInstancesRequest.withInstanceIds(stack.getOutputs().get(0).getOutputValue(),
            stack.getOutputs().get(1).getOutputValue()).withAutoScalingGroupName(autoScalingGroupName);
    amazonAutoScalingClient.attachInstances(attachInstancesRequest);
    */
}