Usage examples for `org.apache.maven.project.MavenProject#getBuild()`
public Build getBuild()
From source file:org.opendaylight.yangtools.yang2sources.plugin.GeneratedDirectories.java
License:Open Source License
public GeneratedDirectories(MavenProject project) { this.targetGeneratedSources = new File(project.getBuild().getDirectory(), "generated-sources"); }
From source file:org.opendaylight.yangtools.yang2sources.plugin.YangProvider.java
License:Open Source License
void addYangsToMetaInf(final MavenProject project, final File yangFilesRootDir, final Collection<File> excludedFiles) throws MojoFailureException { // copy project's src/main/yang/*.yang to target/generated-sources/yang/META-INF/yang/*.yang File generatedYangDir = new File(project.getBasedir(), CodeGeneratorArg.YANG_GENERATED_DIR); addYangsToMetaInf(project, yangFilesRootDir, excludedFiles, generatedYangDir); // Also copy to the actual build output dir if different than "target". When running in // Eclipse this can differ (eg "target-ide"). File actualGeneratedYangDir = new File(project.getBuild().getDirectory(), CodeGeneratorArg.YANG_GENERATED_DIR.replace("target" + File.separator, "")); if (!actualGeneratedYangDir.equals(generatedYangDir)) { addYangsToMetaInf(project, yangFilesRootDir, excludedFiles, actualGeneratedYangDir); }/*from ww w .ja va2 s.com*/ }
From source file:org.openehealth.ipf.labs.maven.dsldoc.DslIndexReport.java
License:Apache License
/**
 * Merges every reactor module's {@code target/dslindex} fragment into a single
 * HTML page and writes it through the report sink.
 *
 * @param arg0 report locale (not used by the merge)
 * @throws MavenReportException if a dslindex fragment cannot be read
 */
protected void executeReport(Locale arg0) throws MavenReportException {
    final StringBuilder merged = new StringBuilder();
    for (MavenProject module : reactorProjects) {
        final String indexPath = module.getBuild().getDirectory() + "/dslindex";
        final File indexFile = new File(indexPath);
        if (!indexFile.exists()) {
            continue; // this module produced no DSL index fragment
        }
        getLog().info("Merging DSL index file: " + indexPath);
        try {
            merged.append(FileUtils.readFileToString(indexFile));
        } catch (IOException e) {
            throw new MavenReportException("Cannot read file for report: " + indexPath, e);
        }
    }
    // Wrap all merged fragments in a single HTML document.
    getSink().rawText("<html><head><title>" + projectName + " DSL extensions index</title></head><body>");
    getSink().rawText(merged.toString());
    getSink().rawText("</body></html>");
    getSink().flush();
}
From source file:org.opennms.maven.plugins.tgz.AbstractAssemblyMojo.java
License:Apache License
/**
 * Adds the content of each reactor module selected by the given module sets to
 * the assembly archive. For every matching module two aspects are handled:
 * its source tree (added as a FileSet with all build/output directories
 * excluded) and its binaries (either unpacked into the archive or added as
 * files, optionally including dependencies).
 *
 * @param archiver             archiver that receives files and directories
 * @param moduleSets           list of ModuleSet descriptors from the assembly
 * @param includeBaseDirectory whether output paths are prefixed with the final name
 * @throws MojoFailureException   propagated from file-set processing
 * @throws MojoExecutionException on unresolved module artifacts or archiver errors
 */
private void processModules(Archiver archiver, List moduleSets, boolean includeBaseDirectory)
        throws MojoFailureException, MojoExecutionException {
    for (Iterator i = moduleSets.iterator(); i.hasNext();) {
        ModuleSet moduleSet = (ModuleSet) i.next();

        // Build the combined include/exclude artifact filter for this module set.
        AndArtifactFilter filter = new AndArtifactFilter();
        if (!moduleSet.getIncludes().isEmpty()) {
            filter.add(new AssemblyIncludesArtifactFilter(moduleSet.getIncludes()));
        }
        if (!moduleSet.getExcludes().isEmpty()) {
            filter.add(new AssemblyExcludesArtifactFilter(moduleSet.getExcludes()));
        }

        Set set = getModulesFromReactor(getExecutedProject());
        List moduleFileSets = new ArrayList();
        for (Iterator j = set.iterator(); j.hasNext();) {
            MavenProject moduleProject = (MavenProject) j.next();
            if (filter.include(moduleProject.getArtifact())) {
                String name = moduleProject.getBuild().getFinalName();

                // 1) Module sources: package the module base dir, excluding the
                //    build, classes, test-classes and reporting output dirs.
                ModuleSources sources = moduleSet.getSources();
                if (sources != null) {
                    String output = sources.getOutputDirectory();
                    output = getOutputDirectory(output, moduleProject, includeBaseDirectory);
                    FileSet moduleFileSet = new FileSet();
                    moduleFileSet.setDirectory(moduleProject.getBasedir().getAbsolutePath());
                    moduleFileSet.setOutputDirectory(output);
                    List excludesList = new ArrayList();
                    excludesList.add(PathUtils.toRelative(moduleProject.getBasedir(),
                            moduleProject.getBuild().getDirectory()) + "/**");
                    excludesList.add(PathUtils.toRelative(moduleProject.getBasedir(),
                            moduleProject.getBuild().getOutputDirectory()) + "/**");
                    excludesList.add(PathUtils.toRelative(moduleProject.getBasedir(),
                            moduleProject.getBuild().getTestOutputDirectory()) + "/**");
                    excludesList.add(PathUtils.toRelative(moduleProject.getBasedir(),
                            moduleProject.getReporting().getOutputDirectory()) + "/**");
                    moduleFileSet.setExcludes(excludesList);
                    moduleFileSets.add(moduleFileSet);
                }

                // 2) Module binaries: the packaged artifact, either unpacked or
                //    added as a file, optionally with its dependencies.
                ModuleBinaries binaries = moduleSet.getBinaries();
                if (binaries != null) {
                    Artifact artifact = moduleProject.getArtifact();
                    if (artifact.getFile() == null) {
                        throw new MojoExecutionException("Included module: " + moduleProject.getId()
                                + " does not have an artifact with a file. Please ensure the package phase is run before the assembly is generated.");
                    }
                    String output = binaries.getOutputDirectory();
                    output = getOutputDirectory(output, moduleProject, includeBaseDirectory);
                    // Directory/file modes are octal strings -> parse with radix 8.
                    archiver.setDefaultDirectoryMode(Integer.parseInt(binaries.getDirectoryMode(), 8));
                    archiver.setDefaultFileMode(Integer.parseInt(binaries.getFileMode(), 8));
                    getLog().debug("ModuleSet[" + output + "]" + " dir perms: "
                            + Integer.toString(archiver.getDefaultDirectoryMode(), 8) + " file perms: "
                            + Integer.toString(archiver.getDefaultFileMode(), 8));
                    if (binaries.isUnpack()) {
                        // TODO: something like zipfileset in plexus-archiver
                        // archiver.addJar( )
                        // TODO refactor into the AbstractUnpackMojo
                        File tempLocation = new File(workDirectory, name);
                        // Only re-unpack when the temp dir is missing or older
                        // than the artifact file.
                        boolean process = false;
                        if (!tempLocation.exists()) {
                            tempLocation.mkdirs();
                            process = true;
                        } else if (artifact.getFile().lastModified() > tempLocation.lastModified()) {
                            process = true;
                        }
                        if (process) {
                            try {
                                unpack(artifact.getFile(), tempLocation);
                                if (binaries.isIncludeDependencies()) {
                                    Set artifactSet = moduleProject.getArtifacts();
                                    for (Iterator artifacts = artifactSet.iterator(); artifacts.hasNext();) {
                                        Artifact dependencyArtifact = (Artifact) artifacts.next();
                                        unpack(dependencyArtifact.getFile(), tempLocation);
                                    }
                                }
                            } catch (NoSuchArchiverException e) {
                                throw new MojoExecutionException(
                                        "Unable to obtain unarchiver: " + e.getMessage(), e);
                            }
                            /*
                             * If the assembly is 'jar-with-dependencies', remove the security files in all
                             * dependencies that will prevent the uberjar to execute. Please see MASSEMBLY-64
                             * for details.
                             */
                            if (archiver instanceof JarArchiver) {
                                String[] securityFiles = { "*.RSA", "*.DSA", "*.SF", "*.rsa", "*.dsa", "*.sf" };
                                org.apache.maven.shared.model.fileset.FileSet securityFileSet = new org.apache.maven.shared.model.fileset.FileSet();
                                securityFileSet.setDirectory(tempLocation.getAbsolutePath() + "/META-INF/");
                                for (int sfsi = 0; sfsi < securityFiles.length; sfsi++) {
                                    securityFileSet.addInclude(securityFiles[sfsi]);
                                }
                                FileSetManager fsm = new FileSetManager(getLog());
                                try {
                                    fsm.delete(securityFileSet);
                                } catch (IOException e) {
                                    throw new MojoExecutionException(
                                            "Failed to delete security files: " + e.getMessage(), e);
                                }
                            }
                        }
                        // Runs even when no fresh unpack happened: the temp dir
                        // still holds the previous unpack's content.
                        addDirectory(archiver, tempLocation, output, null, FileUtils.getDefaultExcludesAsList());
                    } else {
                        try {
                            String outputFileNameMapping = binaries.getOutputFileNameMapping();
                            archiver.addFile(artifact.getFile(),
                                    output + evaluateFileNameMapping(artifact, outputFileNameMapping));
                            if (binaries.isIncludeDependencies()) {
                                Set artifactSet = moduleProject.getArtifacts();
                                for (Iterator artifacts = artifactSet.iterator(); artifacts.hasNext();) {
                                    Artifact dependencyArtifact = (Artifact) artifacts.next();
                                    archiver.addFile(dependencyArtifact.getFile(), output
                                            + evaluateFileNameMapping(dependencyArtifact, outputFileNameMapping));
                                }
                            }
                        } catch (ArchiverException e) {
                            throw new MojoExecutionException("Error adding file to archive: " + e.getMessage(), e);
                        }
                    }
                }
            } else {
                // would be better to have a way to find out when a specified include or exclude
                // is never triggered and warn() it.
                getLog().debug("module: " + moduleProject.getId() + " not included");
            }
            if (!moduleFileSets.isEmpty()) {
                // TODO: includes and excludes
                // NOTE(review): this sits inside the per-module loop, so the
                // accumulated file sets are re-processed for each module that
                // reaches this point — confirm the repetition is intended.
                processFileSets(archiver, moduleFileSets, includeBaseDirectory);
            }
        }
    }
}
From source file:org.opennms.maven.plugins.tgz.AbstractAssemblyMojo.java
License:Apache License
private String getOutputDirectory(String output, MavenProject project, boolean includeBaseDirectory) { String value = output;//ww w. j av a 2s . c o m if (value == null) { value = ""; } if (!value.endsWith("/") && !value.endsWith("\\")) { // TODO: shouldn't archiver do this? value += '/'; } if (includeBaseDirectory) { if (value.startsWith("/")) { value = finalName + value; } else { value = finalName + "/" + value; } } else { if (value.startsWith("/")) { value = value.substring(1); } } if (project != null) { value = StringUtils.replace(value, "${groupId}", project.getGroupId()); value = StringUtils.replace(value, "${artifactId}", project.getArtifactId()); value = StringUtils.replace(value, "${version}", project.getVersion()); Build build = project.getBuild(); value = StringUtils.replace(value, "${build.finalName}", build.getFinalName()); value = StringUtils.replace(value, "${finalName}", build.getFinalName()); } return value; }
From source file:org.openspaces.maven.plugin.DeployPUMojo.java
License:Apache License
/** * executes the mojo./*from w w w. j a v a 2 s . c om*/ */ public void executeMojo() throws MojoExecutionException, MojoFailureException { // Remove white spaces from ClassLoader's URLs ClassLoader currentCL = Thread.currentThread().getContextClassLoader(); try { Utils.changeClassLoaderToSupportWhiteSpacesRepository(currentCL); } catch (Exception e) { PluginLog.getLog().info("Unable to update ClassLoader. Proceeding with processing unit invocation.", e); } Utils.handleSecurity(); // get a list of project to execute in the order set by the reactor List projects = Utils.getProjectsToExecute(reactorProjects, module); for (Iterator projIt = projects.iterator(); projIt.hasNext();) { MavenProject proj = (MavenProject) projIt.next(); PluginLog.getLog().info("Deploying processing unit: " + proj.getBuild().getFinalName()); String[] attributesArray = createAttributesArray(Utils.getProcessingUnitJar(proj)); try { Class deployClass = Class.forName("org.openspaces.pu.container.servicegrid.deploy.Deploy", true, Thread.currentThread().getContextClassLoader()); deployClass.getMethod("main", new Class[] { String[].class }).invoke(null, new Object[] { attributesArray }); } catch (InvocationTargetException e) { throw new MojoExecutionException(e.getTargetException().getMessage(), e.getTargetException()); } catch (Exception e) { throw new MojoExecutionException(e.getMessage(), e); } } }
From source file:org.openspaces.maven.plugin.RunPUMojo.java
License:Apache License
/** * Prepares and executes the PU.//from ww w . j a v a 2 s . c om * * @throws MojoExecutionException * @throws MojoFailureException */ private void executePU(MavenProject project) throws MojoExecutionException, MojoFailureException { if (project == null || !project.getPackaging().equalsIgnoreCase("jar")) { throw new MojoExecutionException("The processing unit project '" + (project == null ? "unknown" : project.getName()) + "' must be of type jar (packaging=jar)."); } // run the PU PluginLog.getLog().info("Running processing unit: " + project.getBuild().getFinalName()); // resolve the classpath for the execution of the processing unit List classpath = null; ClassLoader classLoader = null; try { String[] includeScopes = Utils.convertCommaSeparatedListToArray(scopes); classpath = Utils.resolveExecutionClasspath(project, includeScopes, true, reactorProjects, dependencyTreeBuilder, metadataSource, artifactCollector, artifactResolver, artifactFactory, localRepository, remoteRepositories); PluginLog.getLog().info("Processing unit [" + project.getName() + "] classpath: " + classpath); classLoader = Utils.createClassLoader(classpath, null); } catch (Exception e1) { throw new MojoExecutionException("Failed to resolve the processing unit's classpath", e1); } // set groups if (groups != null && !groups.trim().equals("")) { SystemInfo.singleton().lookup().setGroups(groups); } // set locators if (locators != null && !locators.trim().equals("")) { SystemInfo.singleton().lookup().setLocators(locators); } // execute the processing unit in the new class loader ContainerRunnable conatinerRunnable = new ContainerRunnable( "org.openspaces.pu.container.integrated.IntegratedProcessingUnitContainer", createAttributesArray()); Thread thread = new Thread(conatinerRunnable, "Processing Unit [" + project.getBuild().getFinalName() + "]"); thread.setContextClassLoader(classLoader); thread.start(); while (!conatinerRunnable.hasStarted()) { try { Thread.sleep(200); } catch (InterruptedException 
e) { } } if (conatinerRunnable.getException() != null) { Utils.throwMissingLicenseException(conatinerRunnable.getException(), localRepository); throw new MojoExecutionException( "Failed to start processing unit [" + project.getBuild().getFinalName() + "]", conatinerRunnable.getException()); } containers.add(thread); }
From source file:org.openspaces.maven.plugin.RunStandalonePUMojo.java
License:Apache License
/** * Prepares and executes the PU./* w ww .j a va 2s. c o m*/ * * @throws MojoExecutionException * @throws MojoFailureException */ private void executePU(MavenProject project) throws MojoExecutionException, MojoFailureException { if (project == null || !project.getPackaging().equalsIgnoreCase("jar")) { throw new MojoExecutionException("The processing unit project '" + (project == null ? "unknown" : project.getName()) + "' must be of type jar (packaging=jar)."); } // resolve the classpath for the execution of the processing unit List<URL> classpath; ClassLoader classLoader; try { String[] includeScopes = Utils.convertCommaSeparatedListToArray(scopes); classpath = Utils.resolveExecutionClasspath(project, includeScopes, false, reactorProjects, dependencyTreeBuilder, metadataSource, artifactCollector, artifactResolver, artifactFactory, localRepository, remoteRepositories); PluginLog.getLog().info("Processing unit [" + project.getName() + "] classpath: " + classpath); classLoader = Utils.createClassLoader(classpath, null); } catch (Exception e1) { throw new MojoExecutionException("Failed to resolve the processing unit's classpath", e1); } if (groups != null && !groups.trim().equals("")) { SystemInfo.singleton().lookup().setGroups(groups); } if (locators != null && !locators.trim().equals("")) { SystemInfo.singleton().lookup().setLocators(locators); } // run the PU PluginLog.getLog().info("Running processing unit: " + project.getBuild().getFinalName()); ContainerRunnable conatinerRunnable = new ContainerRunnable( "org.openspaces.pu.container.standalone.StandaloneProcessingUnitContainer", createAttributesArray(Utils.getProcessingUnitJar((project)))); Thread thread = new Thread(conatinerRunnable, "Processing Unit [" + project.getBuild().getFinalName() + "]"); thread.setContextClassLoader(classLoader); thread.start(); while (!conatinerRunnable.hasStarted()) { try { Thread.sleep(200); } catch (InterruptedException e) { } } if (conatinerRunnable.getException() != null) { 
Utils.throwMissingLicenseException(conatinerRunnable.getException(), localRepository); throw new MojoExecutionException( "Failed to start processing unit [" + project.getBuild().getFinalName() + "]", conatinerRunnable.getException()); } containers.add(thread); }
From source file:org.openspaces.maven.plugin.UndeployPUMojo.java
License:Apache License
/** * executes the mojo./* ww w .j a va 2 s .c o m*/ */ public void executeMojo() throws MojoExecutionException, MojoFailureException { Utils.handleSecurity(); // get a list of project to execute in the order set by the reactor List projects = Utils.getProjectsToExecute(reactorProjects, module); // in undeploy reverse the order of projects Collections.reverse(projects); int failureCount = 0; Throwable lastException = null; for (Iterator projIt = projects.iterator(); projIt.hasNext();) { MavenProject proj = (MavenProject) projIt.next(); PluginLog.getLog().info("Undeploying processing unit: " + proj.getBuild().getFinalName()); String[] attributesArray = createAttributesArray(proj.getBuild().getFinalName()); try { Class deployClass = Class.forName("org.openspaces.pu.container.servicegrid.deploy.Undeploy", true, Thread.currentThread().getContextClassLoader()); deployClass.getMethod("main", new Class[] { String[].class }).invoke(null, new Object[] { attributesArray }); } catch (InvocationTargetException e) { lastException = e.getTargetException(); failureCount++; PluginLog.getLog().info("Failed to undeploy processing unit: " + proj.getBuild().getFinalName() + " reason: " + e.getTargetException().getMessage()); } catch (Exception e) { lastException = e; PluginLog.getLog().info("Failed to undeploy processing unit: " + proj.getBuild().getFinalName() + " reason: " + e.getMessage()); } } if (failureCount == projects.size() && lastException != null) { throw new MojoExecutionException(lastException.getMessage(), lastException); } }
From source file:org.openspaces.maven.plugin.Utils.java
License:Apache License
/**
 * Resolves the processing unit's dependencies classpath.
 *
 * @param project the processing unit project
 * @param includeScopes the scopes of the dependencies to include
 * @param includeProjects whether to include project's output directories
 * @param reactorProjects the reactor projects
 * @param dependencyTreeBuilder the dependency tree builder
 * @param metadataSource the metadata source
 * @param artifactCollector the artifact collector
 * @param artifactResolver the artifact resolver
 * @param artifactFactory the artifact factory
 * @param localRepository the local repository
 * @param remoteRepositories the remote repositories
 * @return a list containing all dependency URLs.
 * @throws Exception if the dependency tree cannot be built
 */
static List resolveExecutionClasspath(MavenProject project, String[] includeScopes, boolean includeProjects,
        List reactorProjects, DependencyTreeBuilder dependencyTreeBuilder, ArtifactMetadataSource metadataSource,
        ArtifactCollector artifactCollector, ArtifactResolver artifactResolver, ArtifactFactory artifactFactory,
        ArtifactRepository localRepository, List remoteRepositories) throws Exception {
    Set scopes = new HashSet(includeScopes.length);
    Collections.addAll(scopes, includeScopes);
    // resolve all dependency of the specifies scope
    // scope 'test' is the widest scope available.
    ArtifactFilter artifactFilter = new ScopeArtifactFilter("test");
    DependencyNode root = dependencyTreeBuilder.buildDependencyTree(project, localRepository, artifactFactory,
            metadataSource, artifactFilter, artifactCollector);
    // resolve all dependency files. if the dependency is a referenced project and not
    // a file in the repository add its output directory to the classpath.
    Iterator i = root.preorderIterator();
    Set artifacts = new HashSet();
    while (i.hasNext()) {
        DependencyNode node = (DependencyNode) i.next();
        // the dependency may not be included due to duplication
        // dependency cycles and version conflict.
        // don't include those in the classpath.
        if (node.getState() != DependencyNode.INCLUDED) {
            PluginLog.getLog().debug("Not including dependency: " + node);
            continue;
        }
        Artifact artifact = node.getArtifact();
        if (artifact.getFile() == null) {
            try {
                // if file is not found an exception is thrown
                artifactResolver.resolve(artifact, remoteRepositories, localRepository);
            } catch (Exception e) {
                if (includeProjects) {
                    // try to see if the dependency is a referenced project
                    Iterator projectsIterator = reactorProjects.iterator();
                    while (projectsIterator.hasNext()) {
                        MavenProject proj = (MavenProject) projectsIterator.next();
                        if (proj.getArtifactId().equals(artifact.getArtifactId())) {
                            // NOTE(review): the reactor project resolves to its
                            // output *directory* rather than a jar — presumably
                            // intended since class loaders accept directory
                            // entries; confirm the directory exists at run time.
                            artifact.setFile(new File(proj.getBuild().getOutputDirectory()));
                            break;
                        }
                    }
                }
            }
        }
        if (!scopes.contains(artifact.getScope())) {
            // Artifacts with a non-null scope outside the requested scopes are
            // always skipped.
            if (artifact.getScope() != null) {
                continue;
            }
            // A null scope is kept only when project outputs were requested and
            // the artifact belongs to this project — presumably the project's
            // own artifact carries no scope; verify against the tree builder.
            // if it's not the same project don't add
            if (!includeProjects || !project.getArtifactId().equals(artifact.getArtifactId())) {
                continue;
            }
        }
        artifacts.add(artifact);
    }
    return getArtifactURLs(artifacts);
}