List of usage examples for java.nio.file Path toUri
URI toUri();
From source file:org.n52.wps.server.database.PostgresDatabase.java
@Override protected synchronized String insertResultEntity(InputStream stream, String id, String type, String mimeType) { Timestamp timestamp = new Timestamp(Calendar.getInstance().getTimeInMillis()); FileInputStream fis = null;//from w w w . j a v a 2s. c o m Boolean storingOutput = null != id && id.toLowerCase().contains("output"); Boolean saveResultsToDB = Boolean.parseBoolean(getDatabaseProperties("saveResultsToDB")); String filename = storingOutput ? id : UUID.randomUUID().toString(); Path filePath = new File(BASE_DIRECTORY, filename).toPath(); try { filePath = Files.createFile(filePath); Files.copy(stream, filePath, StandardCopyOption.REPLACE_EXISTING); fis = new FileInputStream(filePath.toFile()); AbstractDatabase.insertSQL.setString(INSERT_COLUMN_REQUEST_ID, id); AbstractDatabase.insertSQL.setTimestamp(INSERT_COLUMN_REQUEST_DATE, timestamp); AbstractDatabase.insertSQL.setString(INSERT_COLUMN_RESPONSE_TYPE, type); AbstractDatabase.insertSQL.setString(INSERT_COLUMN_MIME_TYPE, mimeType); if (storingOutput) { if (!saveResultsToDB) { byte[] filePathByteArray = filePath.toUri().toString().getBytes(); AbstractDatabase.insertSQL.setAsciiStream(INSERT_COLUMN_RESPONSE, new ByteArrayInputStream(filePathByteArray), filePathByteArray.length); } else { AbstractDatabase.insertSQL.setAsciiStream(INSERT_COLUMN_RESPONSE, fis, (int) filePath.toFile().length()); } } else { AbstractDatabase.insertSQL.setAsciiStream(INSERT_COLUMN_RESPONSE, fis, (int) filePath.toFile().length()); } AbstractDatabase.insertSQL.executeUpdate(); getConnection().commit(); } catch (SQLException e) { LOGGER.error("Could not insert Response into database.", e); } catch (IOException e) { LOGGER.error("Could not insert Response into database.", e); } finally { if (fis != null) { try { fis.close(); } catch (IOException e) { LOGGER.error("Could not close file input stream", e); } } // If we are storing output, we want to only delete the file if we're // storing the results to the database. 
Otherwise, don't delete the // file since that will be served on request if (filePath != null) { try { if (storingOutput) { if (saveResultsToDB) { Files.deleteIfExists(filePath); } } else { Files.deleteIfExists(filePath); } } catch (IOException e) { LOGGER.error("Could not delete file: " + filePath.toString(), e); } } } return generateRetrieveResultURL(id); }
From source file:com.netflix.nicobar.core.archive.JarScriptArchive.java
protected JarScriptArchive(ScriptModuleSpec moduleSpec, Path jarPath, String moduleSpecEntry, long createTime) throws IOException { this.createTime = createTime; this.moduleSpec = Objects.requireNonNull(moduleSpec, "moduleSpec"); Objects.requireNonNull(jarPath, "jarFile"); if (!jarPath.isAbsolute()) throw new IllegalArgumentException("jarPath must be absolute."); // initialize the index JarFile jarFile = new JarFile(jarPath.toFile()); Set<String> indexBuilder; try {//from ww w . ja v a2 s. com Enumeration<JarEntry> jarEntries = jarFile.entries(); indexBuilder = new HashSet<String>(); while (jarEntries.hasMoreElements()) { JarEntry jarEntry = jarEntries.nextElement(); // Skip adding moduleSpec to archive entries if (jarEntry.getName().equals(moduleSpecEntry)) { continue; } if (!jarEntry.isDirectory()) { indexBuilder.add(jarEntry.getName()); } } } finally { jarFile.close(); } entryNames = Collections.unmodifiableSet(indexBuilder); rootUrl = jarPath.toUri().toURL(); }
From source file:org.elasticsearch.plugins.PluginManagerIT.java
/** creates a plugin .zip and bad checksum file and returns the url for testing */ private String createPluginWithBadChecksum(final Path structure, String... properties) throws IOException { writeProperties(structure, properties); Path zip = createTempDir().resolve(structure.getFileName() + ".zip"); try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { Files.walkFileTree(structure, new SimpleFileVisitor<Path>() { @Override//from w w w .j av a 2s . c o m public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { stream.putNextEntry(new ZipEntry(structure.relativize(file).toString())); Files.copy(file, stream); return FileVisitResult.CONTINUE; } }); } if (randomBoolean()) { writeSha1(zip, true); } else { writeMd5(zip, true); } return zip.toUri().toURL().toString(); }
From source file:org.apache.openaz.xacml.std.pap.StdPDPGroup.java
public PDPPolicy publishPolicy(String id, String name, boolean isRoot, InputStream policy) throws PAPException { ////from www. ja v a 2s . co m // Does it exist already? // if (this.getPolicy(id) != null) { throw new PAPException("Policy with id " + id + " already exists - unpublish it first."); } Path tempFile = null; try { // // Copy the policy over // tempFile = Files.createFile(Paths.get(this.directory.toAbsolutePath().toString(), id)); long num; try (OutputStream os = Files.newOutputStream(tempFile)) { num = ByteStreams.copy(policy, os); } logger.info("Copied " + num + " bytes for policy " + name); StdPDPPolicy tempRootPolicy = new StdPDPPolicy(id, isRoot, name, tempFile.toUri()); if (!tempRootPolicy.isValid()) { try { Files.delete(tempFile); } catch (Exception ee) { logger.error("Policy was invalid, could NOT delete it.", ee); } throw new PAPException("Policy is invalid"); } // // Add it in // this.policies.add(tempRootPolicy); // // We are changed // this.firePDPGroupChanged(this); // // Return our new object. // return tempRootPolicy; } catch (IOException e) { logger.error("Failed to publishPolicy: ", e); } return null; }
From source file:org.elasticsearch.plugins.PluginManagerIT.java
/** creates a plugin .zip and returns the url for testing */ private String createPlugin(final Path structure, String... properties) throws IOException { writeProperties(structure, properties); Path zip = createTempDir().resolve(structure.getFileName() + ".zip"); try (ZipOutputStream stream = new ZipOutputStream(Files.newOutputStream(zip))) { Files.walkFileTree(structure, new SimpleFileVisitor<Path>() { @Override//from w ww . j a va2s .c o m public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { stream.putNextEntry(new ZipEntry(structure.relativize(file).toString())); Files.copy(file, stream); return FileVisitResult.CONTINUE; } }); } if (randomBoolean()) { writeSha1(zip, false); } else if (randomBoolean()) { writeMd5(zip, false); } return zip.toUri().toURL().toString(); }
From source file:org.apache.taverna.databundle.DataBundles.java
/** * Deeply resolve a {@link Path} to JVM objects. * <p>//from w w w. ja va2s. co m * This method is intended mainly for presentational uses * with a particular input/output port from * {@link #getPorts(Path)} or {@link #getPort(Path, String)}. * <p> * Note that as all lists are resolved deeply (including lists of lists) * and when using options {@link ResolveOptions#STRING} or {@link ResolveOptions#BYTES} * the full content of the values are read into memory, this * method can be time-consuming. * <p> * If the path is <code>null</code> or {@link #isMissing(Path)}, * <code>null</code> is returned, unless the option * {@link ResolveOptions#REPLACE_NULL} is specified, which would return the * empty String "". * <p> * If the path {@link #isValue(Path)} and the option * {@link ResolveOptions#STRING} is specified, its * {@link #getStringValue(Path)} is returned (assuming an UTF-8 encoding). * NOTE: Binary formats (e.g. PNG) will NOT be represented correctly read as * UTF-8 String and should instead be read directly with * {@link Files#newInputStream(Path, java.nio.file.OpenOption...)}. Note * that this could consume a large amount of memory as no size checks are * performed. * <p> * If the option {@link ResolveOptions#URI} is specified, all non-missing * non-error leaf values are resolved as a {@link URI}. If the path is a * {@link #isReference(Path)} the URI will be the reference from * {@link #getReference(Path)}, otherwise the URI will * identify a {@link Path} within the current {@link Bundle}. * <p> * If the path {@link #isValue(Path)} and the option * {@link ResolveOptions#BYTES} is specified, the complete content is returned as * a <code>byte[]</code>. Note that this could consume a large amount of memory * as no size checks are performed. 
* <p> * If the path {@link #isError(Path)}, the corresponding * {@link ErrorDocument} is returned, except when the option * {@link ResolveOptions#REPLACE_ERRORS} is specified, which means errors are * returned as <code>null</code> (or <code>""</code> if {@link ResolveOptions#REPLACE_NULL} is also specified). * <p> * If the path {@link #isReference(Path)} and the option * {@link ResolveOptions#URI} is <strong>not</strong> set, * either a {@link File} or a {@link URL} is returned, * depending on its protocol. If the reference protocol has no * corresponding {@link URLStreamHandler}, a {@link URI} is returned * instead. * <p> * If the path {@link #isList(Path)}, a {@link List} is returned * corresponding to resolving the paths from {@link #getList(Path)}. using * this method with the same options. * <p> * If none of the above, the {@link Path} itself is returned. This is * thus the default for non-reference non-error leaf values if neither * {@link ResolveOptions#STRING}, {@link ResolveOptions#BYTES} or * {@link ResolveOptions#URI} are specified. * To force returning of {@link Path}s for all non-missing leaf values, specify * {@link ResolveOptions#PATH}; * * @param path * Data bundle path to resolve * @param options * Resolve options * @return <code>null</code>, a {@link String}, {@link ErrorDocument}, * {@link URL}, {@link File}, {@link Path} or {@link List} * (containing any of these) depending on the path type and the options. * @throws IOException * If the path (or any of the path in a contained list) can't be * accessed */ @SuppressWarnings({ "unchecked", "rawtypes" }) public static Object resolve(Path path, ResolveOptions... 
options) throws IOException { EnumSet<ResolveOptions> opt; if (options.length == 0) { opt = EnumSet.of(ResolveOptions.DEFAULT); // no-op } else { opt = EnumSet.of(ResolveOptions.DEFAULT, options); } if (opt.contains(ResolveOptions.BYTES) && opt.contains(ResolveOptions.STRING)) { throw new IllegalArgumentException("Incompatible options: BYTES and STRING"); } if (opt.contains(ResolveOptions.BYTES) && opt.contains(ResolveOptions.PATH)) { throw new IllegalArgumentException("Incompatible options: BYTES and PATH"); } if (opt.contains(ResolveOptions.BYTES) && opt.contains(ResolveOptions.URI)) { throw new IllegalArgumentException("Incompatible options: BYTES and URI"); } if (opt.contains(ResolveOptions.STRING) && opt.contains(ResolveOptions.PATH)) { throw new IllegalArgumentException("Incompatible options: STRING and PATH"); } if (opt.contains(ResolveOptions.STRING) && opt.contains(ResolveOptions.URI)) { throw new IllegalArgumentException("Incompatible options: STRING and URI"); } if (opt.contains(ResolveOptions.PATH) && opt.contains(ResolveOptions.URI)) { throw new IllegalArgumentException("Incompatible options: PATH and URI"); } if (path == null || isMissing(path)) { if (!opt.contains(ResolveOptions.REPLACE_NULL)) { return null; } if (opt.contains(ResolveOptions.BYTES)) { return new byte[0]; } if (opt.contains(ResolveOptions.PATH)) { return path; } if (opt.contains(ResolveOptions.URI)) { return path.toUri(); } // STRING and DEFAULT return ""; } if (isList(path)) { List<Path> list = getList(path); List<Object> objectList = new ArrayList<Object>(list.size()); for (Path pathElement : list) { objectList.add(resolve(pathElement, options)); } return objectList; } if (opt.contains(ResolveOptions.PATH)) { return path; } if (isError(path)) { if (opt.contains(ResolveOptions.REPLACE_ERRORS)) { return opt.contains(ResolveOptions.REPLACE_NULL) ? 
"" : null; } return getError(path); } if (opt.contains(ResolveOptions.URI)) { if (isReference(path)) { return getReference(path); } else { return path.toUri(); } } if (isReference(path)) { URI reference = getReference(path); String scheme = reference.getScheme(); if ("file".equals(scheme)) { return new File(reference); } else { try { return reference.toURL(); } catch (IllegalArgumentException | MalformedURLException e) { return reference; } } } if (isValue(path)) { if (opt.contains(ResolveOptions.BYTES)) { return Files.readAllBytes(path); } if (opt.contains(ResolveOptions.STRING)) { return getStringValue(path); } } // Fall-back - return Path as-is return path; }
From source file:org.apache.openaz.xacml.std.pap.StdPDPGroup.java
/**
 * Copy one policy file into the Group's directory but do not change the configuration. This is one part
 * of a multi-step process of publishing policies. There may be multiple changes in the group (adding
 * multiple policies, deleting policies, changing root<->referenced) that must be done all at once, so we
 * just copy the file in preparation for a later "update whole group" operation.
 *
 * @param id the policy identifier; also used as the file name inside the group directory
 * @param policy stream with the policy content to copy; fully consumed but not closed here
 * @throws org.apache.openaz.xacml.api.pap.PAPException if the copy fails or the copied policy is invalid
 */
public void copyPolicyToFile(String id, InputStream policy) throws PAPException {
    try {
        //
        // Copy the policy over
        //
        long num;
        Path policyFilePath = Paths.get(this.directory.toAbsolutePath().toString(), id);
        //
        // THERE IS A WEIRD PROBLEM ON WINDOWS...
        // The file is already "in use" when we try to access it.
        // Looking at the file externally while this is halted here does not show the file in use,
        // so there is no indication what is causing the problem.
        //
        // As a way to by-pass the issue, I simply check if the input and the existing file are identical
        // and generate an exception if they are not.
        //
        //            if (Files.exists(policyFilePath)) {
        //                // compare the
        //                String incomingPolicyString = null;
        //                try (ByteArrayOutputStream os = new ByteArrayOutputStream()) {
        //                    num = ByteStreams.copy(policy, os);
        //                    incomingPolicyString = new String(os.toByteArray(), "UTF-8");
        //                }
        //                String existingPolicyString = null;
        //                try {
        //                    byte[] bytes = Files.readAllBytes(policyFilePath);
        //                    existingPolicyString = new String(bytes, "UTF-8");
        //                } catch (Exception e) {
        //                    logger.error("Unable to read existing file '" + policyFilePath + "': " + e, e);
        //                    throw new PAPException("Unable to read policy file for comparison: " + e);
        //                }
        //                if (incomingPolicyString.equals(existingPolicyString)) {
        //                    throw new PAPException("Policy '" + policyFilePath +
        //                                           "' does not match existing policy on server");
        //                }
        //                // input is same as existing file
        //                return;
        //            }
        // Reuse the existing file if present; otherwise create it.
        Path policyFile;
        if (Files.exists(policyFilePath)) {
            policyFile = policyFilePath;
        } else {
            policyFile = Files.createFile(policyFilePath);
        }
        try (OutputStream os = Files.newOutputStream(policyFile)) {
            num = ByteStreams.copy(policy, os);
        }
        // NOTE(review): 'name' is not a parameter of this method — presumably an instance
        // field of the group; confirm against the full class.
        logger.info("Copied " + num + " bytes for policy " + name);
        for (PDPPolicy p : policies) {
            if (p.getId().equals(id)) {
                // we just re-copied/refreshed/updated the policy file for a policy that already exists in
                // this group
                logger.info("Policy '" + id + "' already exists in group '" + getId() + "'");
                return;
            }
        }
        // policy is new to this group; validate it before registering.
        StdPDPPolicy tempRootPolicy = new StdPDPPolicy(id, true, name, policyFile.toUri());
        if (!tempRootPolicy.isValid()) {
            try {
                Files.delete(policyFile);
            } catch (Exception ee) {
                logger.error("Policy was invalid, could NOT delete it.", ee);
            }
            throw new PAPException("Policy is invalid");
        }
        //
        // Add it in
        //
        this.policies.add(tempRootPolicy);
        //
        // We are changed
        //
        this.firePDPGroupChanged(this);
    } catch (IOException e) {
        logger.error("Failed to copyPolicyToFile: ", e);
        throw new PAPException("Failed to copy policy to file: " + e);
    }
}
From source file:org.apache.hive.beeline.BeeLine.java
private int executeFile(String fileName) { InputStream fileStream = null; try {/* w ww . ja v a2s .c o m*/ if (!isBeeLine) { org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(fileName); FileSystem fs; HiveConf conf = getCommands().getHiveConf(true); if (!path.toUri().isAbsolute()) { fs = FileSystem.getLocal(conf); path = fs.makeQualified(path); } else { fs = FileSystem.get(path.toUri(), conf); } fileStream = fs.open(path); } else { fileStream = new FileInputStream(fileName); } return execute(initializeConsoleReader(fileStream), !getOpts().getForce()); } catch (Throwable t) { handleException(t); return ERRNO_OTHER; } finally { IOUtils.closeStream(fileStream); } }
From source file:org.opencb.opencga.app.cli.main.OpenCGAMainOld.java
private int runCommand(OptionsParser optionsParser) throws Exception { int returnValue = 0; if (catalogManager == null && !optionsParser.getSubCommand().isEmpty()) { CatalogConfiguration catalogConfiguration = CatalogConfiguration.load(new FileInputStream( Paths.get(Config.getOpenCGAHome(), "conf", "catalog-configuration.yml").toFile())); catalogManager = new CatalogManager(catalogConfiguration); }/*from w w w . j av a 2 s .c o m*/ String sessionId = login(optionsParser.getUserAndPasswordOptions()); switch (optionsParser.getCommand()) { case "users": switch (optionsParser.getSubCommand()) { case "create": { OptionsParser.UserCommands.CreateCommand c = optionsParser.getUserCommands().createCommand; //QueryResult<User> user = catalogManager.insertUser(new User(c.up.user, c.name, c.email, c.up.password, c.organization, User.Role.USER, "")); QueryResult<User> user = catalogManager.createUser(c.user, c.name, c.email, c.password, c.organization, null, null); System.out.println(createOutput(c.cOpt, user, null)); break; } case "info": { OptionsParser.UserCommands.InfoCommand c = optionsParser.getUserCommands().infoCommand; QueryResult<User> user = catalogManager.getUser( c.up.user != null ? c.up.user : catalogManager.getUserIdBySessionId(sessionId), null, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, user, null)); break; } case "list": { OptionsParser.UserCommands.ListCommand c = optionsParser.getUserCommands().listCommand; String indent = ""; User user = catalogManager.getUser( c.up.user != null ? c.up.user : catalogManager.getUserIdBySessionId(sessionId), null, new QueryOptions("include", Arrays.asList("id", "name", "projects.id", "projects.alias", "projects.name")), sessionId).first(); System.out.println(user.getId() + " - " + user.getName()); indent += "\t"; System.out.println(listProjects(user.getProjects(), c.recursive ? 
c.level : 1, indent, c.uries, new StringBuilder(), sessionId)); break; } case "login": { OptionsParser.UserCommands.LoginCommand c = optionsParser.getUserCommands().loginCommand; // if (c.up.user == null || c.up.user.isEmpty()) { // throw new CatalogException("Required userId"); // } shellSessionId = sessionId; logoutAtExit = false; if (shellSessionId != null) { shellUserId = c.up.user; } if (sessionFile == null) { sessionFile = new SessionFile(); } sessionFile.setSessionId(sessionId); sessionFile.setUserId(catalogManager.getUserIdBySessionId(sessionId)); saveUserFile(sessionFile); System.out.println(shellSessionId); break; } case "logout": { OptionsParser.UserCommands.LogoutCommand c = optionsParser.getUserCommands().logoutCommand; QueryResult logout; if (c.sessionId == null) { //Logout from interactive shell logout = catalogManager.logout(shellUserId, shellSessionId); shellUserId = null; shellSessionId = null; if (sessionIdFromFile) { sessionFile.setSessionId(null); sessionFile.setUserId(null); saveUserFile(sessionFile); } } else { String userId = catalogManager.getUserIdBySessionId(c.sessionId); logout = catalogManager.logout(userId, c.sessionId); } logoutAtExit = false; System.out.println(logout); break; } default: optionsParser.printUsage(); break; } break; case "projects": switch (optionsParser.getSubCommand()) { case "create": { OptionsParser.ProjectCommands.CreateCommand c = optionsParser.getProjectCommands().createCommand; String user = c.up.user == null || c.up.user.isEmpty() ? 
catalogManager.getUserIdBySessionId(sessionId) : c.up.user; QueryResult<Project> project = catalogManager.createProject(c.name, c.alias, c.description, c.organization, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, project, null)); break; } case "info": { OptionsParser.ProjectCommands.InfoCommand c = optionsParser.getProjectCommands().infoCommand; long projectId = catalogManager.getProjectId(c.id); QueryResult<Project> project = catalogManager.getProject(projectId, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, project, null)); break; } // case "share": { // OptionsParser.CommandShareResource c = optionsParser.commandShareResource; // // int projectId = catalogManager.getProjectId(c.id); // QueryResult result = catalogManager.shareProject(projectId, new AclEntry(c.user, c.read, c.write, c.execute, c.delete), sessionId); // System.out.println(createOutput(c.cOpt, result, null)); // // break; // } default: optionsParser.printUsage(); break; } break; case "studies": switch (optionsParser.getSubCommand()) { case "create": { OptionsParser.StudyCommands.CreateCommand c = optionsParser.getStudyCommands().createCommand; URI uri = null; if (c.uri != null && !c.uri.isEmpty()) { uri = UriUtils.createUri(c.uri); } Map<File.Bioformat, DataStore> dataStoreMap = parseBioformatDataStoreMap(c); long projectId = catalogManager.getProjectId(c.projectId); ObjectMap attributes = new ObjectMap(); attributes.put(VariantStorageManager.Options.AGGREGATED_TYPE.key(), c.aggregated.toString()); QueryResult<Study> study = catalogManager.createStudy(projectId, c.name, c.alias, c.type, null, c.description, null, null, null, uri, dataStoreMap, null, attributes, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); if (uri != null) { File root = catalogManager.searchFile(study.first().getId(), new Query(FileDBAdaptor.QueryParams.PATH.key(), ""), sessionId).first(); new 
FileScanner(catalogManager).scan(root, uri, FileScanner.FileScannerPolicy.REPLACE, true, false, sessionId); } System.out.println(createOutput(c.cOpt, study, null)); break; } case "resync": { OptionsParser.StudyCommands.ResyncCommand c = optionsParser.getStudyCommands().resyncCommand; long studyId = catalogManager.getStudyId(c.id); Study study = catalogManager.getStudy(studyId, sessionId).first(); FileScanner fileScanner = new FileScanner(catalogManager); List<File> scan = fileScanner.reSync(study, c.calculateChecksum, sessionId); System.out.println(createOutput(c.cOpt, scan, null)); break; } case "check-files": { OptionsParser.StudyCommands.CheckCommand c = optionsParser.getStudyCommands().checkCommand; long studyId = catalogManager.getStudyId(c.id); Study study = catalogManager.getStudy(studyId, sessionId).first(); FileScanner fileScanner = new FileScanner(catalogManager); List<File> check = fileScanner.checkStudyFiles(study, c.calculateChecksum, sessionId); System.out.println(createOutput(c.cOpt, check, null)); break; } case "info": { OptionsParser.StudyCommands.InfoCommand c = optionsParser.getStudyCommands().infoCommand; long studyId = catalogManager.getStudyId(c.id); QueryResult<Study> study = catalogManager.getStudy(studyId, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, study, null)); break; } case "list": { OptionsParser.StudyCommands.ListCommand c = optionsParser.getStudyCommands().listCommand; long studyId = catalogManager.getStudyId(c.id); List<Study> studies = catalogManager.getStudy(studyId, sessionId).getResult(); String indent = ""; System.out.println(listStudies(studies, c.recursive ? 
c.level : 1, indent, c.uries, new StringBuilder(), sessionId)); break; } case "status": { OptionsParser.StudyCommands.StatusCommand c = optionsParser.getStudyCommands().statusCommand; long studyId = catalogManager.getStudyId(c.id); Study study = catalogManager.getStudy(studyId, sessionId).first(); FileScanner fileScanner = new FileScanner(catalogManager); /** First, run CheckStudyFiles to find new missing files **/ List<File> checkStudyFiles = fileScanner.checkStudyFiles(study, false, sessionId); List<File> found = checkStudyFiles.stream() .filter(f -> f.getStatus().getName().equals(File.FileStatus.READY)) .collect(Collectors.toList()); int maxFound = found.stream().map(f -> f.getPath().length()).max(Comparator.<Integer>naturalOrder()) .orElse(0); /** Get untracked files **/ // List<URI> untrackedFiles = fileScanner.untrackedFiles(study, sessionId); // // URI studyUri = catalogManager.getStudyUri(studyId); // Map<URI, String> relativeUrisMap = untrackedFiles.stream().collect(Collectors.toMap((k) -> k, (u) -> studyUri.relativize(u).toString())); Map<String, URI> relativeUrisMap = fileScanner.untrackedFiles(study, sessionId); int maxUntracked = relativeUrisMap.keySet().stream().map(String::length) .max(Comparator.<Integer>naturalOrder()).orElse(0); /** Get missing files **/ List<File> missingFiles = catalogManager.getAllFiles(studyId, new Query(FileDBAdaptor.QueryParams.FILE_STATUS.key(), File.FileStatus.MISSING), new QueryOptions(), sessionId).getResult(); int maxMissing = missingFiles.stream().map(f -> f.getPath().length()) .max(Comparator.<Integer>naturalOrder()).orElse(0); /** Print pretty **/ String format = "\t%-" + Math.max(Math.max(maxMissing, maxUntracked), maxFound) + "s -> %s\n"; if (!relativeUrisMap.isEmpty()) { System.out.println("UNTRACKED files"); relativeUrisMap.forEach((s, u) -> System.out.printf(format, s, u)); System.out.println("\n"); } if (!missingFiles.isEmpty()) { System.out.println("MISSING files"); for (File file : missingFiles) { 
System.out.printf(format, file.getPath(), catalogManager.getFileUri(file)); } System.out.println("\n"); } if (!found.isEmpty()) { System.out.println("FOUND files"); for (File file : found) { System.out.printf(format, file.getPath(), catalogManager.getFileUri(file)); } } break; } case "annotate-variants": { OptionsParser.StudyCommands.AnnotationCommand c = optionsParser .getStudyCommands().annotationCommand; VariantStorage variantStorage = new VariantStorage(catalogManager); long studyId = catalogManager.getStudyId(c.id); long outdirId = catalogManager.getFileId(c.outdir); QueryOptions queryOptions = new QueryOptions(c.cOpt.getQueryOptions()); queryOptions.put(ExecutorManager.EXECUTE, !c.enqueue); queryOptions.add(AnalysisFileIndexer.PARAMETERS, c.dashDashParameters); queryOptions.add(AnalysisFileIndexer.LOG_LEVEL, logLevel); System.out.println(createOutput(c.cOpt, variantStorage.annotateVariants(studyId, outdirId, sessionId, queryOptions), null)); break; } // case "share": { // OptionsParser.CommandShareResource c = optionsParser.commandShareResource; // // int studyId = catalogManager.getStudyId(c.id); // QueryResult result = catalogManager.shareProject(studyId, new AclEntry(c.user, c.read, c.write, c.execute, c.delete), sessionId); // System.out.println(createOutput(c.cOpt, result, null)); // // break; // } default: optionsParser.printUsage(); break; } break; case "files": { switch (optionsParser.getSubCommand()) { case "create": { OptionsParser.FileCommands.CreateCommand c = optionsParser.getFileCommands().createCommand; long studyId = catalogManager.getStudyId(c.studyId); Path inputFile = Paths.get(c.inputFile); URI sourceUri = new URI(null, c.inputFile, null); if (sourceUri.getScheme() == null || sourceUri.getScheme().isEmpty()) { sourceUri = inputFile.toUri(); } if (!catalogManager.getCatalogIOManagerFactory().get(sourceUri).exists(sourceUri)) { throw new IOException("File " + sourceUri + " does not exist"); } QueryResult<File> file = 
catalogManager.createFile(studyId, c.format, c.bioformat, Paths.get(c.path, inputFile.getFileName().toString()).toString(), c.description, c.parents, -1, sessionId); new CatalogFileUtils(catalogManager).upload(sourceUri, file.first(), null, sessionId, false, false, c.move, c.calculateChecksum); FileMetadataReader.get(catalogManager).setMetadataInformation(file.first(), null, new QueryOptions(c.cOpt.getQueryOptions()), sessionId, false); System.out.println(createOutput(c.cOpt, file, null)); break; } case "create-folder": { OptionsParser.FileCommands.CreateFolderCommand c = optionsParser .getFileCommands().createFolderCommand; long studyId = catalogManager.getStudyId(c.studyId); QueryResult<File> folder = catalogManager.createFolder(studyId, Paths.get(c.path), c.parents, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, folder, null)); break; } case "upload": { OptionsParser.FileCommands.UploadCommand c = optionsParser.getFileCommands().uploadCommand; URI sourceUri = new URI(null, c.inputFile, null); if (sourceUri.getScheme() == null || sourceUri.getScheme().isEmpty()) { sourceUri = Paths.get(c.inputFile).toUri(); } if (!catalogManager.getCatalogIOManagerFactory().get(sourceUri).exists(sourceUri)) { throw new IOException("File " + sourceUri + " does not exist"); } long fileId = catalogManager.getFileId(c.id); QueryResult<File> file = catalogManager.getFile(fileId, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); new CatalogFileUtils(catalogManager).upload(sourceUri, file.first(), null, sessionId, c.replace, c.replace, c.move, c.calculateChecksum); System.out.println(createOutput(c.cOpt, catalogManager.getFile(file.first().getId(), new QueryOptions(c.cOpt.getQueryOptions()), sessionId), null)); break; } case "link": { OptionsParser.FileCommands.LinkCommand c = optionsParser.getFileCommands().linkCommand; Path inputFile = Paths.get(c.inputFile); URI inputUri = UriUtils.createUri(c.inputFile); CatalogIOManager 
ioManager = catalogManager.getCatalogIOManagerFactory().get(inputUri); if (!ioManager.exists(inputUri)) { throw new FileNotFoundException("File " + inputUri + " not found"); } // long studyId = catalogManager.getStudyId(c.studyId); String path = c.path.isEmpty() ? inputFile.getFileName().toString() : Paths.get(c.path, inputFile.getFileName().toString()).toString(); File file; CatalogFileUtils catalogFileUtils = new CatalogFileUtils(catalogManager); if (ioManager.isDirectory(inputUri)) { ObjectMap params = new ObjectMap("parents", c.parents); file = catalogManager.link(inputUri, c.path, c.studyId, params, sessionId).first(); // file = catalogFileUtils.linkFolder(studyId, path, c.parents, c.description, c.calculateChecksum, inputUri, false, false, sessionId); new FileScanner(catalogManager).scan(file, null, FileScanner.FileScannerPolicy.REPLACE, c.calculateChecksum, false, sessionId); } else { ObjectMap params = new ObjectMap("parents", c.parents); file = catalogManager.link(inputUri, c.path, c.studyId, params, sessionId).first(); // file = catalogManager.createFile(studyId, null, null, // path, c.description, c.parents, -1, sessionId).first(); // file = catalogFileUtils.link(file, c.calculateChecksum, inputUri, false, false, sessionId); file = FileMetadataReader.get(catalogManager).setMetadataInformation(file, null, new QueryOptions(c.cOpt.getQueryOptions()), sessionId, false); } System.out.println(createOutput(c.cOpt, file, null)); break; } case "relink": { OptionsParser.FileCommands.RelinkCommand c = optionsParser.getFileCommands().relinkCommand; Path inputFile = Paths.get(c.inputFile); URI uri = UriUtils.createUri(c.inputFile); if (!inputFile.toFile().exists()) { throw new FileNotFoundException("File " + uri + " not found"); } long fileId = catalogManager.getFileId(c.id, sessionId); File file = catalogManager.getFile(fileId, sessionId).first(); new CatalogFileUtils(catalogManager).link(file, c.calculateChecksum, uri, false, true, sessionId); file = 
catalogManager.getFile(file.getId(), new QueryOptions(c.cOpt.getQueryOptions()), sessionId) .first(); file = FileMetadataReader.get(catalogManager).setMetadataInformation(file, null, new QueryOptions(c.cOpt.getQueryOptions()), sessionId, false); System.out.println(createOutput(c.cOpt, file, null)); break; } case "refresh": { OptionsParser.FileCommands.RefreshCommand c = optionsParser.getFileCommands().refreshCommand; long fileId = catalogManager.getFileId(c.id); File file = catalogManager.getFile(fileId, sessionId).first(); List<File> files; QueryOptions queryOptions = new QueryOptions(c.cOpt.getQueryOptions()); CatalogFileUtils catalogFileUtils = new CatalogFileUtils(catalogManager); FileMetadataReader fileMetadataReader = FileMetadataReader.get(catalogManager); if (file.getType() == File.Type.FILE) { File file1 = catalogFileUtils.checkFile(file, false, sessionId); file1 = fileMetadataReader.setMetadataInformation(file1, null, queryOptions, sessionId, false); if (file == file1) { //If the file is the same, it was not modified. Only return modified files. files = Collections.emptyList(); } else { files = Collections.singletonList(file); } } else { List<File> result = catalogManager.getAllFilesInFolder(file.getId(), null, sessionId) .getResult(); files = new ArrayList<>(result.size()); for (File f : result) { File file1 = fileMetadataReader.setMetadataInformation(f, null, queryOptions, sessionId, false); if (f != file1) { //Add only modified files. 
files.add(file1); } } } System.out.println(createOutput(c.cOpt, files, null)); break; } case "info": { OptionsParser.FileCommands.InfoCommand c = optionsParser.getFileCommands().infoCommand; long fileId = catalogManager.getFileId(c.id); QueryResult<File> file = catalogManager.getFile(fileId, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(optionsParser.getCommonOptions(), file, null)); break; } case "search": { OptionsParser.FileCommands.SearchCommand c = optionsParser.getFileCommands().searchCommand; long studyId = catalogManager.getStudyId(c.studyId); Query query = new Query(); if (c.name != null) query.put(FileDBAdaptor.QueryParams.NAME.key(), "~" + c.name); if (c.directory != null) query.put(FileDBAdaptor.QueryParams.DIRECTORY.key(), c.directory); if (c.bioformats != null) query.put(FileDBAdaptor.QueryParams.BIOFORMAT.key(), c.bioformats); if (c.types != null) query.put(FileDBAdaptor.QueryParams.TYPE.key(), c.types); if (c.status != null) query.put(FileDBAdaptor.QueryParams.STATUS_NAME.key(), c.status); QueryResult<File> fileQueryResult = catalogManager.searchFile(studyId, query, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(optionsParser.getCommonOptions(), fileQueryResult, null)); break; } case "list": { OptionsParser.FileCommands.ListCommand c = optionsParser.getFileCommands().listCommand; long fileId = catalogManager.getFileId(c.id); List<File> result = catalogManager.getFile(fileId, sessionId).getResult(); long studyId = catalogManager.getStudyIdByFileId(fileId); System.out.println(listFiles(result, studyId, c.recursive ? 
c.level : 1, "", c.uries, new StringBuilder(), sessionId)); break; } case "index": { OptionsParser.FileCommands.IndexCommand c = optionsParser.getFileCommands().indexCommand; AnalysisFileIndexer analysisFileIndexer = new AnalysisFileIndexer(catalogManager); long fileId = catalogManager.getFileId(c.id); long outdirId = catalogManager.getFileId(c.outdir); if (outdirId < 0) { outdirId = catalogManager.getFileParent(fileId, null, sessionId).first().getId(); } String sid = sessionId; QueryOptions queryOptions = new QueryOptions(c.cOpt.getQueryOptions()); if (c.enqueue) { queryOptions.put(ExecutorManager.EXECUTE, false); if (c.up.sessionId == null || c.up.sessionId.isEmpty()) { sid = login(c.up); } } else { queryOptions.add(ExecutorManager.EXECUTE, true); } queryOptions.put(AnalysisFileIndexer.TRANSFORM, c.transform); queryOptions.put(AnalysisFileIndexer.LOAD, c.load); queryOptions.add(AnalysisFileIndexer.PARAMETERS, c.dashDashParameters); queryOptions.add(AnalysisFileIndexer.LOG_LEVEL, logLevel); queryOptions.add(VariantStorageManager.Options.CALCULATE_STATS.key(), c.calculateStats); queryOptions.add(VariantStorageManager.Options.ANNOTATE.key(), c.annotate); logger.debug("logLevel: {}", logLevel); QueryResult<Job> queryResult = analysisFileIndexer.index(fileId, outdirId, sid, queryOptions); System.out.println(createOutput(c.cOpt, queryResult, null)); break; } default: optionsParser.printUsage(); break; } break; } case "samples": { switch (optionsParser.getSubCommand()) { case "info": { OptionsParser.SampleCommands.InfoCommand c = optionsParser.sampleCommands.infoCommand; QueryResult<Sample> sampleQueryResult = catalogManager.getSample(c.id, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, sampleQueryResult, null)); break; } case "search": { OptionsParser.SampleCommands.SearchCommand c = optionsParser.sampleCommands.searchCommand; long studyId = catalogManager.getStudyId(c.studyId); QueryOptions queryOptions = new 
QueryOptions(c.cOpt.getQueryOptions()); Query query = new Query(); if (c.sampleIds != null && !c.sampleIds.isEmpty()) { query.append(SampleDBAdaptor.QueryParams.ID.key(), c.sampleIds); } if (c.sampleNames != null && !c.sampleNames.isEmpty()) { query.append(SampleDBAdaptor.QueryParams.NAME.key(), c.sampleNames); } if (c.annotation != null && !c.annotation.isEmpty()) { for (String s : c.annotation) { String[] strings = org.opencb.opencga.storage.core.variant.adaptors.VariantDBAdaptorUtils .splitOperator(s); query.append(SampleDBAdaptor.QueryParams.ANNOTATION.key() + "." + strings[0], strings[1] + strings[2]); } } if (c.variableSetId != null && !c.variableSetId.isEmpty()) { query.append(SampleDBAdaptor.QueryParams.VARIABLE_SET_ID.key(), c.variableSetId); } QueryResult<Sample> sampleQueryResult = catalogManager.getAllSamples(studyId, query, queryOptions, sessionId); System.out.println(createOutput(c.cOpt, sampleQueryResult, null)); break; } case "load": { OptionsParser.SampleCommands.LoadCommand c = optionsParser.sampleCommands.loadCommand; CatalogSampleAnnotationsLoader catalogSampleAnnotationsLoader = new CatalogSampleAnnotationsLoader( catalogManager); long fileId = catalogManager.getFileId(c.pedigreeFileId); File pedigreeFile = catalogManager.getFile(fileId, sessionId).first(); QueryResult<Sample> sampleQueryResult = catalogSampleAnnotationsLoader.loadSampleAnnotations( pedigreeFile, c.variableSetId == 0 ? 
null : c.variableSetId, sessionId); System.out.println(createOutput(c.cOpt, sampleQueryResult, null)); break; } case "delete": { OptionsParser.SampleCommands.DeleteCommand c = optionsParser.sampleCommands.deleteCommand; QueryResult<Sample> sampleQueryResult = catalogManager.deleteSample(c.id, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, sampleQueryResult, null)); break; } default: { optionsParser.printUsage(); break; } } break; } case "cohorts": { switch (optionsParser.getSubCommand()) { case OptionsParser.CohortCommands.InfoCommand.COMMAND_NAME: { OptionsParser.CohortCommands.InfoCommand c = optionsParser.cohortCommands.infoCommand; QueryResult<Cohort> cohortQueryResult = catalogManager.getCohort(c.id, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, cohortQueryResult, null)); break; } case OptionsParser.CohortCommands.SamplesCommand.COMMAND_NAME: { OptionsParser.CohortCommands.SamplesCommand c = optionsParser.cohortCommands.samplesCommand; Cohort cohort = catalogManager.getCohort(c.id, null, sessionId).first(); QueryOptions queryOptions = new QueryOptions(c.cOpt.getQueryOptions()); Query query = new Query(SampleDBAdaptor.QueryParams.ID.key(), cohort.getSamples()); QueryResult<Sample> sampleQueryResult = catalogManager.getAllSamples( catalogManager.getStudyIdByCohortId(cohort.getId()), query, queryOptions, sessionId); OptionsParser.CommonOptions cOpt = c.cOpt; StringBuilder sb = createOutput(cOpt, sampleQueryResult, null); System.out.println(sb.toString()); break; } case OptionsParser.CohortCommands.CreateCommand.COMMAND_NAME: { OptionsParser.CohortCommands.CreateCommand c = optionsParser.cohortCommands.createCommand; Map<String, List<Sample>> cohorts = new HashMap<>(); long studyId = catalogManager.getStudyId(c.studyId); if (c.sampleIds != null && !c.sampleIds.isEmpty()) { QueryOptions queryOptions = new QueryOptions("include", "projects.studies.samples.id"); Query 
query = new Query(SampleDBAdaptor.QueryParams.ID.key(), c.sampleIds); // queryOptions.put("variableSetId", c.variableSetId); QueryResult<Sample> sampleQueryResult = catalogManager.getAllSamples(studyId, query, queryOptions, sessionId); cohorts.put(c.name, sampleQueryResult.getResult()); } else if (StringUtils.isNotEmpty(c.tagmap)) { List<QueryResult<Cohort>> queryResults = createCohorts(sessionId, studyId, c.tagmap, catalogManager, logger); System.out.println(createOutput(c.cOpt, queryResults, null)); } else { // QueryOptions queryOptions = c.cOpt.getQueryOptions(); // queryOptions.put("annotation", c.annotation); final long variableSetId; final VariableSet variableSet; if (StringUtils.isNumeric(c.variableSet)) { variableSetId = Long.parseLong(c.variableSet); variableSet = catalogManager.getVariableSet(variableSetId, null, sessionId).first(); } else if (StringUtils.isEmpty(c.variableSet)) { List<VariableSet> variableSets = catalogManager.getStudy(studyId, new QueryOptions("include", "projects.studies.variableSets"), sessionId).first() .getVariableSets(); if (!variableSets.isEmpty()) { variableSet = variableSets.get(0); //Get the first VariableSetId variableSetId = variableSet.getId(); } else { throw new CatalogException("Expected variableSetId"); } } else { QueryOptions query = new QueryOptions(StudyDBAdaptor.VariableSetParams.NAME.key(), c.variableSet); variableSet = catalogManager.getAllVariableSet(studyId, query, sessionId).first(); if (variableSet == null) { throw new CatalogException("Variable set \"" + c.variableSet + "\" not found"); } variableSetId = variableSet.getId(); } c.name = ((c.name == null) || c.name.isEmpty()) ? "" : (c.name + "."); for (Variable variable : variableSet.getVariables()) { if (variable.getName().equals(c.variable)) { for (String value : variable.getAllowedValues()) { QueryOptions queryOptions = new QueryOptions(c.cOpt.getQueryOptions()); Query query = new Query( SampleDBAdaptor.QueryParams.ANNOTATION.key() + "." 
+ c.variable, value) .append(SampleDBAdaptor.QueryParams.VARIABLE_SET_ID.key(), variableSetId); QueryResult<Sample> sampleQueryResult = catalogManager.getAllSamples(studyId, query, queryOptions, sessionId); cohorts.put(c.name + value, sampleQueryResult.getResult()); } } } if (cohorts.isEmpty()) { logger.error("VariableSetId {} does not contain any variable with id = {}.", variableSetId, c.variable); returnValue = 2; } } if (!cohorts.isEmpty()) { List<QueryResult<Cohort>> queryResults = new ArrayList<>(cohorts.size()); for (Map.Entry<String, List<Sample>> entry : cohorts.entrySet()) { List<Long> sampleIds = new LinkedList<>(); for (Sample sample : entry.getValue()) { sampleIds.add(sample.getId()); } QueryResult<Cohort> cohort = catalogManager.createCohort(studyId, entry.getKey(), c.type, c.description, sampleIds, c.cOpt.getQueryOptions(), sessionId); queryResults.add(cohort); } System.out.println(createOutput(c.cOpt, queryResults, null)); } // System.out.println(createSamplesOutput(c.cOpt, sampleQueryResult)); break; } case OptionsParser.CohortCommands.StatsCommand.COMMAND_NAME: { OptionsParser.CohortCommands.StatsCommand c = optionsParser.cohortCommands.statsCommand; VariantStorage variantStorage = new VariantStorage(catalogManager); long outdirId = catalogManager.getFileId(c.outdir); QueryOptions queryOptions = new QueryOptions(c.cOpt.getQueryOptions()); if (c.enqueue) { queryOptions.put(ExecutorManager.EXECUTE, false); } else { queryOptions.add(ExecutorManager.EXECUTE, true); } queryOptions.add(AnalysisFileIndexer.PARAMETERS, c.dashDashParameters); queryOptions.add(AnalysisFileIndexer.LOG_LEVEL, logLevel); if (c.tagmap != null) { queryOptions.put(VariantStorageManager.Options.AGGREGATION_MAPPING_PROPERTIES.key(), c.tagmap); } else if (c.cohortIds == null) { logger.error("--cohort-id nor --aggregation-mapping-file provided"); throw new IllegalArgumentException( "--cohort-id or --aggregation-mapping-file is required to specify cohorts"); } 
System.out.println(createOutput(c.cOpt, variantStorage.calculateStats(outdirId, c.cohortIds, sessionId, queryOptions), null)); break; } default: { optionsParser.printUsage(); break; } } break; } case "jobs": { switch (optionsParser.getSubCommand()) { case "info": { OptionsParser.JobsCommands.InfoCommand c = optionsParser.getJobsCommands().infoCommand; QueryResult<Job> jobQueryResult = catalogManager.getJob(c.id, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, jobQueryResult, null)); break; } case "finished": { OptionsParser.JobsCommands.DoneJobCommand c = optionsParser.getJobsCommands().doneJobCommand; QueryResult<Job> jobQueryResult = catalogManager.getJob(c.id, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); Job job = jobQueryResult.first(); if (c.force) { if (job.getStatus().getName().equals(Job.JobStatus.ERROR) || job.getStatus().getName().equals(Job.JobStatus.READY)) { logger.info("Job status is '{}' . Nothing to do.", job.getStatus().getName()); System.out.println(createOutput(c.cOpt, jobQueryResult, null)); } } else if (!job.getStatus().getName().equals(Job.JobStatus.DONE)) { throw new Exception("Job status != DONE. 
Need --force to continue"); } /** Record output **/ ExecutionOutputRecorder outputRecorder = new ExecutionOutputRecorder(catalogManager, sessionId); if (c.discardOutput) { String tempJobsDir = catalogManager.getCatalogConfiguration().getTempJobsDir(); URI tmpOutDirUri = IndexDaemon.getJobTemporaryFolder(job.getId(), tempJobsDir).toUri(); CatalogIOManager ioManager = catalogManager.getCatalogIOManagerFactory().get(tmpOutDirUri); if (ioManager.exists(tmpOutDirUri)) { logger.info("Deleting temporal job output folder: {}", tmpOutDirUri); ioManager.deleteDirectory(tmpOutDirUri); } else { logger.info("Temporal job output folder already removed: {}", tmpOutDirUri); } } else { outputRecorder.recordJobOutput(job); } outputRecorder.postProcessJob(job, c.error); /** Change status to ERROR or READY **/ ObjectMap parameters = new ObjectMap(); if (c.error) { parameters.put("status.name", Job.JobStatus.ERROR); parameters.put("error", Job.ERRNO_ABORTED); parameters.put("errorDescription", Job.ERROR_DESCRIPTIONS.get(Job.ERRNO_ABORTED)); } else { parameters.put("status.name", Job.JobStatus.READY); } catalogManager.modifyJob(job.getId(), parameters, sessionId); jobQueryResult = catalogManager.getJob(c.id, new QueryOptions(c.cOpt.getQueryOptions()), sessionId); System.out.println(createOutput(c.cOpt, jobQueryResult, null)); break; } case "status": { OptionsParser.JobsCommands.StatusCommand c = optionsParser.getJobsCommands().statusCommand; final List<Long> studyIds; if (c.studyId == null || c.studyId.isEmpty()) { studyIds = catalogManager .getAllStudies(new Query(), new QueryOptions("include", "id"), sessionId).getResult() .stream().map(Study::getId).collect(Collectors.toList()); } else { studyIds = new LinkedList<>(); for (String s : c.studyId.split(",")) { studyIds.add(catalogManager.getStudyId(s)); } } for (Long studyId : studyIds) { QueryResult<Job> allJobs = catalogManager.getAllJobs(studyId, new Query(JobDBAdaptor.QueryParams.STATUS_NAME.key(), 
Collections.singletonList(Job.JobStatus.RUNNING.toString())), new QueryOptions(), sessionId); for (Iterator<Job> iterator = allJobs.getResult().iterator(); iterator.hasNext();) { Job job = iterator.next(); System.out.format("Job - %s [%d] - %s\n", job.getName(), job.getId(), job.getDescription()); // URI tmpOutDirUri = job.getTmpOutDirUri(); String tempJobsDir = catalogManager.getCatalogConfiguration().getTempJobsDir(); URI tmpOutDirUri = IndexDaemon.getJobTemporaryFolder(job.getId(), tempJobsDir).toUri(); CatalogIOManager ioManager = catalogManager.getCatalogIOManagerFactory().get(tmpOutDirUri); try { ioManager.listFilesStream(tmpOutDirUri).sorted().forEach(uri -> { String count; try { long fileSize = ioManager.getFileSize(uri); count = humanReadableByteCount(fileSize, false); } catch (CatalogIOException e) { count = "ERROR"; } System.out.format("\t%s [%s]\n", tmpOutDirUri.relativize(uri), count); }); } catch (CatalogIOException e) { System.out.println("Unable to read files from " + tmpOutDirUri + " - " + e.getCause().getMessage()); } if (iterator.hasNext()) { System.out.println("-----------------------------------------"); } } } break; } case "run": { OptionsParser.JobsCommands.RunJobCommand c = optionsParser.getJobsCommands().runJobCommand; long studyId = catalogManager.getStudyId(c.studyId); long outdirId = catalogManager.getFileId(c.outdir); long toolId = catalogManager.getToolId(c.toolId); String toolName; ToolManager toolManager; if (toolId < 0) { toolManager = new ToolManager(c.toolId, c.execution); //LEGACY MODE, AVOID USING toolName = c.toolId; } else { Tool tool = catalogManager.getTool(toolId, sessionId).getResult().get(0); toolManager = new ToolManager(Paths.get(tool.getPath()).getParent(), tool.getName(), c.execution); toolName = tool.getName(); } List<Long> inputFiles = new LinkedList<>(); Map<String, List<String>> localParams = new HashMap<>(); for (String key : c.params.keySet()) { localParams.put(key, c.params.getAsStringList(key)); } Execution ex 
= toolManager.getExecution(); // Set input param for (InputParam inputParam : ex.getInputParams()) { if (c.params.containsKey(inputParam.getName())) { List<String> filePaths = new LinkedList<>(); for (String fileId : c.params.getAsStringList(inputParam.getName())) { File file = catalogManager.getFile(catalogManager.getFileId(fileId), sessionId) .getResult().get(0); filePaths.add(catalogManager.getFileUri(file).getPath()); inputFiles.add(file.getId()); } localParams.put(inputParam.getName(), filePaths); } } // Set outdir String outputParam = toolManager.getExecution().getOutputParam(); File outdir = catalogManager.getFile(outdirId, sessionId).first(); localParams.put(outputParam, Collections.singletonList(catalogManager.getFileUri(outdir).getPath())); QueryResult<Job> jobQueryResult = new JobFactory(catalogManager).createJob(toolManager, localParams, studyId, c.name, c.description, outdir, inputFiles, sessionId, true); System.out.println(createOutput(c.cOpt, jobQueryResult, null)); break; } default: { optionsParser.printUsage(); break; } } break; } case "tools": { switch (optionsParser.getSubCommand()) { case "create": { OptionsParser.ToolCommands.CreateCommand c = optionsParser.getToolCommands().createCommand; Path path = Paths.get(c.path); FileUtils.checkDirectory(path); QueryResult<Tool> tool = catalogManager.createTool(c.alias, c.description, null, null, path.toAbsolutePath().toString(), c.openTool, sessionId); System.out.println(createOutput(c.cOpt, tool, null)); break; } case "info": { OptionsParser.ToolCommands.InfoCommand c = optionsParser.getToolCommands().infoCommand; long toolId = catalogManager.getToolId(c.id); ToolManager toolManager; String toolName; if (toolId < 0) { toolManager = new ToolManager(c.id, null); //LEGACY MODE, AVOID USING toolName = c.id; System.out.println(createOutput(c.cOpt, toolManager.getManifest(), null)); } else { Tool tool = catalogManager.getTool(toolId, sessionId).getResult().get(0); toolManager = new 
ToolManager(Paths.get(tool.getPath()).getParent(), tool.getName(), null); toolName = tool.getName(); System.out.println(createOutput(c.cOpt, tool, null)); } break; } default: { optionsParser.printUsage(); break; } } break; } case "exit": { } break; case "help": default: optionsParser.printUsage(); // logger.info("Unknown command"); break; } logout(sessionId); return returnValue; }
From source file:org.apache.taverna.prov.W3ProvenanceExport.java
/**
 * Populates and writes the RO Bundle manifest for a workflow run.
 * <p>
 * Stores the given workflow bundle into the data bundle, regenerates the
 * manifest from the bundle contents, attaches provenance/agent metadata and a
 * series of {@code PathAnnotation}s linking the run, the provenance file, the
 * workflow definition and its wfdesc description, writes a small Turtle
 * annotation body ({@code hasWorkflowDefinition}) via a Jena model, and
 * finally serializes the manifest as JSON-LD.
 * <p>
 * Relies on instance state visible in this method: {@code applicationConfig},
 * {@code mediaTypes}, {@code runURI}, {@code WORKFLOW_BUNDLE}, {@code WFDESC},
 * plus helpers {@code getBundle()}, {@code getPluginIdentifier(Class)} and
 * {@code getTavernaVersion()}.
 *
 * @param wfBundle the workflow bundle describing the executed workflow
 * @throws IOException if writing the annotation body or manifest fails
 */
public void writeBundle(WorkflowBundle wfBundle) throws IOException {
    Bundle dataBundle = getBundle();

    // Workflow: store the workflow definition inside the data bundle.
    DataBundles.setWorkflowBundle(dataBundle, wfBundle);

    // Generate Manifest
    // TODO: This should be done automatically on close/save
    Manifest manifest = new Manifest(dataBundle);
    manifest.populateFromBundle();

    Path workflowRunProvenance = DataBundles.getWorkflowRunProvenance(dataBundle);

    // Additional metadata: mark the provenance file as Turtle and credit this
    // plugin as its creator (and as creator of the manifest itself).
    manifest.getAggregation(workflowRunProvenance).setMediatype("text/turtle");
    Agent provPlugin = new Agent();
    provPlugin.setName(
            "Taverna-PROV plugin, " + applicationConfig.getTitle() + " " + applicationConfig.getName());
    provPlugin.setUri(getPluginIdentifier(getClass()));
    manifest.getAggregation(workflowRunProvenance).setCreatedBy(provPlugin);
    manifest.setCreatedBy(provPlugin);

    // Media types: record the collected media type for each aggregated URI.
    for (Entry<URI, String> e : mediaTypes.entrySet()) {
        URI uri = e.getKey();
        String mediatype = e.getValue();
        PathMetadata aggregation = manifest.getAggregation(uri);
        if (aggregation == null) {
            // An external reference? Add it.
            // NOTE(review): this re-calls the same getAggregation(uri) that just
            // returned null. That only helps if getAggregation registers an
            // aggregation on demand for external URIs — confirm against the
            // robundle Manifest API; otherwise setMediatype below would NPE.
            aggregation = manifest.getAggregation(uri);
            //aggregation = new PathMetadata();
            //aggregation.setUri(uri);
            //manifest.getAggregates().add(aggregation);
        }
        aggregation.setMediatype(mediatype);
    }

    // Add annotations
    // This RO Bundle is about a run: annotate the bundle root ("/") with the run URI.
    PathAnnotation bundleAboutRun = new PathAnnotation();
    bundleAboutRun.setAbout(runURI);
    bundleAboutRun.setContent(URI.create("/"));
    manifest.getAnnotations().add(bundleAboutRun);

    // Also aggregate the run by ID, and that it was done by taverna
    Agent taverna = new Agent();
    taverna.setName(applicationConfig.getTitle());
    taverna.setUri(getTavernaVersion());
    manifest.getAggregation(runURI).setCreatedBy(taverna);

    // TODO: Do we need both the "history" link and the annotation below?
    manifest.setHistory(Arrays.asList(workflowRunProvenance));

    // This RO Bundle is described in the provenance file.
    // (toUri().getPath() yields the zip-internal path of the bundle entry.)
    PathAnnotation provenanceAboutBundle = new PathAnnotation();
    provenanceAboutBundle.setAbout(URI.create("/"));
    provenanceAboutBundle.setContent(URI.create(workflowRunProvenance.toUri().getPath()));
    manifest.getAnnotations().add(provenanceAboutBundle);

    // The wfdesc is about the workflow definition
    Path workflow = DataBundles.getWorkflow(dataBundle);
    // String workflowType = Files.probeContentType(workflow);
    manifest.getAggregation(workflow).setMediatype(WORKFLOW_BUNDLE);

    Path wfdesc = DataBundles.getWorkflowDescription(dataBundle);
    if (Files.exists(wfdesc)) {
        // Only link the wfdesc description when one was actually produced.
        PathAnnotation wfdescAboutWfBundle = new PathAnnotation();
        wfdescAboutWfBundle.setAbout(URI.create(workflow.toUri().getPath()));
        wfdescAboutWfBundle.setContent(URI.create(wfdesc.toUri().getPath()));
        manifest.getAnnotations().add(wfdescAboutWfBundle);
    }

    // And the workflow definition is about the workflow
    PathAnnotation wfBundleAboutWf = new PathAnnotation();
    URITools uriTools = new URITools();
    URI mainWorkflow = uriTools.uriForBean(wfBundle.getMainWorkflow());
    wfBundleAboutWf.setAbout(mainWorkflow);
    URI wfBundlePath = URI.create(workflow.toUri().getPath());
    wfBundleAboutWf.setContent(wfBundlePath);
    manifest.getAnnotations().add(wfBundleAboutWf);
    // Ensure the abstract workflow URI is aggregated (return value unused —
    // presumably getAggregation registers it as a side effect; verify).
    manifest.getAggregation(mainWorkflow);

    // hasWorkflowDefinition: write a tiny Turtle annotation body linking the
    // abstract workflow to its concrete definition file, under a fresh UUID.
    PathAnnotation hasWorkflowDefinition = new PathAnnotation();
    hasWorkflowDefinition.setAbout(wfBundlePath);
    UUID uuid = UUID.randomUUID();
    hasWorkflowDefinition.setUri(URI.create("urn:uuid:" + uuid));
    Path annotationBody = DataBundles.getAnnotations(dataBundle).resolve(uuid + ".ttl");
    hasWorkflowDefinition.setContent(URI.create(annotationBody.toUri().getPath()));
    Model model = ModelFactory.createDefaultModel();
    // Relative path so the annotation stays valid inside the zipped bundle.
    URI relPathToWfBundle = uriTools.relativePath(annotationBody.toUri(), workflow.toUri());
    model.setNsPrefix("wfdesc", WFDESC);
    model.add(model.createResource(mainWorkflow.toASCIIString()),
            model.createProperty(WFDESC + "hasWorkflowDefinition"),
            model.createResource(relPathToWfBundle.toASCIIString()));
    try (OutputStream out = Files.newOutputStream(annotationBody)) {
        // Third argument is the base URI used to resolve/relativize the output.
        model.write(out, "TURTLE", annotationBody.toUri().toASCIIString());
    }
    manifest.getAnnotations().add(hasWorkflowDefinition);

    // The workflow definition file is about the workflow bundle's global base URI.
    PathAnnotation wfBundleAboutWfB = new PathAnnotation();
    wfBundleAboutWfB.setAbout(wfBundle.getGlobalBaseURI());
    wfBundleAboutWfB.setContent(URI.create(workflow.toUri().getPath()));
    manifest.getAnnotations().add(wfBundleAboutWfB);

    manifest.writeAsJsonLD();
    // // Saving a data bundle:
    // Path bundleFile = runPath.getParent().resolve(runPath.getFileName() +
    // ".bundle.zip");
    // DataBundles.closeAndSaveBundle(dataBundle, bundleFile);
    // NOTE: From now dataBundle and its Path's are CLOSED
    // and can no longer be accessed
}