List of usage examples for java.net.URI relativize(URI)
public URI relativize(URI uri)
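Before the project-specific examples, here is a minimal standalone sketch of how relativize behaves (the class name and paths are invented for illustration): relativize strips the base URI's prefix from the given URI, and returns the argument unchanged when the base is not a prefix of it.

import java.net.URI;

public class RelativizeDemo {
    public static void main(String[] args) {
        // A directory-style base URI; the trailing "/" matters for relativization.
        URI base = URI.create("file:/data/workflows/");
        URI child = URI.create("file:/data/workflows/deploy/app.xml");

        // Prints "deploy/app.xml" - the path of 'child' relative to 'base'.
        System.out.println(base.relativize(child).getPath());

        // A URI that does not live under the base is returned unchanged.
        URI other = URI.create("file:/tmp/elsewhere.txt");
        System.out.println(base.relativize(other)); // file:/tmp/elsewhere.txt
    }
}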
From source file:org.jenkins.plugins.leroy.NewProject.java
private void readWorkflowsFromDisk(String workflowsDir) {
    File wfDir = new File(workflowsDir);
    workflow = new ArrayList<String>();
    if (wfDir.exists() && wfDir.canRead()) {
        // get file names
        IOFileFilter workflowFileFilter = new AbstractFileFilter() {
            @Override
            public boolean accept(File file) {
                try {
                    if (LeroyUtils.isWorkflow(file)) {
                        return true;
                    }
                } catch (Throwable t) {
                    t.printStackTrace();
                }
                return false;
            }
        };
        Iterator<File> fileIterator = FileUtils.iterateFiles(new File(workflowsDir), workflowFileFilter,
                TrueFileFilter.INSTANCE);
        if (fileIterator != null) {
            URI workFlowsBase = new File(workflowsDir).toURI();
            while (fileIterator.hasNext()) {
                // get relative path using workflow folder as a base and remove extension
                File wf = fileIterator.next();
                String relative = workFlowsBase.relativize(wf.toURI()).getPath();
                String wfName = relative.substring(0, relative.lastIndexOf('.'));
                workflow.add(wfName);
            }
        }
    }
}
From source file:org.metaeffekt.dcc.commons.execution.ExecutionStateHandler.java
/**
 * Collect all execution properties files in the ${dcc.solution.dir}/config directory into a zip
 * file and return it as an InputStream.
 *
 * @return An InputStream
 */
public InputStream consolidateState(Id<DeploymentId> deploymentId) {
    Validate.notNull(deploymentId);
    Validate.notNull(deploymentId.getValue());

    LOG.debug("Collecting all execution properties to create consolidated state.");

    // FIXME: this collects also properties in the tmp structures
    Collection<File> allExecutionProperties = Collections.emptyList();
    if (configurationDirectory.exists()) {
        allExecutionProperties = FileUtils.listFiles(configurationDirectory, new String[] { "properties" },
                true);
    }

    if (allExecutionProperties.size() > 0) {
        final URI configurationFolderURI = configurationDirectory.toURI();
        try {
            final File targetFile = new File(configurationDirectory, ZIP_FILE_NAME);
            deleteFile(targetFile);
            try (OutputStream out = new FileOutputStream(targetFile);
                    ZipOutputStream zos = new ZipOutputStream(out)) {
                for (File file : allExecutionProperties) {
                    String name = configurationFolderURI.relativize(file.toURI()).getPath();
                    LOG.debug("Found [{}]", name);
                    if (name.contains("/tmp/") || name.contains("\\tmp\\")) {
                        // FIXME: we should move the tmp folder out of the config folder
                        LOG.debug("Skipping [{}] as it is included in a tmp folder", name);
                    } else {
                        zos.putNextEntry(new ZipEntry(name));
                        try (FileInputStream fis = new FileInputStream(file)) {
                            IOUtils.copy(fis, zos);
                        }
                    }
                }
                zos.finish();
            }
            if (targetFile.exists()) {
                AutoCloseInputStream autoCloseStream = new AutoCloseInputStream(
                        new FileInputStream(targetFile)) {
                    @Override
                    public void close() throws IOException {
                        super.close();
                        targetFile.delete();
                    }
                };
                return autoCloseStream;
            }
            // no state produced
            return null;
        } catch (IOException e) {
            throw new IllegalStateException("Error while updating execution status:", e);
        }
    }
    return null;
}
From source file:org.wso2.carbon.humantask.core.deployment.ArchiveBasedHumanTaskDeploymentUnitBuilder.java
@Override
public void buildWSDLs() throws HumanTaskDeploymentException {
    HashSet<Definition> tmpWsdlDefinitions = new HashSet<>();
    URI baseUri = humantaskDir.toURI();
    for (File file : FileUtils.directoryEntriesInPath(humantaskDir, wsdlFilter)) {
        try {
            URI uri = baseUri.relativize(file.toURI());
            if (!uri.isAbsolute()) {
                File f = new File(baseUri.getPath() + File.separator + uri.getPath());
                URI abUri = f.toURI();
                if (abUri.isAbsolute()) {
                    uri = abUri;
                }
            }
            WSDLReader reader = WSDLFactory.newInstance().newWSDLReader();
            reader.setFeature(HumanTaskConstants.JAVAX_WSDL_VERBOSE_MODE_KEY, false);
            reader.setFeature("javax.wsdl.importDocuments", true);
            Definition definition = reader.readWSDL(new HumanTaskWSDLLocator(uri));
            if (definition != null) {
                tmpWsdlDefinitions.add(definition);
            }
        } catch (WSDLException e) {
            log.error("Error processing wsdl " + file.getName());
            throw new HumanTaskDeploymentException(" Error processing wsdl ", e);
        } catch (URISyntaxException e) {
            log.error("Invalid uri in reading wsdl ", e);
            throw new HumanTaskDeploymentException(" Invalid uri in reading wsdl ", e);
        }
    }

    // Optimizing WSDL imports. Using HashSet to avoid duplicate entries.
    HashSet<Definition> optimizedDefinitions = new HashSet<>();
    HTDeploymentConfigDocument htDeploymentConfigDocument = getHTDeploymentConfigDocument();

    // Iterating Tasks.
    THTDeploymentConfig.Task[] taskArray = htDeploymentConfigDocument.getHTDeploymentConfig().getTaskArray();
    if (taskArray != null) {
        for (THTDeploymentConfig.Task task : taskArray) {
            QName taskService = task.getPublish().getService().getName();
            Definition taskServiceDefinition = getDefinition(taskService, tmpWsdlDefinitions);
            if (log.isDebugEnabled()) {
                log.debug("Optimizing WSDL import for Task : " + task.getName());
            }
            if (taskServiceDefinition != null) {
                optimizedDefinitions.add(taskServiceDefinition);
                if (log.isDebugEnabled()) {
                    log.debug("Added WSDL for Task : " + task.getName() + ", Service : " + taskService
                            + ", Imported/Total definition : " + optimizedDefinitions.size() + "/"
                            + tmpWsdlDefinitions.size());
                }
            } else {
                log.warn("Can't find valid WSDL definition for Task" + task.getName() + ", Service: "
                        + taskService);
            }
            QName callbackService = task.getCallback().getService().getName();
            Definition callbackServiceDefinition = getDefinition(callbackService, tmpWsdlDefinitions);
            if (callbackServiceDefinition != null) {
                optimizedDefinitions.add(callbackServiceDefinition);
                if (log.isDebugEnabled()) {
                    log.debug("Added WSDL for Task : " + task.getName() + ", Callback Service : "
                            + callbackService + ", Imported/Total definition : " + optimizedDefinitions.size()
                            + "/" + tmpWsdlDefinitions.size());
                }
            } else {
                log.warn("Can't find valid WSDL definition for Task : " + task.getName() + ", Callback Service"
                        + callbackService);
            }
        }
    }

    // Iterating Notifications.
    THTDeploymentConfig.Notification[] notificationsArray = htDeploymentConfigDocument.getHTDeploymentConfig()
            .getNotificationArray();
    if (notificationsArray != null) {
        for (THTDeploymentConfig.Notification notification : notificationsArray) {
            QName notificationService = notification.getPublish().getService().getName();
            Definition notificationServiceDefinition = getDefinition(notificationService, tmpWsdlDefinitions);
            if (notificationServiceDefinition != null) {
                optimizedDefinitions.add(notificationServiceDefinition);
                if (log.isDebugEnabled()) {
                    log.debug("Added WSDL for Task : " + notification.getName() + ", Callback Service : "
                            + notificationService + ", Imported/Total definition : "
                            + optimizedDefinitions.size() + "/" + tmpWsdlDefinitions.size());
                }
            } else {
                log.warn("Can't find valid WSDL definition for Notification " + notification.getName()
                        + ", Service: " + notificationService);
            }
        }
    }

    // Converting HashSet to ArrayList.
    wsdlDefinitions = new ArrayList<>(optimizedDefinitions);
    if (log.isDebugEnabled()) {
        log.debug("Optimized Imported/Total definition : " + wsdlDefinitions.size() + "/"
                + tmpWsdlDefinitions.size());
    }
}
From source file:de.nrw.hbz.regal.sync.ingest.Downloader.java
/**
 * @param directory
 *            the directory which will be zipped
 * @param zipfile
 *            the output file
 */
@SuppressWarnings("resource")
protected void zip(File directory, File zipfile) {
    try (ZipOutputStream zout = new ZipOutputStream(new FileOutputStream(zipfile))) {
        URI base = directory.toURI();
        Deque<File> queue = new LinkedList<File>();
        queue.push(directory);
        while (!queue.isEmpty()) {
            directory = queue.pop();
            for (File kid : directory.listFiles()) {
                String name = base.relativize(kid.toURI()).getPath();
                if (kid.isDirectory()) {
                    queue.push(kid);
                    name = name.endsWith("/") ? name : name + "/";
                    zout.putNextEntry(new ZipEntry(name));
                } else {
                    zout.putNextEntry(new ZipEntry(name));
                    copy(kid, zout);
                    zout.closeEntry();
                }
            }
        }
    } catch (IOException e) {
        throw new ZipDownloaderException(e);
    }
}
From source file:software.coolstuff.springframework.owncloud.service.impl.rest.OwncloudRestResourceServiceImpl.java
private OwncloudRestResourceExtension createOwncloudResourceFrom(DavResource davResource,
        OwncloudResourceConversionProperties conversionProperties) {
    log.debug("Create OwncloudResource based on DavResource {}", davResource.getHref());
    MediaType mediaType = MediaType.valueOf(davResource.getContentType());
    URI rootPath = conversionProperties.getRootPath();
    URI href = rootPath.resolve(davResource.getHref());
    String name = davResource.getName();
    if (davResource.isDirectory() && href.equals(rootPath)) {
        name = SLASH;
    }
    LocalDateTime lastModifiedAt = LocalDateTime.ofInstant(davResource.getModified().toInstant(),
            ZoneId.systemDefault());
    href = rootPath.relativize(href);
    href = URI.create(SLASH).resolve(href).normalize(); // prepend "/" to the href
    OwncloudRestResourceExtension owncloudResource = OwncloudRestResourceImpl.builder()
            .href(href)
            .name(name)
            .lastModifiedAt(lastModifiedAt)
            .mediaType(mediaType)
            .eTag(StringUtils.strip(davResource.getEtag(), QUOTE))
            .build();
    if (davResource.isDirectory()) {
        return owncloudResource;
    }
    return OwncloudRestFileResourceImpl.fileBuilder()
            .owncloudResource(owncloudResource)
            .contentLength(davResource.getContentLength())
            .build();
}
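The relativize-then-resolve idiom above (strip the WebDAV root, then re-root the remainder under "/") can be seen in isolation in this small sketch; the URIs are invented and SLASH is assumed to be the string "/".

import java.net.URI;

public class RelativizeResolveDemo {
    public static void main(String[] args) {
        URI rootPath = URI.create("/remote.php/webdav/");
        URI href = URI.create("/remote.php/webdav/docs/report.pdf");

        // Strip the root prefix: yields the relative URI "docs/report.pdf".
        URI relative = rootPath.relativize(href);

        // Re-root it under "/" so the resource is addressed from the share root:
        // prints "/docs/report.pdf".
        System.out.println(URI.create("/").resolve(relative).normalize());
    }
}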
From source file:org.apache.hadoop.fs.s3native.NativeS3FileSystem.java
/**
 * <p>
 * If <code>f</code> is a file, this method will make a single call to S3.
 * If <code>f</code> is a directory, this method will make a maximum of
 * (<i>n</i> / 1000) + 2 calls to S3, where <i>n</i> is the total number of
 * files and directories contained directly in <code>f</code>.
 * </p>
 */
@Override
public FileStatus[] listStatus(Path f) throws IOException {
    Path absolutePath = makeAbsolute(f);
    String key = pathToKey(absolutePath);

    if (key.length() > 0) {
        FileMetadata meta = store.retrieveMetadata(key);
        if (meta != null) {
            return new FileStatus[] { newFile(meta, absolutePath) };
        }
    }

    URI pathUri = absolutePath.toUri();
    Set<FileStatus> status = new TreeSet<FileStatus>();
    String priorLastKey = null;
    do {
        PartialListing listing = store.list(key, S3_MAX_LISTING_LENGTH, priorLastKey);
        for (FileMetadata fileMetadata : listing.getFiles()) {
            Path subpath = keyToPath(fileMetadata.getKey());
            String relativePath = pathUri.relativize(subpath.toUri()).getPath();
            if (relativePath.endsWith(FOLDER_SUFFIX)) {
                status.add(newDirectory(new Path(absolutePath,
                        relativePath.substring(0, relativePath.indexOf(FOLDER_SUFFIX)))));
            } else {
                status.add(newFile(fileMetadata, subpath));
            }
        }
        for (String commonPrefix : listing.getCommonPrefixes()) {
            Path subpath = keyToPath(commonPrefix);
            String relativePath = pathUri.relativize(subpath.toUri()).getPath();
            status.add(newDirectory(new Path(absolutePath, relativePath)));
        }
        priorLastKey = listing.getPriorLastKey();
    } while (priorLastKey != null);

    if (status.isEmpty() && store.retrieveMetadata(key + FOLDER_SUFFIX) == null) {
        return null;
    }

    return status.toArray(new FileStatus[0]);
}
From source file:org.orbisgis.view.map.MapEditor.java
/**
 * Load a new map context
 * @param element Editable to load
 */
private void loadMap(MapElement element) {
    MapElement oldMapElement = mapElement;
    ToolManager oldToolManager = getToolManager();
    removeListeners();
    mapElement = element;
    if (element != null) {
        try {
            mapContext = (MapContext) element.getObject();
            mapContext.addPropertyChangeListener(MapContext.PROP_ACTIVELAYER, activeLayerListener);
            // We (unfortunately) need a cross reference here : this way, we'll
            // be able to retrieve the MapTransform from the Toc..
            element.setMapEditor(this);
            mapControl.setMapContext(mapContext);
            mapControl.getMapTransform().setExtent(mapContext.getBoundingBox());
            mapControl.setElement(this);
            mapControl.initMapControl(new PanTool());
            // Update the default map context path with the relative path
            ViewWorkspace viewWorkspace = Services.getService(ViewWorkspace.class);
            URI rootDir = (new File(viewWorkspace.getMapContextPath() + File.separator)).toURI();
            String relative = rootDir.relativize(element.getMapContextFile().toURI()).getPath();
            mapEditorPersistence.setDefaultMapContext(relative);
            // Set the loaded map hint to the MapCatalog
            mapsManager.setLoadedMap(element.getMapContextFile());
            // Update the editor label with the new editable name
            updateMapLabel();
            mapElement.addPropertyChangeListener(MapElement.PROP_MODIFIED, modificationListener);
            repaint();
        } catch (IllegalStateException ex) {
            GUILOGGER.error(ex);
        } catch (TransitionException ex) {
            GUILOGGER.error(ex);
        }
    } else {
        // Load null MapElement
        mapControl.setMapContext(null);
    }
    firePropertyChange(PROP_TOOL_MANAGER, oldToolManager, getToolManager());
    firePropertyChange(PROP_MAP_ELEMENT, oldMapElement, mapElement);
}
From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java
@SuppressWarnings("unchecked") private static List<Path> createFileListRelative(Counter counter, FileSystem fs, Path source) throws IOException { assert counter != null; assert fs != null; assert source != null; assert source.isAbsolute(); URI baseUri = source.toUri(); FileStatus root;//w w w . j a va2s . c o m try { root = fs.getFileStatus(source); } catch (FileNotFoundException e) { LOG.warn(MessageFormat.format("Source path is not found: {0} (May be already moved)", baseUri)); return Collections.emptyList(); } counter.add(1); List<FileStatus> all = recursiveStep(fs, Collections.singletonList(root)); if (LOG.isDebugEnabled()) { LOG.debug(MessageFormat.format("Source path contains {1} files/directories: {0}", //$NON-NLS-1$ baseUri, all.size())); } List<Path> results = new ArrayList<>(); for (FileStatus stat : all) { if (stat.isDirectory()) { continue; } Path path = stat.getPath(); URI uri = path.toUri(); URI relative = baseUri.relativize(uri); if (relative.equals(uri) == false) { results.add(new Path(relative)); } else { throw new IOException(MessageFormat.format("Failed to compute relative path: base={0}, target={1}", baseUri, uri)); } counter.add(1); } Collections.sort(results); return results; }
From source file:org.apache.solr.core.SolrResourceLoader.java
/** Opens any resource by its name.
 * By default, this will look in multiple locations to load the resource:
 *   $configDir/$resource (if resource is not absolute)
 *   $CWD/$resource
 * otherwise, it will look for it in any jar accessible through the class loader.
 * Override this method to customize loading resources.
 * @return the stream for the named resource
 */
@Override
public InputStream openResource(String resource) throws IOException {
    InputStream is = null;
    try {
        File f0 = new File(resource), f = f0;
        if (!f.isAbsolute()) {
            // try $CWD/$configDir/$resource
            f = new File(getConfigDir() + resource).getAbsoluteFile();
        }
        boolean found = f.isFile() && f.canRead();
        if (!found) { // no success with $CWD/$configDir/$resource
            f = f0.getAbsoluteFile();
            found = f.isFile() && f.canRead();
        }
        // check that we don't escape instance dir
        if (found) {
            if (!Boolean.parseBoolean(System.getProperty("solr.allow.unsafe.resourceloading", "false"))) {
                final URI instanceURI = new File(getInstanceDir()).getAbsoluteFile().toURI().normalize();
                final URI fileURI = f.toURI().normalize();
                if (instanceURI.relativize(fileURI) == fileURI) {
                    // no URI relativize possible, so they don't share same base folder
                    throw new IOException(
                            "For security reasons, SolrResourceLoader cannot load files from outside the instance's directory: "
                                    + f
                                    + "; if you want to override this safety feature and you are sure about the consequences, you can pass the system property "
                                    + "-Dsolr.allow.unsafe.resourceloading=true to your JVM");
                }
            }
            // relativize() returned a relative, new URI, so we are fine!
            return new FileInputStream(f);
        }
        // Delegate to the class loader (looking into $INSTANCE_DIR/lib jars).
        // We need a ClassLoader-compatible (forward-slashes) path here!
        is = classLoader.getResourceAsStream(resource.replace(File.separatorChar, '/'));
        // This is a hack just for tests (it is not done in ZKResourceLoader)!
        // -> the getConfigDir's path must not be absolute!
        if (is == null && System.getProperty("jetty.testMode") != null
                && !new File(getConfigDir()).isAbsolute()) {
            is = classLoader.getResourceAsStream((getConfigDir() + resource).replace(File.separatorChar, '/'));
        }
    } catch (IOException ioe) {
        throw ioe;
    } catch (Exception e) {
        throw new IOException("Error opening " + resource, e);
    }
    if (is == null) {
        throw new IOException("Can't find resource '" + resource + "' in classpath or '"
                + new File(getConfigDir()).getAbsolutePath() + "'");
    }
    return is;
}
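The identity comparison above relies on relativize returning its argument unchanged when the target does not live under the base URI. The following standalone sketch (directory names invented for illustration) shows the same containment check in isolation.

import java.net.URI;

public class ContainmentCheckDemo {

    // Returns true if 'file' is located underneath 'baseDir'. Both are expected to be
    // normalized absolute URIs, with 'baseDir' ending in "/".
    static boolean isInside(URI baseDir, URI file) {
        // relativize() returns the argument itself when baseDir is not a prefix of file,
        // so a successful relativization proves containment.
        return !baseDir.relativize(file).equals(file);
    }

    public static void main(String[] args) {
        URI instanceDir = URI.create("file:/opt/solr/server/solr/core1/");
        // true: the config file sits below the instance directory
        System.out.println(isInside(instanceDir, URI.create("file:/opt/solr/server/solr/core1/conf/schema.xml")));
        // false: a path outside the instance directory cannot be relativized
        System.out.println(isInside(instanceDir, URI.create("file:/etc/passwd")));
    }
}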
From source file:com.aliyun.fs.oss.nat.NativeOssFileSystem.java
/**
 * <p>
 * If <code>f</code> is a file, this method will make a single call to Oss.
 * If <code>f</code> is a directory, this method will make a maximum of
 * (<i>n</i> / 1000) + 2 calls to Oss, where <i>n</i> is the total number of
 * files and directories contained directly in <code>f</code>.
 * </p>
 */
@Override
public FileStatus[] listStatus(Path f) throws IOException {
    Path absolutePath = makeAbsolute(f);
    String key = pathToKey(absolutePath);

    if (key.length() > 0) {
        FileMetadata meta = store.retrieveMetadata(key);
        if (meta != null) {
            return new FileStatus[] { newFile(meta, absolutePath) };
        }
    }

    URI pathUri = absolutePath.toUri();
    Set<FileStatus> status = new TreeSet<FileStatus>();
    String priorLastKey = null;
    do {
        PartialListing listing = store.list(key, OSS_MAX_LISTING_LENGTH, priorLastKey, false);
        for (FileMetadata fileMetadata : listing.getFiles()) {
            Path subPath = keyToPath(fileMetadata.getKey());
            String relativePath = pathUri.relativize(subPath.toUri()).getPath();
            if (fileMetadata.getKey().equals(key + "/")) {
                // this is just the directory we have been asked to list
            } else if (relativePath.endsWith(FOLDER_SUFFIX)) {
                status.add(newDirectory(
                        new Path("/" + relativePath.substring(0, relativePath.indexOf(FOLDER_SUFFIX)))));
            } else {
                // Here, we need to convert "file/path" to "/file/path".
                // Otherwise, Path.makeQualified will throw `URISyntaxException`.
                Path modifiedPath = new Path("/" + subPath.toString());
                status.add(newFile(fileMetadata, modifiedPath));
            }
        }
        for (String commonPrefix : listing.getCommonPrefixes()) {
            Path subPath = keyToPath(commonPrefix);
            String relativePath = pathUri.relativize(subPath.toUri()).getPath();
            status.add(newDirectory(new Path("/" + relativePath)));
        }
        priorLastKey = listing.getPriorLastKey();
    } while (priorLastKey != null);

    if (status.isEmpty()) {
        return new FileStatus[0];
    }

    return status.toArray(new FileStatus[status.size()]);
}