List of usage examples for java.net.URI.resolve
public URI resolve(String str)
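Before the project examples below, a minimal self-contained sketch of what resolve does (the base URL and paths are made up for illustration; the behavior shown is the documented java.net.URI resolution):

import java.net.URI;

public class ResolveDemo {
    public static void main(String[] args) {
        // Hypothetical base URI, used only for illustration.
        URI base = URI.create("http://example.com/docs/guide.html");

        // Relative reference: replaces the last path segment of the base.
        System.out.println(base.resolve("chapter1.html"));      // http://example.com/docs/chapter1.html

        // Reference with an absolute path: keeps scheme and authority, replaces the whole path.
        System.out.println(base.resolve("/images/logo.png"));   // http://example.com/images/logo.png

        // An absolute URI argument is returned unchanged.
        System.out.println(base.resolve("http://other.org/x")); // http://other.org/x
    }
}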
From source file:org.apache.ode.bpel.compiler.v1.WSDLRegistry.java
/**
 * Adds a WSDL definition for use in resolving MessageType, PortType,
 * Operation and BPEL properties and property aliases
 *
 * @param def WSDL definition
 */
@SuppressWarnings("unchecked")
public void addDefinition(Definition4BPEL def, ResourceFinder rf, URI defuri) throws CompilationException {
    if (def == null)
        throw new NullPointerException("def=null");

    if (__log.isDebugEnabled()) {
        __log.debug("addDefinition(" + def.getTargetNamespace() + " from " + def.getDocumentBaseURI() + ")");
    }

    if (_definitions.containsKey(def.getTargetNamespace())) {
        // This indicates that we imported a WSDL with the same namespace from
        // two different locations. This is not an error, but should be a warning.
        if (__log.isInfoEnabled()) {
            __log.info("WSDL at " + defuri + " is a duplicate import, your documents "
                    + "should all be in different namespaces (its's not nice but will still work).");
        }
    }

    ArrayList<Definition4BPEL> defs = null;
    if (_definitions.get(def.getTargetNamespace()) == null)
        defs = new ArrayList<Definition4BPEL>();
    else
        defs = _definitions.get(def.getTargetNamespace());

    defs.add(def);
    _definitions.put(def.getTargetNamespace(), defs);

    captureSchemas(def, rf, defuri);

    if (__log.isDebugEnabled())
        __log.debug("Processing <imports> in " + def.getDocumentBaseURI());

    for (List<Import> imports : ((Map<String, List<Import>>) def.getImports()).values()) {
        HashSet<String> imported = new HashSet<String>();

        for (Import im : imports) {
            // If there are several imports in the same WSDL all importing the same namespace
            // that is a sure sign of programmer error.
            if (imported.contains(im.getNamespaceURI())) {
                if (__log.isInfoEnabled()) {
                    __log.info("WSDL at " + im.getLocationURI() + " imports several documents in the same "
                            + "namespace (" + im.getNamespaceURI() + "), your documents should all be in different "
                            + "namespaces (its's not nice but will still work).");
                }
            }

            Definition4BPEL importDef = (Definition4BPEL) im.getDefinition();

            // The assumption here is that if the definition is not set on the
            // import object then there was some problem parsing the thing,
            // although it would have been nice to actually get the parse
            // error.
            if (importDef == null) {
                CompilationException ce = new CompilationException(
                        __cmsgs.errWsdlImportNotFound(im.getNamespaceURI(), im.getLocationURI())
                                .setSource(new SourceLocation(defuri)));
                if (_ctx == null)
                    throw ce;

                _ctx.recoveredFromError(new SourceLocation(defuri), ce);
                continue;
            }

            imported.add(im.getNamespaceURI());
            addDefinition((Definition4BPEL) im.getDefinition(), rf, defuri.resolve(im.getLocationURI()));
        }
    }
}
From source file:net.sf.taverna.t2.security.credentialmanager.impl.CredentialManagerImpl.java
protected LinkedHashSet<URI> getPossibleServiceURIsToLookup(URI serviceURI, boolean usePathRecursion) {
    try {
        serviceURI = serviceURI.normalize();
        serviceURI = dnParser.setUserInfoForURI(serviceURI, null);
    } catch (URISyntaxException ex) {
        logger.warn("Could not strip userinfo from " + serviceURI, ex);
    }

    /*
     * We'll use a LinkedHashSet to avoid checking for duplicates, like if
     * serviceURI.equals(withoutQuery). Only the first hit should be added to
     * the set.
     */
    LinkedHashSet<URI> possibles = new LinkedHashSet<URI>();

    possibles.add(serviceURI);
    if (!usePathRecursion || !serviceURI.isAbsolute())
        return possibles;

    /*
     * We'll preserve the fragment, as it is used to indicate the realm
     */
    String rawFragment = serviceURI.getRawFragment();
    if (rawFragment == null)
        rawFragment = "";
    URI withoutQuery = serviceURI.resolve(serviceURI.getRawPath());
    addFragmentedURI(possibles, withoutQuery, rawFragment);

    // Immediate parent
    URI parent = withoutQuery.resolve(".");
    addFragmentedURI(possibles, parent, rawFragment);
    URI oldParent = null;
    // Top parent (to be added later)
    URI root = parent.resolve("/");
    while (!parent.equals(oldParent) && !parent.equals(root) && parent.getPath().length() > 0) {
        // Intermediate parents, but not for "http://bla.org" as we would
        // find "http://bla.org.."
        oldParent = parent;
        parent = parent.resolve("..");
        addFragmentedURI(possibles, parent, rawFragment);
    }
    // In case while-loop did not do so, also include root
    addFragmentedURI(possibles, root, rawFragment);
    if (rawFragment.length() > 0)
        // Add the non-fragment versions in the bottom of the list
        for (URI withFragment : new ArrayList<>(possibles))
            try {
                possibles.add(dnParser.setFragmentForURI(withFragment, null));
            } catch (URISyntaxException e) {
                logger.warn("Could not non-fragment URI " + withFragment);
            }
    return possibles;
}
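The example above walks up the path hierarchy by repeatedly resolving ".", "..", and "/" against an absolute URI. A stripped-down sketch of that idiom, without the Taverna helper types (the service URL is invented for illustration):

import java.net.URI;
import java.util.LinkedHashSet;

public class ParentWalkDemo {
    public static void main(String[] args) {
        // Hypothetical service URI, used only to illustrate the idiom.
        URI uri = URI.create("https://example.org/a/b/c/service?query=1");

        LinkedHashSet<URI> candidates = new LinkedHashSet<>();
        // Drop the query by resolving the raw path against the URI itself.
        URI withoutQuery = uri.resolve(uri.getRawPath());
        candidates.add(withoutQuery);

        URI parent = withoutQuery.resolve(".");   // https://example.org/a/b/c/
        URI root = parent.resolve("/");           // https://example.org/
        URI oldParent = null;
        while (!parent.equals(oldParent) && !parent.equals(root)) {
            candidates.add(parent);
            oldParent = parent;
            parent = parent.resolve("..");        // one directory up per iteration
        }
        candidates.add(root);

        candidates.forEach(System.out::println);
    }
}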
From source file:com.netflix.genie.web.services.impl.JobDirectoryServerServiceImpl.java
private void handleRequest(final URI baseUri, final String relativePath, final HttpServletRequest request,
        final HttpServletResponse response, final JobDirectoryManifest manifest, final URI jobDirectoryRoot)
        throws IOException, ServletException {
    log.debug("Handle request, baseUri: '{}', relpath: '{}', jobRootUri: '{}'", baseUri, relativePath,
            jobDirectoryRoot);
    final JobDirectoryManifest.ManifestEntry entry;
    final Optional<JobDirectoryManifest.ManifestEntry> entryOptional = manifest.getEntry(relativePath);
    if (entryOptional.isPresent()) {
        entry = entryOptional.get();
    } else {
        log.error("No such entry in job manifest: {}", relativePath);
        response.sendError(HttpStatus.NOT_FOUND.value(), "Not found: " + relativePath);
        return;
    }

    if (entry.isDirectory()) {
        // For now maintain the V3 structure
        // TODO: Once we determine what we want for V4 use v3/v4 flags or some way to differentiate
        // TODO: there's no unit test covering this section
        final DefaultDirectoryWriter.Directory directory = new DefaultDirectoryWriter.Directory();
        final List<DefaultDirectoryWriter.Entry> files = Lists.newArrayList();
        final List<DefaultDirectoryWriter.Entry> directories = Lists.newArrayList();
        try {
            entry.getParent().ifPresent(parentPath -> {
                final JobDirectoryManifest.ManifestEntry parentEntry = manifest.getEntry(parentPath)
                        .orElseThrow(IllegalArgumentException::new);
                directory.setParent(createEntry(parentEntry, baseUri));
            });

            for (final String childPath : entry.getChildren()) {
                final JobDirectoryManifest.ManifestEntry childEntry = manifest.getEntry(childPath)
                        .orElseThrow(IllegalArgumentException::new);

                if (childEntry.isDirectory()) {
                    directories.add(this.createEntry(childEntry, baseUri));
                } else {
                    files.add(this.createEntry(childEntry, baseUri));
                }
            }
        } catch (final IllegalArgumentException iae) {
            log.error("Encountered unexpected problem traversing the manifest for directory entry {}", entry, iae);
            response.sendError(HttpStatus.INTERNAL_SERVER_ERROR.value());
            return;
        }

        directories.sort(Comparator.comparing(DefaultDirectoryWriter.Entry::getName));
        files.sort(Comparator.comparing(DefaultDirectoryWriter.Entry::getName));

        directory.setDirectories(directories);
        directory.setFiles(files);

        final String accept = request.getHeader(HttpHeaders.ACCEPT);
        if (accept != null && accept.contains(MediaType.TEXT_HTML_VALUE)) {
            response.setContentType(MediaType.TEXT_HTML_VALUE);
            response.getOutputStream().write(DefaultDirectoryWriter.directoryToHTML(entry.getName(), directory)
                    .getBytes(StandardCharsets.UTF_8));
        } else {
            response.setContentType(MediaType.APPLICATION_JSON_VALUE);
            GenieObjectMapper.getMapper().writeValue(response.getOutputStream(), directory);
        }
    } else {
        final URI location = jobDirectoryRoot.resolve(entry.getPath());
        log.debug("Get resource: {}", location);
        final Resource jobResource = this.resourceLoader.getResource(location.toString());
        // Every file really should have a media type but if not use text/plain
        final String mediaType = entry.getMimeType().orElse(MediaType.TEXT_PLAIN_VALUE);
        final ResourceHttpRequestHandler handler = this.genieResourceHandlerFactory.get(mediaType, jobResource);
        handler.handleRequest(request, response);
    }
}
From source file:de.sub.goobi.metadaten.Metadaten.java
/**
 * Reorder pagination.
 */
public void reOrderPagination() throws IOException {
    URI imageDirectory;
    imageDirectory = fileService.getImagesDirectory(process);
    if (imageDirectory.equals("")) {
        Helper.setFehlerMeldung("ErrorMetsEditorImageRenaming");
        return;
    }

    List<URI> oldfilenames = new ArrayList<>();
    for (DocStruct page : digitalDocument.getPhysicalDocStruct().getAllChildren()) {
        oldfilenames.add(URI.create(page.getImageName()));
    }

    for (URI imagename : oldfilenames) {
        for (URI folder : allTifFolders) {
            URI filename = imageDirectory.resolve(folder).resolve(imagename);
            String newFileName = filename + "_bak";
            fileService.renameFile(filename, newFileName);
        }
        URI ocrFolder = fileService.getProcessSubTypeURI(process, ProcessSubType.OCR, null);
        if (fileService.fileExist(ocrFolder)) {
            ArrayList<URI> allOcrFolder = fileService.getSubUris(ocrFolder);
            for (URI folder : allOcrFolder) {
                URI filename = folder.resolve(imagename);
                String newFileName = filename + "_bak";
                fileService.renameFile(filename, newFileName);
            }
        }

        int counter = 1;
        for (URI oldImagename : oldfilenames) {
            String newfilenamePrefix = generateFileName(counter);
            for (URI folder : allTifFolders) {
                URI fileToSort = imageDirectory.resolve(folder).resolve(oldImagename);
                String fileExtension = Metadaten
                        .getFileExtension(fileService.getFileName(fileToSort).replace("_bak", ""));
                URI tempFileName = imageDirectory.resolve(folder)
                        .resolve(fileService.getFileName(fileToSort) + "_bak");
                String sortedName = newfilenamePrefix + fileExtension.toLowerCase();
                fileService.renameFile(tempFileName, sortedName);
                digitalDocument.getPhysicalDocStruct().getAllChildren().get(counter - 1)
                        .setImageName(sortedName);
            }
            try {
                URI ocr = fileService.getProcessSubTypeURI(process, ProcessSubType.OCR, null);
                if (fileService.fileExist(ocr)) {
                    ArrayList<URI> allOcrFolder = fileService.getSubUris(ocr);
                    for (URI folder : allOcrFolder) {
                        URI fileToSort = folder.resolve(imagename);
                        String fileExtension = Metadaten
                                .getFileExtension(fileService.getFileName(fileToSort).replace("_bak", ""));
                        URI tempFileName = fileToSort.resolve("_bak");
                        String sortedName = newfilenamePrefix + fileExtension.toLowerCase();
                        fileService.renameFile(tempFileName, sortedName);
                    }
                }
            } catch (IOException e) {
                logger.error(e);
            }
            counter++;
        }
        retrieveAllImages();
        identifyImage(0);
    }
}
From source file:org.apache.ode.bpel.compiler.WSDLRegistry.java
/**
 * Adds a WSDL definition for use in resolving MessageType, PortType,
 * Operation and BPEL properties and property aliases
 *
 * @param def WSDL definition
 */
@SuppressWarnings("unchecked")
public void addDefinition(Definition4BPEL def, ResourceFinder rf, URI defuri) throws CompilationException {
    if (def == null)
        throw new NullPointerException("def=null");

    if (__log.isDebugEnabled()) {
        __log.debug("addDefinition(" + def.getTargetNamespace() + " from " + def.getDocumentBaseURI() + ")");
    }

    if (_definitions.containsKey(def.getTargetNamespace())) {
        // This indicates that we imported a WSDL with the same namespace from
        // two different locations. This is not an error, but should be a warning.
        if (__log.isInfoEnabled()) {
            __log.info("WSDL at " + defuri + " is a duplicate import, your documents "
                    + "should all be in different namespaces (its's not nice but will still work).");
        }
        for (Definition4BPEL aDef : _definitions.get(def.getTargetNamespace())) {
            if (aDef.getDocumentBaseURI().equals(def.getDocumentBaseURI())) {
                if (__log.isInfoEnabled()) {
                    __log.info("WSDL at " + defuri + " is already imported, this denotes a circular reference.");
                    // no need to keep going: either return or throw an error
                }
                return;
            }
        }
    }

    ArrayList<Definition4BPEL> defs = null;
    if (_definitions.get(def.getTargetNamespace()) == null)
        defs = new ArrayList<Definition4BPEL>();
    else
        defs = _definitions.get(def.getTargetNamespace());

    defs.add(def);
    _definitions.put(def.getTargetNamespace(), defs);

    captureSchemas(def, rf, defuri);

    if (__log.isDebugEnabled())
        __log.debug("Processing <imports> in " + def.getDocumentBaseURI());

    for (List<Import> imports : ((Map<String, List<Import>>) def.getImports()).values()) {
        HashSet<String> imported = new HashSet<String>();

        for (Import im : imports) {
            // If there are several imports in the same WSDL all importing the same namespace
            // that is a sure sign of programmer error.
            if (imported.contains(im.getNamespaceURI())) {
                if (__log.isInfoEnabled()) {
                    __log.info("WSDL at " + im.getLocationURI() + " imports several documents in the same "
                            + "namespace (" + im.getNamespaceURI() + "), your documents should all be in different "
                            + "namespaces (its's not nice but will still work).");
                }
            }

            Definition4BPEL importDef = (Definition4BPEL) im.getDefinition();

            // The assumption here is that if the definition is not set on the
            // import object then there was some problem parsing the thing,
            // although it would have been nice to actually get the parse
            // error.
            if (importDef == null) {
                CompilationException ce = new CompilationException(
                        __cmsgs.errWsdlImportNotFound(im.getNamespaceURI(), im.getLocationURI())
                                .setSource(new SourceLocationImpl(defuri)));
                if (_ctx == null)
                    throw ce;

                _ctx.recoveredFromError(new SourceLocationImpl(defuri), ce);
                continue;
            }

            imported.add(im.getNamespaceURI());
            addDefinition((Definition4BPEL) im.getDefinition(), rf, defuri.resolve(im.getLocationURI()));
        }
    }
}
From source file:com.mirth.connect.client.core.Client.java
public Client(String address, int timeout, String[] httpsProtocols, String[] httpsCipherSuites,
        String[] apiProviderClasses) throws URISyntaxException {
    if (!address.endsWith("/")) {
        address += "/";
    }

    URI addressURI = new URI(address);
    serverConnection = new ServerConnection(timeout, httpsProtocols, httpsCipherSuites,
            StringUtils.equalsIgnoreCase(addressURI.getScheme(), "http"));

    ClientConfig config = new ClientConfig().connectorProvider(new ConnectorProvider() {
        @Override
        public Connector getConnector(javax.ws.rs.client.Client client, Configuration runtimeConfig) {
            return serverConnection;
        }
    });

    // Register providers
    for (Class<?> providerClass : new Reflections("com.mirth.connect.client.core.api.providers")
            .getTypesAnnotatedWith(javax.ws.rs.ext.Provider.class)) {
        config.register(providerClass);
    }
    config.register(MultiPartFeature.class);

    // Register servlet interfaces
    Set<Class<? extends BaseServletInterface>> servletClasses = new Reflections(
            "com.mirth.connect.client.core.api.servlets").getSubTypesOf(BaseServletInterface.class);
    for (Class<?> servletClass : servletClasses) {
        config.register(servletClass);
    }

    if (ArrayUtils.isNotEmpty(apiProviderClasses)) {
        for (String apiProviderClass : apiProviderClasses) {
            try {
                config.register(Class.forName(apiProviderClass));
            } catch (Throwable t) {
                logger.error("Error registering API provider class: " + apiProviderClass);
            }
        }
    }

    client = ClientBuilder.newClient(config);
    api = addressURI.resolve("api/" + Version.getLatest().toString());
}
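The constructor above appends a trailing "/" to the address before resolving "api/..." against it. That detail matters because resolve replaces the last path segment when the base path does not end with a slash. A minimal sketch of the difference (the URLs and version string are made up):

import java.net.URI;

public class TrailingSlashDemo {
    public static void main(String[] args) {
        // Without a trailing slash, the last segment of the base is replaced.
        URI noSlash = URI.create("https://example.com/mirth");
        System.out.println(noSlash.resolve("api/3.8.0"));   // https://example.com/api/3.8.0

        // With a trailing slash, the relative reference is appended under it.
        URI withSlash = URI.create("https://example.com/mirth/");
        System.out.println(withSlash.resolve("api/3.8.0")); // https://example.com/mirth/api/3.8.0
    }
}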
From source file:gr.iti.mklab.bubing.parser.ITIHTMLParser.java
@Override
public byte[] parse(final URI uri, final HttpResponse httpResponse, final LinkReceiver linkReceiver)
        throws IOException {
    guessedCharset = "ISO-8859-1";

    final HttpEntity entity = httpResponse.getEntity();

    // TODO: check if it will make sense to use getValue() of entity
    // Try to guess using headers
    final Header contentTypeHeader = entity.getContentType();
    if (contentTypeHeader != null) {
        final String headerCharset = getCharsetNameFromHeader(contentTypeHeader.getValue());
        if (headerCharset != null)
            guessedCharset = headerCharset;
    }

    final InputStream contentStream = entity.getContent();

    /* Note that the bubing-guessed-charset header and the header guessed by inspecting
       the entity content are complementary. The first is supposed to appear when parsing
       a store, the second while crawling. They should be aligned. This is a bit tricky,
       but we want to avoid the dependency on "rewindable" streams while parsing. */
    final Header bubingGuessedCharsetHeader = httpResponse instanceof WarcRecord
            ? ((WarcRecord) httpResponse).getWarcHeader(WarcHeader.Name.BUBING_GUESSED_CHARSET)
            : null;
    if (bubingGuessedCharsetHeader != null)
        guessedCharset = bubingGuessedCharsetHeader.getValue();
    else {
        if (contentStream instanceof InspectableFileCachedInputStream) {
            final InspectableFileCachedInputStream inspectableStream = (InspectableFileCachedInputStream) contentStream;
            final String metaCharset = getCharsetName(inspectableStream.buffer, inspectableStream.inspectable);
            if (metaCharset != null)
                guessedCharset = metaCharset;
        }
    }

    if (LOGGER.isDebugEnabled())
        LOGGER.debug("Guessing charset \"{}\" for URL {}", guessedCharset, uri);

    Charset charset = Charsets.ISO_8859_1; // Fallback
    try {
        charset = Charset.forName(guessedCharset);
    } catch (IllegalCharsetNameException e) {
        if (LOGGER.isDebugEnabled())
            LOGGER.debug("Response for {} contained an illegal charset name: \"{}\"", uri, guessedCharset);
    } catch (UnsupportedCharsetException e) {
        if (LOGGER.isDebugEnabled())
            LOGGER.debug("Response for {} contained an unsupported charset: \"{}\"", uri, guessedCharset);
    }

    linkReceiver.init(uri);
    if (textProcessor != null)
        textProcessor.init(uri);

    // Get location if present
    location = null;
    metaLocation = null;

    final Header locationHeader = httpResponse.getFirstHeader(HttpHeaders.LOCATION);
    if (locationHeader != null) {
        final URI location = BURL.parse(locationHeader.getValue());
        if (location != null) {
            // This shouldn't happen by standard, but people unfortunately does it.
            if (!location.isAbsolute() && LOGGER.isDebugEnabled())
                LOGGER.debug("Found relative header location URL: \"{}\"", location);
            linkReceiver.location(this.location = uri.resolve(location));
        }
    }

    @SuppressWarnings("resource")
    final StreamedSource streamedSource = new StreamedSource(new InputStreamReader(contentStream, charset));
    if (buffer != null)
        streamedSource.setBuffer(buffer);
    if (digestAppendable != null)
        digestAppendable.init(crossAuthorityDuplicates ? null : uri);

    URI base = uri;

    int lastSegmentEnd = 0;
    int inSpecialText = 0;
    for (Segment segment : streamedSource) {
        if (segment.getEnd() > lastSegmentEnd) {
            lastSegmentEnd = segment.getEnd();
            if (segment instanceof StartTag) {
                final StartTag startTag = (StartTag) segment;
                if (startTag.getTagType() != StartTagType.NORMAL)
                    continue;
                final String name = startTag.getName();
                if ((name == HTMLElementName.STYLE || name == HTMLElementName.SCRIPT)
                        && !startTag.isSyntacticalEmptyElementTag())
                    inSpecialText++;

                if (digestAppendable != null)
                    digestAppendable.startTag(startTag);
                // TODO: detect flow breakers
                if (linkReceiver == null)
                    continue; // No link receiver, nothing to do.

                // IFRAME or FRAME + SRC
                if (name == HTMLElementName.IFRAME || name == HTMLElementName.FRAME
                        || name == HTMLElementName.EMBED)
                    process(linkReceiver, base, startTag.getAttributeValue("src"),
                            startTag.getAttributeValue("name"), true);
                else if (name == HTMLElementName.IMG) {
                    processImageURL(uri, base, startTag.getAttributeValue("src"),
                            startTag.getAttributeValue("alt"));
                } else if (name == HTMLElementName.SCRIPT)
                    process(linkReceiver, base, startTag.getAttributeValue("src"), null, false);
                else if (name == HTMLElementName.OBJECT)
                    process(linkReceiver, base, startTag.getAttributeValue("data"),
                            startTag.getAttributeValue("name"), true);
                else if (name == HTMLElementName.A || name == HTMLElementName.AREA
                        || name == HTMLElementName.LINK)
                    process(linkReceiver, base, startTag.getAttributeValue("href"), null, true);
                else if (name == HTMLElementName.BASE) {
                    String s = startTag.getAttributeValue("href");
                    if (s != null) {
                        final URI link = BURL.parse(s);
                        if (link != null) {
                            if (link.isAbsolute())
                                base = link;
                            else if (LOGGER.isDebugEnabled())
                                LOGGER.debug("Found relative BASE URL: \"{}\"", link);
                        }
                    }
                }
                // META REFRESH/LOCATION
                else if (name == HTMLElementName.META) {
                    final String equiv = startTag.getAttributeValue("http-equiv");
                    final String content = startTag.getAttributeValue("content");
                    if (equiv != null && content != null) {
                        equiv.toLowerCase();

                        // http-equiv="refresh" content="0;URL=http://foo.bar/..."
                        if (equiv.equals("refresh")) {
                            final int pos = URLEQUAL_PATTERN.search(content);
                            if (pos != -1) {
                                final String urlPattern = content.substring(pos + URLEQUAL_PATTERN.length());
                                final URI refresh = BURL.parse(urlPattern);
                                if (refresh != null) {
                                    // This shouldn't happen by standard, but people unfortunately does it.
                                    if (!refresh.isAbsolute() && LOGGER.isDebugEnabled())
                                        LOGGER.debug("Found relative META refresh URL: \"{}\"", urlPattern);
                                    linkReceiver.metaRefresh(base.resolve(refresh));
                                }
                            }
                        }
                        // http-equiv="location" content="http://foo.bar/..."
                        if (equiv.equals("location")) {
                            final URI metaLocation = BURL.parse(content);
                            if (metaLocation != null) {
                                // This shouldn't happen by standard, but people unfortunately does it.
                                if (!metaLocation.isAbsolute() && LOGGER.isDebugEnabled())
                                    LOGGER.debug("Found relative META location URL: \"{}\"", content);
                                linkReceiver.metaLocation(this.metaLocation = base.resolve(metaLocation));
                            }
                        }
                    }
                }
            } else if (segment instanceof EndTag) {
                final EndTag endTag = (EndTag) segment;
                final String name = endTag.getName();
                if (name == HTMLElementName.STYLE || name == HTMLElementName.SCRIPT) {
                    inSpecialText = Math.max(0, inSpecialText - 1); // Ignore extra closing tags
                }
                if (digestAppendable != null) {
                    if (endTag.getTagType() != EndTagType.NORMAL)
                        continue;
                    digestAppendable.endTag(endTag);
                }
            } else if (inSpecialText == 0) {
                if (textProcessor != null) {
                    if (segment instanceof CharacterReference)
                        ((CharacterReference) segment).appendCharTo(textProcessor);
                    else
                        textProcessor.append(segment);
                }
                if (digestAppendable != null) {
                    if (segment instanceof CharacterReference)
                        ((CharacterReference) segment).appendCharTo(digestAppendable);
                    else
                        digestAppendable.append(segment);
                }
            }
        }
    }

    if (DigestAppendable.DEBUG)
        if (digestAppendable != null) {
            System.err.println("Closing " + digestAppendable.debugFile + " for " + uri);
            digestAppendable.debugStream.close();
        }

    return digestAppendable != null ? digestAppendable.digest() : null;
}
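The parser above keeps a base URI (the page URI, replaced by an absolute <base href> when one is found) and resolves every extracted link, refresh, and location value against it. A stripped-down sketch of that pattern without the BUbiNG/Jericho types (the page URL and hrefs are invented):

import java.net.URI;
import java.util.List;

public class LinkResolutionDemo {
    public static void main(String[] args) {
        URI pageUri = URI.create("http://example.com/news/index.html");
        URI base = pageUri;

        // An absolute <base href="..."> replaces the page URI as the resolution base.
        URI baseHref = URI.create("http://cdn.example.com/articles/");
        if (baseHref.isAbsolute()) {
            base = baseHref;
        }

        // Every href/src found in the document is resolved against the current base.
        for (String href : List.of("story1.html", "../images/pic.png", "http://other.org/a")) {
            System.out.println(base.resolve(href));
        }
        // -> http://cdn.example.com/articles/story1.html
        // -> http://cdn.example.com/images/pic.png
        // -> http://other.org/a
    }
}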
From source file:org.kitodo.production.metadata.MetadataProcessor.java
private void assignNewImage(URI pagesDirectory, String currentPngFile) {
    File temporaryTifFile = null;
    try {
        temporaryTifFile = File.createTempFile("tempTif_", ".tif");
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
    }

    if (this.image != null) {
        try {
            URI tifFile = this.currentTifFolder.resolve(this.image);
            logger.trace("tiffconverterpfad: {}", tifFile);
            if (!fileService.fileExist(tifFile)) {
                tifFile = ServiceManager.getProcessService().getImagesTifDirectory(true, this.process.getId(),
                        this.process.getTitle(), this.process.getProcessBaseUri()).resolve(this.image);
                Helper.setErrorMessage("formularOrdner:TifFolders", "",
                        "image " + this.image + " does not exist in folder " + this.currentTifFolder
                                + ", using image from "
                                + new File(ServiceManager.getProcessService().getImagesTifDirectory(true,
                                        this.process.getId(), this.process.getTitle(),
                                        this.process.getProcessBaseUri())).getName());
            }

            // Copy tif-file to temporary folder
            try (InputStream tifFileInputStream = fileService.read(tifFile)) {
                if (temporaryTifFile != null) {
                    FileUtils.copyInputStreamToFile(tifFileInputStream, temporaryTifFile);
                    this.imageHelper.scaleFile(temporaryTifFile.toURI(), pagesDirectory.resolve(currentPngFile),
                            this.imageSize, this.imageRotation);
                    logger.trace("scaleFile");
                }
            }
        } catch (IOException | ImageManipulatorException | ImageManagerException | RuntimeException e) {
            Helper.setErrorMessage("could not getById image folder", logger, e);
        } finally {
            if (temporaryTifFile != null) {
                try {
                    if (!fileService.delete(temporaryTifFile.toURI())) {
                        logger.error("Error while deleting temporary tif file: "
                                + temporaryTifFile.getAbsolutePath());
                    }
                    // not working
                } catch (IOException e) {
                    logger.error("Error while deleting temporary tif file: " + e.getMessage());
                }
            }
        }
    }
}
From source file:org.opencb.opencga.analysis.storage.variant.VariantStorage.java
public void calculateStats(long studyId, List<Long> cohortIds, String catalogOutDirIdStr, String outdirStr,
        String sessionId, QueryOptions options)
        throws AnalysisExecutionException, CatalogException, IOException, URISyntaxException {
    Job.Type step = Job.Type.COHORT_STATS;
    String fileIdStr = options.getString(Options.FILE_ID.key(), null);
    boolean overwriteStats = options.getBoolean(Options.OVERWRITE_STATS.key(), false);
    boolean updateStats = options.getBoolean(Options.UPDATE_STATS.key(), false);
    final Long fileId = fileIdStr == null ? null : catalogManager.getFileId(fileIdStr, sessionId);

    // Outdir must be empty
    URI outdirUri = UriUtils.createDirectoryUri(outdirStr);
    final Path outdir = Paths.get(outdirUri);
    outdirMustBeEmpty(outdir);

    cohortIds = checkCohorts(studyId, cohortIds, options, sessionId);
    Map<Long, Cohort> cohortsMap = checkCanCalculateCohorts(studyId, cohortIds, updateStats, sessionId);

    String region = options.getString(VariantDBAdaptor.VariantQueryParams.REGION.key());
    String outputFileName = buildOutputFileName(cohortIds, options, cohortsMap, region);

    Long catalogOutDirId;
    if (catalogOutDirIdStr != null) {
        catalogOutDirId = catalogManager.getFileManager().getId(catalogOutDirIdStr, studyId, sessionId);
        if (catalogOutDirId <= 0) {
            throw new CatalogException(
                    "Output directory " + catalogOutDirIdStr + " could not be found within catalog.");
        }
    } else {
        catalogOutDirId = null;
    }

    QueryOptions calculateStatsOptions = new QueryOptions(options)
            // .append(VariantStorageManager.Options.LOAD_BATCH_SIZE.key(), 100)
            // .append(VariantStorageManager.Options.LOAD_THREADS.key(), 6)
            .append(Options.OVERWRITE_STATS.key(), overwriteStats)
            .append(Options.UPDATE_STATS.key(), updateStats);
    calculateStatsOptions.putIfNotNull(Options.FILE_ID.key(), fileId);
    calculateStatsOptions.putIfNotEmpty(VariantDBAdaptor.VariantQueryParams.REGION.key(), region);

    // if the study is aggregated and a mapping file is provided, pass it to storage
    // and create in catalog the cohorts described in the mapping file
    Aggregation aggregation = getAggregation(studyId, options, sessionId);
    String aggregationMappingFile = options.getString(Options.AGGREGATION_MAPPING_PROPERTIES.key());
    if (Aggregation.isAggregated(aggregation) && !aggregationMappingFile.isEmpty()) {
        try (InputStream is = FileUtils.newInputStream(Paths.get(aggregationMappingFile))) {
            Properties properties = new Properties();
            properties.load(is);
            calculateStatsOptions.append(Options.AGGREGATION_MAPPING_PROPERTIES.key(), properties);
        }
    }

    DataStore dataStore = AbstractFileIndexer.getDataStore(catalogManager, studyId, File.Bioformat.VARIANT,
            sessionId);
    StudyConfiguration studyConfiguration = updateStudyConfiguration(sessionId, studyId, dataStore);

    Thread hook = buildHook(cohortIds, sessionId, outdir);
    writeJobStatus(outdir, new Job.JobStatus(Job.JobStatus.RUNNING, "Job has just started"));
    Runtime.getRuntime().addShutdownHook(hook);
    // Up to this point, catalog has not been modified
    try {
        // Modify cohort status to "CALCULATING"
        updateCohorts(cohortIds, sessionId, Cohort.CohortStatus.CALCULATING);

        VariantStorageManager variantStorageManager = StorageManagerFactory.get()
                .getVariantStorageManager(dataStore.getStorageEngine());
        VariantStatisticsManager variantStatisticsManager = new VariantStatisticsManager();

        VariantDBAdaptor dbAdaptor = variantStorageManager.getDBAdaptor(dataStore.getDbName());
        Map<String, Integer> cohortNameIdMap = new HashMap<>(cohortIds.size());
        Map<String, Set<String>> cohortSamplesMap = new HashMap<>(cohortIds.size());
        for (Map.Entry<Long, Cohort> entry : cohortsMap.entrySet()) {
            cohortNameIdMap.put(entry.getValue().getName(), entry.getKey().intValue());
            cohortSamplesMap.put(entry.getValue().getName(),
                    entry.getValue().getSamples().stream().map(sampleId -> {
                        return studyConfiguration.getSampleIds().inverse().get(sampleId.intValue());
                    }).collect(Collectors.toSet()));
        }
        URI stats = variantStatisticsManager.createStats(dbAdaptor, outdirUri.resolve(outputFileName),
                cohortSamplesMap, cohortNameIdMap, studyConfiguration, calculateStatsOptions);

        writeJobStatus(outdir, new Job.JobStatus(Job.JobStatus.RUNNING, "Job still running. Statistics created."));
        variantStatisticsManager.loadStats(dbAdaptor, stats, studyConfiguration, options);

        if (catalogOutDirId != null) {
            copyResults(Paths.get(outdirUri), catalogOutDirId, sessionId);
        }

        writeJobStatus(outdir, new Job.JobStatus(Job.JobStatus.DONE, "Job completed"));
        // Modify cohort status to "READY"
        updateCohorts(cohortIds, sessionId, Cohort.CohortStatus.READY);
    } catch (Exception e) {
        // Error!
        logger.error("Error executing stats. Set cohorts status to " + Cohort.CohortStatus.INVALID, e);
        writeJobStatus(outdir, new Job.JobStatus(Job.JobStatus.ERROR, "Job with error : " + e.getMessage()));
        // Modify to "INVALID"
        updateCohorts(cohortIds, sessionId, Cohort.CohortStatus.INVALID);
        throw new AnalysisExecutionException("Error calculating statistics.", e);
    } finally {
        // Remove hook
        Runtime.getRuntime().removeShutdownHook(hook);
    }
}
From source file:org.openhab.binding.amazonechocontrol.internal.Connection.java
public HttpsURLConnection makeRequest(String verb, String url, @Nullable String postData, boolean json,
        boolean autoredirect, @Nullable Map<String, String> customHeaders) throws IOException, URISyntaxException {
    String currentUrl = url;
    // loop for handling redirect, using automatic redirect is not possible, because
    // all response headers must be catched
    for (int i = 0; i < 30; i++) {
        int code;
        HttpsURLConnection connection = null;
        try {
            logger.debug("Make request to {}", url);
            connection = (HttpsURLConnection) new URL(currentUrl).openConnection();
            connection.setRequestMethod(verb);
            connection.setRequestProperty("Accept-Language", "en-US");
            if (customHeaders == null || !customHeaders.containsKey("User-Agent")) {
                connection.setRequestProperty("User-Agent", userAgent);
            }
            connection.setRequestProperty("Accept-Encoding", "gzip");
            connection.setRequestProperty("DNT", "1");
            connection.setRequestProperty("Upgrade-Insecure-Requests", "1");
            if (customHeaders != null) {
                for (String key : customHeaders.keySet()) {
                    String value = customHeaders.get(key);
                    if (StringUtils.isNotEmpty(value)) {
                        connection.setRequestProperty(key, value);
                    }
                }
            }
            connection.setInstanceFollowRedirects(false);

            // add cookies
            URI uri = connection.getURL().toURI();

            if (customHeaders == null || !customHeaders.containsKey("Cookie")) {
                StringBuilder cookieHeaderBuilder = new StringBuilder();
                for (HttpCookie cookie : cookieManager.getCookieStore().get(uri)) {
                    if (cookieHeaderBuilder.length() > 0) {
                        cookieHeaderBuilder.append(";");
                    }
                    cookieHeaderBuilder.append(cookie.getName());
                    cookieHeaderBuilder.append("=");
                    cookieHeaderBuilder.append(cookie.getValue());
                    if (cookie.getName().equals("csrf")) {
                        connection.setRequestProperty("csrf", cookie.getValue());
                    }
                }
                if (cookieHeaderBuilder.length() > 0) {
                    String cookies = cookieHeaderBuilder.toString();
                    connection.setRequestProperty("Cookie", cookies);
                }
            }
            if (postData != null) {
                logger.debug("{}: {}", verb, postData);
                // post data
                byte[] postDataBytes = postData.getBytes(StandardCharsets.UTF_8);
                int postDataLength = postDataBytes.length;
                connection.setFixedLengthStreamingMode(postDataLength);

                if (json) {
                    connection.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
                } else {
                    connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
                }
                connection.setRequestProperty("Content-Length", Integer.toString(postDataLength));
                if (verb == "POST") {
                    connection.setRequestProperty("Expect", "100-continue");
                }

                connection.setDoOutput(true);
                OutputStream outputStream = connection.getOutputStream();
                outputStream.write(postDataBytes);
                outputStream.close();
            }
            // handle result
            code = connection.getResponseCode();
            String location = null;

            // handle response headers
            Map<String, List<String>> headerFields = connection.getHeaderFields();
            for (Map.Entry<String, List<String>> header : headerFields.entrySet()) {
                String key = header.getKey();
                if (StringUtils.isNotEmpty(key)) {
                    if (key.equalsIgnoreCase("Set-Cookie")) {
                        // store cookie
                        for (String cookieHeader : header.getValue()) {
                            if (StringUtils.isNotEmpty(cookieHeader)) {
                                List<HttpCookie> cookies = HttpCookie.parse(cookieHeader);
                                for (HttpCookie cookie : cookies) {
                                    cookieManager.getCookieStore().add(uri, cookie);
                                }
                            }
                        }
                    }
                    if (key.equalsIgnoreCase("Location")) {
                        // get redirect location
                        location = header.getValue().get(0);
                        if (StringUtils.isNotEmpty(location)) {
                            location = uri.resolve(location).toString();
                            // check for https
                            if (location.toLowerCase().startsWith("http://")) {
                                // always use https
                                location = "https://" + location.substring(7);
                                logger.debug("Redirect corrected to {}", location);
                            }
                        }
                    }
                }
            }
            if (code == 200) {
                logger.debug("Call to {} succeeded", url);
                return connection;
            }
            if (code == 302 && location != null) {
                logger.debug("Redirected to {}", location);
                currentUrl = location;
                if (autoredirect) {
                    continue;
                }
                return connection;
            }
        } catch (IOException e) {
            if (connection != null) {
                connection.disconnect();
            }
            logger.warn("Request to url '{}' fails with unkown error", url, e);
            throw e;
        } catch (Exception e) {
            if (connection != null) {
                connection.disconnect();
            }
            throw e;
        }
        if (code != 200) {
            throw new HttpException(code, verb + " url '" + url + "' failed: " + connection.getResponseMessage());
        }
    }
    throw new ConnectionException("Too many redirects");
}
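The redirect handling above works because resolve copes with both forms a server may send in the Location header: a relative reference is resolved against the request URI, while an absolute URL passes through unchanged. A minimal sketch of that behavior (the URLs are invented for illustration):

import java.net.URI;

public class RedirectLocationDemo {
    public static void main(String[] args) {
        URI requestUri = URI.create("https://www.example.com/ap/signin?foo=bar");

        // Relative Location header: resolved against the request URI.
        String relativeLocation = "/ap/mfa";
        System.out.println(requestUri.resolve(relativeLocation));  // https://www.example.com/ap/mfa

        // Absolute Location header: returned as-is.
        String absoluteLocation = "https://other.example.org/landing";
        System.out.println(requestUri.resolve(absoluteLocation));  // https://other.example.org/landing
    }
}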