List of usage examples for java.net URI toURL
public URL toURL() throws MalformedURLException
From source file:org.apache.woden.internal.DOMWSDLReader.java
private XmlSchema retrieveSchema(XMLElement contextElement, URI contextURI, String schemaSpec) throws WSDLException { Document importedSchemaDoc = null; Element schemaEl = null;//from w w w . j a v a2 s.c o m String schemaLoc = null; URL url = null; try { /* * For simple resolvers, we resolve the parent (Description) URI * to be used as the context. This allows for relative locationURIs * to be resolved implicitly - they are considered to be located * relative to the resolved parent. Therefore, relative URIs such as these * need not be listed in the catalog file. */ /* TODO * OASIS-style catalogs have a convenience notation to define root URIs * thus grouping related URLs together. In this case the context URI here * should be left alone, but the resultant locationURL resolved instead. * * Implement a boolean system property like org.apache.woden.resolver.useRelativeURLs * (set by the resolver ctor). SimpleURIResolver (et al) should set this to true, * OASISCatalogResolver should set to false. */ URL contextURL = (contextURI != null) ? contextURI.toURL() : null; url = StringUtils.getURL(contextURL, schemaSpec); } catch (MalformedURLException e) { String baseLoc = contextURI != null ? contextURI.toString() : null; getErrorReporter().reportError(new ErrorLocatorImpl(), //TODO line&col nos. "WSDL502", new Object[] { baseLoc, schemaLoc }, ErrorReporter.SEVERITY_ERROR); //can't continue schema retrieval with a bad URL. return null; } String schemaURL = url.toString(); //If the schema has already been imported, reuse it. XmlSchema schemaDef = (XmlSchema) fImportedSchemas.get(schemaURL); if (schemaDef == null) { //not previously imported, so retrieve it now. String resolvedLoc = null; try { URI resolvedURI = resolveURI(getURI(schemaURL)); resolvedLoc = resolvedURI.toString(); importedSchemaDoc = getDocument(new InputSource(resolvedLoc), resolvedLoc); } catch (IOException e4) { //schema retrieval failed (e.g. 
'not found') getErrorReporter().reportError(new ErrorLocatorImpl(), //TODO line&col nos. "WSDL504", new Object[] { schemaURL }, ErrorReporter.SEVERITY_WARNING, e4); //cannot continue without resolving the URL return null; } schemaEl = importedSchemaDoc.getDocumentElement(); try { //String baseLoc = contextURI != null ? contextURI.toString() : null; String baseLoc = resolvedLoc; XmlSchemaCollection xsc = new XmlSchemaCollection(); xsc.setBaseUri(resolvedLoc); // Plug in the selected woden URI Resolver xsc.setSchemaResolver(new DOMSchemaResolverAdapter(getURIResolver(), contextElement)); schemaDef = xsc.read(schemaEl, baseLoc); fImportedSchemas.put(schemaURL, schemaDef); } catch (XmlSchemaException e) { getErrorReporter().reportError(new ErrorLocatorImpl(), //TODO line&col nos. "WSDL522", new Object[] { schemaURL }, ErrorReporter.SEVERITY_WARNING, e); } } return schemaDef; }
From source file:microsoft.exchange.webservices.data.AutodiscoverService.java
/** * Gets the enabled autodiscover endpoints on a specific host. * * @param host The host./*from w w w. j a v a 2s . c o m*/ * @param endpoints Endpoints found for host. * @return Flags indicating which endpoints are enabled. * @throws Exception the exception */ private boolean tryGetEnabledEndpointsForHost(String host, OutParam<EnumSet<AutodiscoverEndpoints>> endpoints) throws Exception { this.traceMessage(TraceFlags.AutodiscoverConfiguration, String.format("Determining which endpoints are enabled for host %s", host)); // We may get redirected to another host. And therefore need to limit the number of redirections we'll // tolerate. for (int currentHop = 0; currentHop < AutodiscoverMaxRedirections; currentHop++) { URI autoDiscoverUrl = new URI(String.format(AutodiscoverLegacyHttpsUrl, host)); endpoints.setParam(EnumSet.of(AutodiscoverEndpoints.None)); HttpWebRequest request = null; try { request = new HttpClientWebRequest(httpClient, httpContext); try { request.setUrl(autoDiscoverUrl.toURL()); } catch (MalformedURLException e) { String strErr = String.format("Incorrect format : %s", url); throw new ServiceLocalException(strErr); } request.setRequestMethod("GET"); request.setAllowAutoRedirect(false); request.setPreAuthenticate(false); request.setUseDefaultCredentials(this.getUseDefaultCredentials()); prepareCredentials(request); request.prepareConnection(); try { request.executeRequest(); } catch (IOException e) { return false; } OutParam<URI> outParam = new OutParam<URI>(); if (this.tryGetRedirectionResponse(request, outParam)) { URI redirectUrl = outParam.getParam(); this.traceMessage(TraceFlags.AutodiscoverConfiguration, String.format("Host returned redirection to host '%s'", redirectUrl.getHost())); host = redirectUrl.getHost(); } else { endpoints.setParam(this.getEndpointsFromHttpWebResponse(request)); this.traceMessage(TraceFlags.AutodiscoverConfiguration, String .format("Host returned enabled endpoint flags: %s", endpoints.getParam().toString())); return 
true; } } finally { if (request != null) { try { request.close(); } catch (Exception e) { // Connection can't be closed. We'll ignore this... } } } } this.traceMessage(TraceFlags.AutodiscoverConfiguration, String.format("Maximum number of redirection hops %d exceeded", AutodiscoverMaxRedirections)); throw new AutodiscoverLocalException(Strings.MaximumRedirectionHopsExceeded); }
From source file:microsoft.exchange.webservices.data.autodiscover.AutodiscoverService.java
/** * Gets the enabled autodiscover endpoints on a specific host. * * @param host The host./*from w w w. j a v a2 s . co m*/ * @param endpoints Endpoints found for host. * @return Flags indicating which endpoints are enabled. * @throws Exception the exception */ private boolean tryGetEnabledEndpointsForHost(String host, OutParam<EnumSet<AutodiscoverEndpoints>> endpoints) throws Exception { this.traceMessage(TraceFlags.AutodiscoverConfiguration, String.format("Determining which endpoints are enabled for host %s", host)); // We may get redirected to another host. And therefore need to limit the number of redirections we'll // tolerate. for (int currentHop = 0; currentHop < AutodiscoverMaxRedirections; currentHop++) { URI autoDiscoverUrl = new URI(String.format(AutodiscoverLegacyHttpsUrl, host)); endpoints.setParam(EnumSet.of(AutodiscoverEndpoints.None)); HttpWebRequest request = null; try { request = new HttpClientWebRequest(httpClient, httpContext); try { request.setUrl(autoDiscoverUrl.toURL()); } catch (MalformedURLException e) { String strErr = String.format("Incorrect format : %s", url); throw new ServiceLocalException(strErr); } request.setRequestMethod("GET"); request.setAllowAutoRedirect(false); request.setPreAuthenticate(false); request.setUseDefaultCredentials(this.getUseDefaultCredentials()); prepareCredentials(request); request.prepareConnection(); try { request.executeRequest(); } catch (IOException e) { return false; } OutParam<URI> outParam = new OutParam<URI>(); if (this.tryGetRedirectionResponse(request, outParam)) { URI redirectUrl = outParam.getParam(); this.traceMessage(TraceFlags.AutodiscoverConfiguration, String.format("Host returned redirection to host '%s'", redirectUrl.getHost())); host = redirectUrl.getHost(); } else { endpoints.setParam(this.getEndpointsFromHttpWebResponse(request)); this.traceMessage(TraceFlags.AutodiscoverConfiguration, String .format("Host returned enabled endpoint flags: %s", endpoints.getParam().toString())); return 
true; } } finally { if (request != null) { try { request.close(); } catch (Exception e) { // Connection can't be closed. We'll ignore this... } } } } this.traceMessage(TraceFlags.AutodiscoverConfiguration, String.format("Maximum number of redirection hops %d exceeded", AutodiscoverMaxRedirections)); throw new MaximumRedirectionHopsExceededException(); }
From source file:org.apache.archiva.admin.mock.ArchivaIndexManagerMock.java
@Override public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException { log.info("start download remote index for remote repository {}", context.getRepository().getId()); URI remoteUpdateUri; if (!(context.getRepository() instanceof RemoteRepository) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class))) { throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId()); } else {/*from ww w .j av a2 s . com*/ RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get(); remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri()); } final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository(); executeUpdateFunction(context, indexingContext -> { try { // create a temp directory to download files Path tempIndexDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex"); Path indexCacheDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".indexCache"); Files.createDirectories(indexCacheDirectory); if (Files.exists(tempIndexDirectory)) { org.apache.archiva.common.utils.FileUtils.deleteDirectory(tempIndexDirectory); } Files.createDirectories(tempIndexDirectory); tempIndexDirectory.toFile().deleteOnExit(); String baseIndexUrl = indexingContext.getIndexUpdateUrl(); String wagonProtocol = remoteUpdateUri.toURL().getProtocol(); NetworkProxy networkProxy = null; if (remoteRepository.supportsFeature(RemoteIndexFeature.class)) { RemoteIndexFeature rif = remoteRepository.getFeature(RemoteIndexFeature.class).get(); if (StringUtils.isNotBlank(rif.getProxyId())) { try { networkProxy = networkProxyAdmin.getNetworkProxy(rif.getProxyId()); } catch (RepositoryAdminException e) { log.error("Error occured while retrieving proxy {}", e.getMessage()); } if (networkProxy == null) { log.warn( "your remote repository is 
configured to download remote index trought a proxy we cannot find id:{}", rif.getProxyId()); } } final StreamWagon wagon = (StreamWagon) wagonFactory .getWagon(new WagonFactoryRequest(wagonProtocol, remoteRepository.getExtraHeaders()) .networkProxy(networkProxy)); int readTimeout = (int) rif.getDownloadTimeout().toMillis() * 1000; wagon.setReadTimeout(readTimeout); wagon.setTimeout((int) remoteRepository.getTimeout().toMillis() * 1000); if (wagon instanceof AbstractHttpClientWagon) { HttpConfiguration httpConfiguration = new HttpConfiguration(); HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration(); httpMethodConfiguration.setUsePreemptive(true); httpMethodConfiguration.setReadTimeout(readTimeout); httpConfiguration.setGet(httpMethodConfiguration); AbstractHttpClientWagon.class.cast(wagon).setHttpConfiguration(httpConfiguration); } wagon.addTransferListener(new DownloadListener()); ProxyInfo proxyInfo = null; if (networkProxy != null) { proxyInfo = new ProxyInfo(); proxyInfo.setType(networkProxy.getProtocol()); proxyInfo.setHost(networkProxy.getHost()); proxyInfo.setPort(networkProxy.getPort()); proxyInfo.setUserName(networkProxy.getUsername()); proxyInfo.setPassword(networkProxy.getPassword()); } AuthenticationInfo authenticationInfo = null; if (remoteRepository.getLoginCredentials() != null && (remoteRepository.getLoginCredentials() instanceof PasswordCredentials)) { PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials(); authenticationInfo = new AuthenticationInfo(); authenticationInfo.setUserName(creds.getUsername()); authenticationInfo.setPassword(new String(creds.getPassword())); } wagon.connect(new org.apache.maven.wagon.repository.Repository(remoteRepository.getId(), baseIndexUrl), authenticationInfo, proxyInfo); Path indexDirectory = indexingContext.getIndexDirectoryFile().toPath(); if (!Files.exists(indexDirectory)) { Files.createDirectories(indexDirectory); } ResourceFetcher resourceFetcher = 
new WagonResourceFetcher(log, tempIndexDirectory, wagon, remoteRepository); IndexUpdateRequest request = new IndexUpdateRequest(indexingContext, resourceFetcher); request.setForceFullUpdate(fullUpdate); request.setLocalIndexCacheDir(indexCacheDirectory.toFile()); // indexUpdater.fetchAndUpdateIndex( request ); indexingContext.updateTimestamp(true); } } catch (AuthenticationException e) { log.error("Could not login to the remote proxy for updating index of {}", remoteRepository.getId(), e); throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId(), e); } catch (ConnectionException e) { log.error("Connection error during index update for remote repository {}", remoteRepository.getId(), e); throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId(), e); } catch (MalformedURLException e) { log.error("URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId(), remoteUpdateUri, e); throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e); } catch (IOException e) { log.error("IOException during index update of remote repository {}: {}", remoteRepository.getId(), e.getMessage(), e); throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId() + (StringUtils.isNotEmpty(e.getMessage()) ? ": " + e.getMessage() : ""), e); } catch (WagonFactoryException e) { log.error("Wagon for remote index download of {} could not be created: {}", remoteRepository.getId(), e.getMessage(), e); throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId(), e); } }); }
From source file:org.apache.archiva.indexer.maven.MavenIndexManager.java
@Override public void update(final ArchivaIndexingContext context, final boolean fullUpdate) throws IndexUpdateFailedException { log.info("start download remote index for remote repository {}", context.getRepository().getId()); URI remoteUpdateUri; if (!(context.getRepository() instanceof RemoteRepository) || !(context.getRepository().supportsFeature(RemoteIndexFeature.class))) { throw new IndexUpdateFailedException( "The context is not associated to a remote repository with remote index " + context.getId()); } else {/*from w w w.ja v a2s.c o m*/ RemoteIndexFeature rif = context.getRepository().getFeature(RemoteIndexFeature.class).get(); remoteUpdateUri = context.getRepository().getLocation().resolve(rif.getIndexUri()); } final RemoteRepository remoteRepository = (RemoteRepository) context.getRepository(); executeUpdateFunction(context, indexingContext -> { try { // create a temp directory to download files Path tempIndexDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".tmpIndex"); Path indexCacheDirectory = Paths.get(indexingContext.getIndexDirectoryFile().getParent(), ".indexCache"); Files.createDirectories(indexCacheDirectory); if (Files.exists(tempIndexDirectory)) { org.apache.archiva.common.utils.FileUtils.deleteDirectory(tempIndexDirectory); } Files.createDirectories(tempIndexDirectory); tempIndexDirectory.toFile().deleteOnExit(); String baseIndexUrl = indexingContext.getIndexUpdateUrl(); String wagonProtocol = remoteUpdateUri.toURL().getProtocol(); NetworkProxy networkProxy = null; if (remoteRepository.supportsFeature(RemoteIndexFeature.class)) { RemoteIndexFeature rif = remoteRepository.getFeature(RemoteIndexFeature.class).get(); if (StringUtils.isNotBlank(rif.getProxyId())) { try { networkProxy = networkProxyAdmin.getNetworkProxy(rif.getProxyId()); } catch (RepositoryAdminException e) { log.error("Error occured while retrieving proxy {}", e.getMessage()); } if (networkProxy == null) { log.warn( "your remote repository is 
configured to download remote index trought a proxy we cannot find id:{}", rif.getProxyId()); } } final StreamWagon wagon = (StreamWagon) wagonFactory .getWagon(new WagonFactoryRequest(wagonProtocol, remoteRepository.getExtraHeaders()) .networkProxy(networkProxy)); int readTimeout = (int) rif.getDownloadTimeout().toMillis() * 1000; wagon.setReadTimeout(readTimeout); wagon.setTimeout((int) remoteRepository.getTimeout().toMillis() * 1000); if (wagon instanceof AbstractHttpClientWagon) { HttpConfiguration httpConfiguration = new HttpConfiguration(); HttpMethodConfiguration httpMethodConfiguration = new HttpMethodConfiguration(); httpMethodConfiguration.setUsePreemptive(true); httpMethodConfiguration.setReadTimeout(readTimeout); httpConfiguration.setGet(httpMethodConfiguration); AbstractHttpClientWagon.class.cast(wagon).setHttpConfiguration(httpConfiguration); } wagon.addTransferListener(new DownloadListener()); ProxyInfo proxyInfo = null; if (networkProxy != null) { proxyInfo = new ProxyInfo(); proxyInfo.setType(networkProxy.getProtocol()); proxyInfo.setHost(networkProxy.getHost()); proxyInfo.setPort(networkProxy.getPort()); proxyInfo.setUserName(networkProxy.getUsername()); proxyInfo.setPassword(networkProxy.getPassword()); } AuthenticationInfo authenticationInfo = null; if (remoteRepository.getLoginCredentials() != null && (remoteRepository.getLoginCredentials() instanceof PasswordCredentials)) { PasswordCredentials creds = (PasswordCredentials) remoteRepository.getLoginCredentials(); authenticationInfo = new AuthenticationInfo(); authenticationInfo.setUserName(creds.getUsername()); authenticationInfo.setPassword(new String(creds.getPassword())); } wagon.connect(new org.apache.maven.wagon.repository.Repository(remoteRepository.getId(), baseIndexUrl), authenticationInfo, proxyInfo); Path indexDirectory = indexingContext.getIndexDirectoryFile().toPath(); if (!Files.exists(indexDirectory)) { Files.createDirectories(indexDirectory); } ResourceFetcher resourceFetcher = 
new WagonResourceFetcher(log, tempIndexDirectory, wagon, remoteRepository); IndexUpdateRequest request = new IndexUpdateRequest(indexingContext, resourceFetcher); request.setForceFullUpdate(fullUpdate); request.setLocalIndexCacheDir(indexCacheDirectory.toFile()); indexUpdater.fetchAndUpdateIndex(request); indexingContext.updateTimestamp(true); } } catch (AuthenticationException e) { log.error("Could not login to the remote proxy for updating index of {}", remoteRepository.getId(), e); throw new IndexUpdateFailedException( "Login in to proxy failed while updating remote repository " + remoteRepository.getId(), e); } catch (ConnectionException e) { log.error("Connection error during index update for remote repository {}", remoteRepository.getId(), e); throw new IndexUpdateFailedException( "Connection error during index update for remote repository " + remoteRepository.getId(), e); } catch (MalformedURLException e) { log.error("URL for remote index update of remote repository {} is not correct {}", remoteRepository.getId(), remoteUpdateUri, e); throw new IndexUpdateFailedException( "URL for remote index update of repository is not correct " + remoteUpdateUri, e); } catch (IOException e) { log.error("IOException during index update of remote repository {}: {}", remoteRepository.getId(), e.getMessage(), e); throw new IndexUpdateFailedException( "IOException during index update of remote repository " + remoteRepository.getId() + (StringUtils.isNotEmpty(e.getMessage()) ? ": " + e.getMessage() : ""), e); } catch (WagonFactoryException e) { log.error("Wagon for remote index download of {} could not be created: {}", remoteRepository.getId(), e.getMessage(), e); throw new IndexUpdateFailedException( "Error while updating the remote index of " + remoteRepository.getId(), e); } }); }
From source file:com.ibm.jaggr.core.impl.modulebuilder.css.CSSModuleBuilder.java
/**
 * Replace <code>url(&lt;<i>relative-path</i>&gt;)</code> references in the
 * input CSS with
 * <code>url(data:&lt;<i>mime-type</i>&gt;;&lt;<i>base64-encoded-data</i>&gt;</code>
 * ). The conversion is controlled by option settings as described in
 * {@link CSSModuleBuilder}.
 *
 * @param req
 *            The request associated with the call.
 * @param css
 *            The input CSS
 * @param res
 *            The resource for the CSS file
 * @return The transformed CSS with images in-lined as determined by option
 *         settings.
 */
protected String inlineImageUrls(HttpServletRequest req, String css, IResource res) {
    if (imageSizeThreshold == 0 && inlinedImageIncludeList.size() == 0) {
        // nothing to do
        return css;
    }
    // In-lining of imports can be disabled by request parameter for debugging
    if (!TypeUtil.asBoolean(req.getParameter(INLINEIMAGES_REQPARAM_NAME), true)) {
        return css;
    }
    // Rebuild the CSS with Matcher.appendReplacement/appendTail so each url(...)
    // match is either replaced by a data: URI or copied through unchanged.
    StringBuffer buf = new StringBuffer();
    Matcher m = urlPattern.matcher(css);
    while (m.find()) {
        String fullMatch = m.group(0);
        String urlMatch = m.group(1);
        // remove quotes and normalize backslashes to forward slashes.
        urlMatch = dequote(urlMatch);
        urlMatch = forwardSlashPattern.matcher(urlMatch).replaceAll("/"); //$NON-NLS-1$
        // Don't do anything with non-relative URLs
        if (urlMatch.startsWith("/") || urlMatch.startsWith("#") || protocolPattern.matcher(urlMatch).find()) { //$NON-NLS-1$ //$NON-NLS-2$
            // appendReplacement with "" then append avoids '$'/'\' escaping issues
            // in the matched text; same pattern is used for all pass-throughs below.
            m.appendReplacement(buf, ""); //$NON-NLS-1$
            buf.append(fullMatch);
            continue;
        }
        URI imageUri = res.resolve(urlMatch).getURI();
        boolean exclude = false, include = false;
        // Determine if this image is in the include list
        for (Pattern regex : inlinedImageIncludeList) {
            if (regex.matcher(imageUri.getPath()).find()) {
                include = true;
                break;
            }
        }
        // Determine if this image is in the exclude list
        for (Pattern regex : inlinedImageExcludeList) {
            if (regex.matcher(imageUri.getPath()).find()) {
                exclude = true;
                break;
            }
        }
        // If there's an include list, then only the files in the include list
        // will be inlined
        if (inlinedImageIncludeList.size() > 0 && !include || exclude) {
            m.appendReplacement(buf, ""); //$NON-NLS-1$
            buf.append(fullMatch);
            continue;
        }
        boolean imageInlined = false;
        // Resolve the content type: filename map first, then the builder's own
        // extension map, finally a generic fallback.
        String type = URLConnection.getFileNameMap().getContentTypeFor(imageUri.getPath());
        String extension = PathUtil.getExtension(imageUri.getPath());
        if (type == null) {
            type = inlineableImageTypeMap.get(extension);
        }
        if (type == null) {
            type = "content/unknown"; //$NON-NLS-1$
        }
        if (include || inlineableImageTypes.contains(type) || inlineableImageTypeMap.containsKey(extension)) {
            InputStream in = null;
            try {
                // In-line the image if it is explicitly included or small enough.
                URLConnection connection = imageUri.toURL().openConnection();
                if (include || connection.getContentLength() <= imageSizeThreshold) {
                    in = connection.getInputStream();
                    String base64 = getBase64(connection);
                    m.appendReplacement(buf, ""); //$NON-NLS-1$
                    buf.append("url('data:" + type + //$NON-NLS-1$
                            ";base64," + base64 + "')"); //$NON-NLS-1$ //$NON-NLS-2$
                    imageInlined = true;
                }
            } catch (IOException ex) {
                // Image could not be read; log and fall through to keep the original URL.
                if (log.isLoggable(Level.WARNING)) {
                    log.log(Level.WARNING,
                            MessageFormat.format(Messages.CSSModuleBuilder_0, new Object[] { imageUri }), ex);
                }
            } finally {
                if (in != null) {
                    try {
                        in.close();
                    } catch (IOException ignore) {
                    }
                }
            }
        }
        if (!imageInlined) {
            // Image not in-lined.  Write the original URL
            m.appendReplacement(buf, ""); //$NON-NLS-1$
            buf.append(fullMatch);
        }
    }
    m.appendTail(buf);
    return buf.toString();
}
From source file:nl.mpi.lamus.workspace.upload.implementation.LamusWorkspaceUploaderTest.java
// Verifies that when retrieving the workspace top node's local file throws
// NodeNotFoundException, processUploadedFiles wraps it in a WorkspaceException
// carrying the expected message, workspace ID and cause.
@Test
public void processUploadedFileWorkspaceException() throws IOException, WorkspaceNodeNotFoundException,
        URISyntaxException, WorkspaceException, NodeNotFoundException, TypeCheckerException {
    final String filename = "someFile.txt";
    final URI workspaceTopNodeArchiveURI = URI
            .create(handleProxyPlusPrefixWithSlash + UUID.randomUUID().toString());
    final File uploadedFile = new File(workspaceUploadDirectory, filename);
    final URI uploadedFileURI = uploadedFile.toURI();
    final URL uploadedFileURL = uploadedFileURI.toURL();
    final WorkspaceNodeType fileType = WorkspaceNodeType.RESOURCE_WRITTEN;
    final String fileMimetype = "text/plain";
    // Node fixture mirroring what a successful upload would produce (not reached here).
    final WorkspaceNode uploadedNode = new LamusWorkspaceNode(workspaceID, null, null);
    uploadedNode.setName(filename);
    uploadedNode.setStatus(WorkspaceNodeStatus.UPLOADED);
    uploadedNode.setType(fileType);
    uploadedNode.setFormat(fileMimetype);
    uploadedNode.setWorkspaceURL(uploadedFileURL);
    Collection<File> uploadedFiles = new ArrayList<>();
    uploadedFiles.add(mockFile1);
    final String expectedErrorMessage = "Error retrieving archive URL from the top node of workspace "
            + workspaceID;
    final NodeNotFoundException expectedException = new NodeNotFoundException(workspaceTopNodeArchiveURI,
            "some exception message");
    //only one file in the collection, so only one loop cycle
    context.checking(new Expectations() {
        {
            oneOf(mockWorkspaceDao).getWorkspace(workspaceID);
            will(returnValue(mockWorkspace));
            oneOf(mockWorkspaceDao).getWorkspaceTopNode(workspaceID);
            will(returnValue(mockWorkspaceTopNode));
            oneOf(mockWorkspaceTopNode).getArchiveURI();
            will(returnValue(workspaceTopNodeArchiveURI));
            // The failure under test: local-file retrieval for the top node blows up.
            oneOf(mockNodeDataRetriever).getNodeLocalFile(workspaceTopNodeArchiveURI);
            will(throwException(expectedException));
        }
    });
    try {
        uploader.processUploadedFiles(workspaceID, uploadedFiles);
        fail("should have thrown exception");
    } catch (WorkspaceException ex) {
        assertEquals("Message different from expected", expectedErrorMessage, ex.getMessage());
        assertEquals("Workspace ID different from expected", workspaceID, ex.getWorkspaceID());
        assertEquals("Cause different from expected", expectedException, ex.getCause());
    }
}
From source file:nl.mpi.lamus.workspace.upload.implementation.LamusWorkspaceUploaderTest.java
@Test public void processUploadedFileUnarchivable() throws IOException, WorkspaceNodeNotFoundException, URISyntaxException, WorkspaceException, NodeNotFoundException, TypeCheckerException { final String filename = "someFile.txt"; final URI workspaceTopNodeArchiveURI = URI .create(handleProxyPlusPrefixWithSlash + UUID.randomUUID().toString()); final File workspaceTopNodeArchiveFile = new File("/archive/some/node.cmdi"); final File uploadedFile = new File(workspaceUploadDirectory, filename); final URI uploadedFileURI = uploadedFile.toURI(); final URL uploadedFileURL = uploadedFileURI.toURL(); final WorkspaceNodeType fileType = WorkspaceNodeType.RESOURCE_WRITTEN; final String fileMimetype = "text/plain"; final WorkspaceNode uploadedNode = new LamusWorkspaceNode(workspaceID, null, null); uploadedNode.setName(filename);//from ww w .ja v a 2s .co m uploadedNode.setStatus(WorkspaceNodeStatus.UPLOADED); uploadedNode.setType(fileType); uploadedNode.setFormat(fileMimetype); uploadedNode.setWorkspaceURL(uploadedFileURL); final Collection<File> uploadedFiles = new ArrayList<>(); uploadedFiles.add(mockFile1); //no successful uploads final Collection<WorkspaceNode> uploadedNodes = new ArrayList<>(); String partExpectedErrorMessage = "File [" + filename + "] not archivable: "; //only one file in the collection, so only one loop cycle context.checking(new Expectations() { { oneOf(mockWorkspaceDao).getWorkspace(workspaceID); will(returnValue(mockWorkspace)); oneOf(mockWorkspaceDao).getWorkspaceTopNode(workspaceID); will(returnValue(mockWorkspaceTopNode)); oneOf(mockWorkspaceTopNode).getArchiveURI(); will(returnValue(workspaceTopNodeArchiveURI)); oneOf(mockNodeDataRetriever).getNodeLocalFile(workspaceTopNodeArchiveURI); will(returnValue(workspaceTopNodeArchiveFile)); //loop oneOf(mockFile1).toURI(); will(returnValue(uploadedFileURI)); oneOf(mockFile1).getName(); will(returnValue(filename)); oneOf(mockNodeDataRetriever).triggerResourceFileCheck(uploadedFileURL, filename); 
will(returnValue(mockTypecheckedResults)); oneOf(mockNodeDataRetriever).isCheckedResourceArchivable(with(same(mockTypecheckedResults)), with(same(workspaceTopNodeArchiveFile)), with(any(StringBuilder.class))); will(returnValue(Boolean.FALSE)); oneOf(mockFile1).getName(); will(returnValue(filename)); oneOf(mockArchiveFileLocationProvider).isFileInOrphansDirectory(mockFile1); will(returnValue(Boolean.FALSE)); oneOf(mockWorkspaceFileHandler).deleteFile(mockFile1); //still calls method to process links oneOf(mockWorkspaceUploadHelper).assureLinksInWorkspace(mockWorkspace, uploadedNodes); } }); Collection<ImportProblem> result = uploader.processUploadedFiles(workspaceID, uploadedFiles); assertNotNull("Collection with failed uploads should not be null", result); assertTrue("Collection with failed uploads should be empty", result.size() == 1); ImportProblem problem = result.iterator().next(); assertTrue("Upload problem different from expected", problem instanceof FileImportProblem); assertEquals("File added to the upload problem is different from expected", mockFile1, ((FileImportProblem) problem).getProblematicFile()); assertEquals("Reason for failure of file upload is different from expected", partExpectedErrorMessage, ((FileImportProblem) problem).getErrorMessage()); }
From source file:nl.mpi.lamus.workspace.upload.implementation.LamusWorkspaceUploaderTest.java
@Test public void processUploadedFileUnarchivable_IsInOrphansDirectory() throws IOException, WorkspaceNodeNotFoundException, URISyntaxException, WorkspaceException, NodeNotFoundException, TypeCheckerException { final String filename = "someFile.txt"; final URI workspaceTopNodeArchiveURI = URI .create(handleProxyPlusPrefixWithSlash + UUID.randomUUID().toString()); final File workspaceTopNodeArchiveFile = new File("/archive/some/node.cmdi"); final File uploadedFile = new File(workspaceUploadDirectory, filename); final URI uploadedFileURI = uploadedFile.toURI(); final URL uploadedFileURL = uploadedFileURI.toURL(); final WorkspaceNodeType fileType = WorkspaceNodeType.RESOURCE_WRITTEN; final String fileMimetype = "text/plain"; final WorkspaceNode uploadedNode = new LamusWorkspaceNode(workspaceID, null, null); uploadedNode.setName(filename);//from ww w .j av a 2 s. com uploadedNode.setStatus(WorkspaceNodeStatus.UPLOADED); uploadedNode.setType(fileType); uploadedNode.setFormat(fileMimetype); uploadedNode.setWorkspaceURL(uploadedFileURL); final Collection<File> uploadedFiles = new ArrayList<>(); uploadedFiles.add(mockFile1); //no successful uploads final Collection<WorkspaceNode> uploadedNodes = new ArrayList<>(); String partExpectedErrorMessage = "File [" + filename + "] not archivable: "; //only one file in the collection, so only one loop cycle context.checking(new Expectations() { { oneOf(mockWorkspaceDao).getWorkspace(workspaceID); will(returnValue(mockWorkspace)); oneOf(mockWorkspaceDao).getWorkspaceTopNode(workspaceID); will(returnValue(mockWorkspaceTopNode)); oneOf(mockWorkspaceTopNode).getArchiveURI(); will(returnValue(workspaceTopNodeArchiveURI)); oneOf(mockNodeDataRetriever).getNodeLocalFile(workspaceTopNodeArchiveURI); will(returnValue(workspaceTopNodeArchiveFile)); //loop oneOf(mockFile1).toURI(); will(returnValue(uploadedFileURI)); oneOf(mockFile1).getName(); will(returnValue(filename)); oneOf(mockNodeDataRetriever).triggerResourceFileCheck(uploadedFileURL, 
filename); will(returnValue(mockTypecheckedResults)); oneOf(mockNodeDataRetriever).isCheckedResourceArchivable(with(same(mockTypecheckedResults)), with(same(workspaceTopNodeArchiveFile)), with(any(StringBuilder.class))); will(returnValue(Boolean.FALSE)); oneOf(mockFile1).getName(); will(returnValue(filename)); oneOf(mockArchiveFileLocationProvider).isFileInOrphansDirectory(mockFile1); will(returnValue(Boolean.TRUE)); //still calls method to process links oneOf(mockWorkspaceUploadHelper).assureLinksInWorkspace(mockWorkspace, uploadedNodes); } }); Collection<ImportProblem> result = uploader.processUploadedFiles(workspaceID, uploadedFiles); assertNotNull("Collection with failed uploads should not be null", result); assertTrue("Collection with failed uploads should be empty", result.size() == 1); ImportProblem problem = result.iterator().next(); assertTrue("Upload problem different from expected", problem instanceof FileImportProblem); assertEquals("File added to the upload problem is different from expected", mockFile1, ((FileImportProblem) problem).getProblematicFile()); assertEquals("Reason for failure of file upload is different from expected", partExpectedErrorMessage, ((FileImportProblem) problem).getErrorMessage()); }