Usage examples for java.net.URLConnection#connect()
/**
 * Opens a communications link to the resource referenced by this URL,
 * if such a connection has not already been established.
 *
 * @throws IOException if an I/O error occurs while opening the connection
 */
public abstract void connect() throws IOException;
From source file:io.apiman.gateway.engine.impl.DefaultPluginRegistry.java
/** * Download the artifact at the given URL and store it locally into the given * plugin file path./*from w w w.java 2s. c o m*/ */ protected void downloadArtifactTo(URL artifactUrl, File pluginFile, IAsyncResultHandler<File> handler) { InputStream istream = null; OutputStream ostream = null; try { URLConnection connection = artifactUrl.openConnection(); connection.connect(); if (connection instanceof HttpURLConnection) { HttpURLConnection httpConnection = (HttpURLConnection) connection; if (httpConnection.getResponseCode() != 200) { handler.handle(AsyncResultImpl.create(null)); return; } } istream = connection.getInputStream(); ostream = new FileOutputStream(pluginFile); IOUtils.copy(istream, ostream); ostream.flush(); handler.handle(AsyncResultImpl.create(pluginFile)); } catch (Exception e) { handler.handle(AsyncResultImpl.<File>create(e)); } finally { IOUtils.closeQuietly(istream); IOUtils.closeQuietly(ostream); } }
From source file:net.sf.taverna.t2.security.credentialmanager.impl.HTTPAuthenticatorIT.java
/**
 * Verifies that a different HTTP realm is treated as a separate service:
 * the authenticator and the password provider must both be consulted again
 * rather than reusing credentials cached for the first realm.
 * <p>
 * Fix: the second request previously read {@code c.getContent()} — the
 * first, already-consumed connection — instead of {@code c2.getContent()},
 * so the second connection's content was never actually fetched.
 */
@Test()
public void differentRealm() throws Exception {
    assertEquals("Unexpected calls to password provider", 0,
            HTTPAuthenticatorServiceUsernameAndPasswordProvider.getCalls());
    CountingAuthenticator authenticator = new CountingAuthenticator(credentialManager);
    assertEquals("Unexpected calls to authenticator", 0, authenticator.calls);
    Authenticator.setDefault(authenticator);

    // Different password in case resetAuthCache() did not run
    UsernamePassword userPassword = new UsernamePassword(USERNAME, PASSWORD4);
    userRealm.put(USERNAME, PASSWORD4);

    URL url = new URL("http://localhost:" + PORT + "/test.html");
    httpAuthProvider.setServiceUsernameAndPassword(url.toURI(), userPassword);
    URLConnection c = url.openConnection();
    c.connect();
    try {
        c.getContent();
    } catch (Exception ex) {
        // Auth failures surface here; the assertions below check the outcome.
    }
    assertEquals("Unexpected prompt/realm", REALM, httpAuthProvider.getRequestMessage());
    assertEquals("Unexpected URI", url.toURI().toASCIIString() + "#" + REALM,
            HTTPAuthenticatorServiceUsernameAndPasswordProvider.getServiceURI().toASCIIString());
    assertEquals("HTTP/1.1 200 OK", c.getHeaderField(0));
    assertEquals("Did not invoke authenticator", 1, authenticator.calls);
    assertEquals("Did not invoke our password provider", 1,
            HTTPAuthenticatorServiceUsernameAndPasswordProvider.getCalls());

    // A different realm should be treated as a second connection, and not
    // even use saved credentials.
    credentialManager.resetAuthCache();
    userRealm.setName(REALM2);
    URLConnection c2 = url.openConnection();
    c2.connect();
    try {
        // FIX: read from c2 (the new connection), not the stale c.
        c2.getContent();
    } catch (Exception ex) {
        // See above.
    }
    assertEquals("HTTP/1.1 200 OK", c2.getHeaderField(0));
    assertEquals("Did not invoke authenticator again", 2, authenticator.calls);
    assertEquals("Did not invoke provider again", 2,
            HTTPAuthenticatorServiceUsernameAndPasswordProvider.getCalls());
    assertEquals("Unexpected prompt/realm", REALM2, httpAuthProvider.getRequestMessage());
    assertEquals("Unexpected URI", url.toURI().toASCIIString() + "#" + REALM2,
            HTTPAuthenticatorServiceUsernameAndPasswordProvider.getServiceURI().toASCIIString());
}
From source file:de.mango.business.GoogleSearchProvider.java
public Vector<String> search(String query, int count, int page) { Vector<String> results = new Vector<String>(); // Prepare the query try {//ww w .j a v a2s . co m query = "http://ajax.googleapis.com/ajax/services/search/images?" + GoogleSearchProvider.searchArguments + this.hostLanguage + "&q=" + URLEncoder.encode( GoogleSearchProvider.restrictToOpenClipart ? query + GoogleSearchProvider.openClipart : query, "UTF-8") + "&start="; } catch (UnsupportedEncodingException e) { if (DEBUG) { Log.w(TAG, "Unsupported Encoding Exception:" + e.getMessage()); Log.w(TAG, Log.getStackTraceString(e)); } return results; } // start argument to pass to google int firstindex = count * page; // count of results to skip before adding them to the result array int skip = 0; // start indices > 56 are skipped by Google, so we // ask for results from 56, but skip the unwanted $skip indices if (firstindex > 63) return results; if (firstindex > 56) { skip = firstindex - 56; firstindex = 56; } boolean readMore = true; // do we need more queries and are they // possible? 
while (readMore) { // add start index to the query String currentQuery = query + firstindex; if (DEBUG) Log.d(TAG, "Searching: " + currentQuery); try { // prepare the connection URL url = new URL(currentQuery); URLConnection connection = url.openConnection(); connection.addRequestProperty("Referer", GoogleSearchProvider.refererUrl); connection.setConnectTimeout(2000); connection.setReadTimeout(2000); connection.connect(); // receive the results StringBuilder builder = new StringBuilder(); BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream())); String line; while ((line = reader.readLine()) != null) { builder.append(line); } // parse the results JSONObject json = new JSONObject(builder.toString()); int responseStatus = json.getInt("responseStatus"); if (responseStatus == 200)// successful search { json = json.getJSONObject("responseData"); JSONArray res = json.getJSONArray("results"); if (res.length() == 0) return results; String s; int limit = Math.min(res.length(), count - results.size() + skip); for (int i = skip; i < limit; i++) { s = res.getJSONObject(i).getString("unescapedUrl"); if (s != null) results.addElement(s); } // see if there are "more Results" JSONObject cursor = json.getJSONObject("cursor"); JSONArray pages = cursor.getJSONArray("pages"); int pageCount = pages.length(); int currentPageIndex = cursor.getInt("currentPageIndex"); this.moreResults = readMore = (pageCount - 1) > currentPageIndex; } else { if (DEBUG) Log.w(TAG, "Goole Search Error (Code " + responseStatus + "):" + json.getString("responseDetails")); this.moreResults = readMore = false;// prevent for (;;) loop // on errors } } catch (MalformedURLException e) { if (DEBUG) { Log.w(TAG, "MalformedURLException:" + e.getMessage()); Log.w(TAG, Log.getStackTraceString(e)); } this.moreResults = readMore = false; } catch (IOException e) { if (DEBUG) { Log.w(TAG, "IOException:" + e.getMessage()); Log.w(TAG, Log.getStackTraceString(e)); } this.moreResults = 
readMore = false; } catch (JSONException e) { if (DEBUG) { Log.w(TAG, "JSONException:" + e.getMessage()); Log.w(TAG, Log.getStackTraceString(e)); } this.moreResults = readMore = false; } // read more only if we can read more AND want to have more readMore = readMore && results.size() < count; if (readMore) { firstindex += 8; if (firstindex > 56)// the last pages always need to start // querying at index 56 (or google returns // errors) { skip = firstindex - 56; firstindex = 56; } } } return results; }
From source file:opendap.metacat.DDXRetriever.java
/** * Given a URL to a DDX, get the DDX document. If the DDXRetriever was * built with caching turned on, this uses a poor man's HTTP/1.1 cache * based on Last Modified Times. /*w ww.j a va 2 s. c om*/ * * If caching is on, then calling this on a series of DDX URLs will fill * the cache. If the cache is saved and later used again it is possible * to re-read the URLs straight from the cache. * * @see getCache() * @param DDXURL Get the DDX referenced by this URL * @return The DDX document, in a String * @throws Exception */ public String getDDXDoc(String DDXURL) throws Exception { String ddx = null; URL url = new URL(DDXURL); URLConnection connection = url.openConnection(); if (DDXCache.getLastVisited(DDXURL) != 0 && DDXCache.getCachedResponse(DDXURL) != null) connection.setIfModifiedSince(DDXCache.getLastVisited(DDXURL)); // Here's where we'd poke in a header to ask for the DAP3.2 DDX connection.connect(); // Cast to a HttpURLConnection if (connection instanceof HttpURLConnection) { HttpURLConnection httpConnection = (HttpURLConnection) connection; int code = httpConnection.getResponseCode(); // If we have something, process. Since a conditional get was // used, the response might be empty (code == 304) and nothing // should be done in that case switch (code) { case 200: ddx = convertStreamToString(httpConnection.getInputStream()); // Update the last visited and document caches if (!readOnly) { Date date = new Date(); DDXCache.setLastVisited(DDXURL, date.getTime()); DDXCache.setCachedResponse(DDXURL, ddx); } break; case 304: ddx = DDXCache.getCachedResponse(DDXURL); if (!readOnly) { // Update the last visited cache to now Date date = new Date(); DDXCache.setLastVisited(DDXURL, date.getTime()); } break; default: log.error("Expected a 200 or 304 HTTP return code. Got: " + new Integer(code).toString()); } } else { throw new MalformedURLException("Expected a HTTP URL (" + DDXURL + ")."); } return ddx; }
From source file:me.philio.ghost.sync.SyncAdapter.java
/** * Save content (expects images)/*from w w w . ja v a 2 s.c o m*/ * * @param blog * @param path * @param notificationUri * @throws NoSuchAlgorithmException * @throws IOException */ private void saveContent(Blog blog, String path, Uri notificationUri) throws NoSuchAlgorithmException, IOException { // Check that the path looks like something valid if (path == null || path.trim().isEmpty()) { return; } // Make sure the path is a full URL path = ImageUtils.getUrl(blog, path); // Generate a filename String filename = ImageUtils.getFilename(getContext(), blog, path); // Make sure destination directory exists if (!ImageUtils.ensureDirectory(filename.substring(0, filename.lastIndexOf('/')))) { Log.e(TAG, "Content directory missing"); return; } // Check if the file exists if (ImageUtils.fileExists(filename)) { Log.d(TAG, "File exists skipping"); return; } // Connect URL url = new URL(path); URLConnection connection = url.openConnection(); connection.connect(); // Save the image as a temporary file as IO errors on bitmap decode never throw an error Log.d(TAG, "Saving temporary file: " + filename + ".tmp"); File file = new File(filename + ".tmp"); IOUtils.copy(connection.getInputStream(), new FileOutputStream(file)); // Decode the file Log.d(TAG, "Decoding to file: " + filename); ImageUtils.decodeScale(new FileInputStream(file), filename, 2048, 2048); file.delete(); if (notificationUri != null) { getContext().getContentResolver().notifyChange(notificationUri, null); } }
From source file:org.apache.taverna.activities.wsdl.WSDLActivity.java
private void parseWSDL() throws ParserConfigurationException, WSDLException, IOException, SAXException, UnknownOperationException {//from w w w .j a v a2 s . c om URLConnection connection = null; try { URL wsdlURL = new URL(configurationBean.get("operation").get("wsdl").textValue()); connection = wsdlURL.openConnection(); connection.setConnectTimeout(RemoteHealthChecker.getTimeoutInSeconds() * 1000); connection.connect(); } catch (MalformedURLException e) { throw new IOException("Malformed URL", e); } catch (SocketTimeoutException e) { throw new IOException("Timeout", e); } catch (IOException e) { throw e; } finally { if ((connection != null) && (connection.getInputStream() != null)) { connection.getInputStream().close(); } } parser = new WSDLParser(configurationBean.get("operation").get("wsdl").textValue()); isWsrfService = parser.isWsrfService(); }
From source file:com.photon.phresco.util.Utility.java
public static boolean isConnectionAlive(String protocol, String host, int port) { boolean isAlive = true; try {//from w ww. jav a2s. c o m URL url = new URL(protocol, host, port, ""); URLConnection connection = url.openConnection(); connection.connect(); } catch (Exception e) { isAlive = false; } return isAlive; }
From source file:org.atricore.idbus.bundles.apache.tiles.OsgiDefinitionsFactory.java
/** * Appends locale-specific {@link Definition} objects to an existing * {@link Definitions} set by reading locale-specific versions of * the applied sources.//from w ww .ja va 2 s. c om * * @param definitions The Definitions object to append to. * @param tilesContext The requested locale. * @throws DefinitionsFactoryException if an error occurs reading definitions. */ protected void addDefinitions(Definitions definitions, TilesRequestContext tilesContext) throws DefinitionsFactoryException { Locale locale = localeResolver.resolveLocale(tilesContext); if (logger.isDebugEnabled()) logger.debug("Adding definitios for locale " + locale); if (isContextProcessed(tilesContext)) { if (logger.isDebugEnabled()) logger.debug("isContextProcessed(tilesContext):true, returning"); return; } if (locale == null) { if (logger.isDebugEnabled()) logger.debug("locale == null, returning"); return; } processedLocales.add(locale); List<String> postfixes = calculatePostfixes(locale); if (logger.isDebugEnabled()) logger.debug("Processing postfixes:" + (postfixes != null ? postfixes.size() : "null")); Map<String, Definition> localeDefsMap = new HashMap<String, Definition>(); for (Object postfix : postfixes) { if (logger.isDebugEnabled()) logger.debug("Processing postfix [" + postfix + "] for sources:" + (sources != null ? sources.size() : "null")); // For each postfix, all the sources must be loaded. 
for (Object source : sources) { URL url = (URL) source; String path = url.toExternalForm(); String newPath = concatPostfix(path, (String) postfix); if (logger.isDebugEnabled()) logger.debug("Adding source definition : " + newPath); try { URL newUrl = new URL(newPath); URLConnection connection = newUrl.openConnection(); connection.connect(); if (logger.isDebugEnabled()) logger.debug("Loding definition from URL:" + newUrl.toExternalForm()); lastModifiedDates.put(newUrl.toExternalForm(), connection.getLastModified()); // Definition must be collected, starting from the base // source up to the last localized file. Map<String, Definition> defsMap = reader.read(connection.getInputStream()); if (defsMap != null) { localeDefsMap.putAll(defsMap); } } catch (FileNotFoundException e) { // File not found. continue. if (logger.isDebugEnabled()) { logger.debug("File " + newPath + " not found, continue"); } } catch (IOException e) { // Assume I/O Exception is a Not Found error ? /* throw new DefinitionsFactoryException( "I/O error processing configuration.", e); */ if (logger.isDebugEnabled()) logger.debug("I/O error processing configuration " + newPath + ":" + e.getMessage(), e); } } } // At the end of definitions loading, they can be assigned to // Definitions implementation, to allow inheritance resolution. definitions.addDefinitions(localeDefsMap, localeResolver.resolveLocale(tilesContext)); }
From source file:org.jab.docsearch.utils.NetUtils.java
/** * Gets URL size (content)//from ww w. ja v a 2 s.c o m * * @param url URL for connect * @return size in bytes of a url or 0 if broken or timed out connection */ public long getURLSize(final String url) { try { URL tmpURL = new URL(url); URLConnection conn = tmpURL.openConnection(); // set connection parameter conn.setDoInput(true); conn.setDoOutput(false); conn.setUseCaches(false); conn.setRequestProperty("User-Agent", USER_AGENT); // connect conn.connect(); long contentLength = conn.getContentLength(); if (logger.isDebugEnabled()) { logger.debug("getURLSize() content lentgh=" + contentLength + " of URL='" + url + "'"); } return contentLength; } catch (IOException ioe) { logger.error("getURLSize() failed for URL='" + url + "'", ioe); return 0; } }
From source file:org.jab.docsearch.utils.NetUtils.java
/** * Gets URL modified date as long/* www . j a v a2 s . co m*/ * * @param url URL to connect * @return date of URLs modification or 0 if an error occurs */ public long getURLModifiedDate(final String url) { try { URL tmpURL = new URL(url); URLConnection conn = tmpURL.openConnection(); // set connection parameter conn.setDoInput(true); conn.setDoOutput(false); conn.setUseCaches(false); conn.setRequestProperty("User-Agent", USER_AGENT); // connect conn.connect(); long modifiedDate = conn.getLastModified(); if (logger.isDebugEnabled()) { logger.debug("getURLModifiedDate() modified date=" + modifiedDate + " of URL='" + url + "'"); } return modifiedDate; } catch (IOException ioe) { logger.error("getURLModifiedDate() failed for URL='" + url + "'", ioe); return 0; } }