List of usage examples for java.net.URLConnection.connect()
public abstract void connect() throws IOException;
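Before the project-specific examples below, here is a minimal, self-contained sketch of the typical connect() call pattern: open the connection, configure it, call connect(), then read headers and content. The URL and timeout values are illustrative assumptions, not taken from any of the examples.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;

public class UrlConnectionConnectExample {
    public static void main(String[] args) throws IOException {
        // Illustrative URL; replace with the resource you actually need.
        URL url = new URL("https://example.com/");
        URLConnection connection = url.openConnection();

        // Configure before connect(): request properties and doOutput/doInput
        // must be set prior to connecting (they throw IllegalStateException afterwards).
        connection.setConnectTimeout(5000); // milliseconds, illustrative value
        connection.setReadTimeout(5000);

        // Establishes the actual connection to the remote resource.
        connection.connect();

        // Once connected, header fields and the content stream are available.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
            System.out.println("Content-Type: " + connection.getContentType());
            System.out.println("First line: " + reader.readLine());
        }
    }
}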
From source file:com.dotmarketing.portlets.rules.actionlet.VisitorsTagsActionletFTest.java
@After
public void tearDown() throws Exception {
    URL logoutUrl = new URL(baseUrl + "/destroy.jsp");
    URLConnection con = logoutUrl.openConnection();
    con.connect();
    con.getInputStream();
    for (Rule rule : rulesToRemove) {
        ruleDataGen.remove(rule);
    }
    rulesToRemove.clear();
}
From source file:com.dotmarketing.portlets.rules.actionlet.PersonaActionletFTest.java
@After
public void tearDown() throws Exception {
    URL logoutUrl = new URL(baseUrl + "/destroy.jsp");
    URLConnection con = logoutUrl.openConnection();
    con.connect();
    con.getInputStream();
}
From source file:org.apache.falcon.logging.JobLogMover.java
private InputStream getURLinputStream(URL url) throws IOException {
    URLConnection connection = url.openConnection();
    connection.setDoOutput(true);
    connection.connect();
    return connection.getInputStream();
}
From source file:org.esupportail.papercut.services.PayBoxService.java
public void updatePayBoxActionUrl() {
    for (String payboxActionUrl : payboxActionUrls) {
        try {
            // test the connection to see whether the server is available
            URL url = new URL(payboxActionUrl);
            URLConnection connection = url.openConnection();
            connection.connect();
            connection.getInputStream().read();
            this.payboxActionUrlOK = payboxActionUrl;
        } catch (Exception e) {
            log.warn("Pb with " + payboxActionUrl, e);
        }
    }
    if (this.payboxActionUrlOK == null) {
        throw new RuntimeException("No paybox action url is available at the moment !");
    }
}
From source file:io.apiman.manager.api.core.plugin.AbstractPluginRegistry.java
/**
 * Tries to download the plugin from the given remote maven repository.
 */
protected boolean downloadFromMavenRepo(File pluginFile, PluginCoordinates coordinates, URI mavenRepoUrl) {
    String artifactSubPath = PluginUtils.getMavenPath(coordinates);
    InputStream istream = null;
    OutputStream ostream = null;
    try {
        URL artifactUrl = new URL(mavenRepoUrl.toURL(), artifactSubPath);
        URLConnection connection = artifactUrl.openConnection();
        connection.connect();
        if (connection instanceof HttpURLConnection) {
            HttpURLConnection httpConnection = (HttpURLConnection) connection;
            if (httpConnection.getResponseCode() != 200) {
                throw new IOException();
            }
        }
        istream = connection.getInputStream();
        ostream = new FileOutputStream(pluginFile);
        IOUtils.copy(istream, ostream);
        ostream.flush();
        return true;
    } catch (Exception e) {
        return false;
    } finally {
        IOUtils.closeQuietly(istream);
        IOUtils.closeQuietly(ostream);
    }
}
From source file:com.kamosoft.flickr.model.Photo.java
private Bitmap getBitmapFromURL(String url) throws JSONException, IOException {
    Bitmap bm = null;
    URL aURL = new URL(url);
    URLConnection conn = aURL.openConnection();
    conn.connect();
    InputStream is = conn.getInputStream();
    BufferedInputStream bis = new BufferedInputStream(is);
    bm = BitmapFactory.decodeStream(bis);
    bis.close();
    is.close();
    return bm;
}
From source file:org.apache.roller.weblogger.ui.rendering.velocity.deprecated.NewsfeedCache.java
/**
 * Returns a Channel object for the supplied RSS newsfeed URL.
 *
 * @param feedUrl RSS newsfeed URL.
 * @return FlockFeedI for specified RSS newsfeed URL.
 */
public SyndFeed getChannel(String feedUrl) {
    SyndFeed feed = null;
    try {
        // If aggregator has been disabled return null
        if (!aggregator_enabled) {
            return null;
        }
        if (aggregator_cache_enabled) {
            if (log.isDebugEnabled()) {
                log.debug("Newsfeed: use Cache for " + feedUrl);
            }
            // Get pre-parsed feed from the cache
            feed = (SyndFeed) mCache.get(feedUrl);
            if (log.isDebugEnabled()) {
                log.debug("Newsfeed: got from Cache");
            }
            if (feed == null) {
                try {
                    // Parse the feed
                    SyndFeedInput feedInput = new SyndFeedInput();
                    feed = feedInput.build(new InputStreamReader(new URL(feedUrl).openStream()));
                } catch (Exception e1) {
                    log.info("Error parsing RSS: " + feedUrl);
                }
            }
            // Store parsed feed in the cache
            mCache.put(feedUrl, feed);
            log.debug("Newsfeed: not in Cache");
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Newsfeed: not using Cache for " + feedUrl);
            }
            try {
                // charset fix from Jason Rumney (see ROL-766)
                URLConnection connection = new URL(feedUrl).openConnection();
                connection.connect();
                String contentType = connection.getContentType();
                // Default charset to UTF-8, since we are expecting XML
                String charset = "UTF-8";
                if (contentType != null) {
                    int charsetStart = contentType.indexOf("charset=");
                    if (charsetStart >= 0) {
                        int charsetEnd = contentType.indexOf(";", charsetStart);
                        if (charsetEnd == -1) {
                            charsetEnd = contentType.length();
                        }
                        charsetStart += "charset=".length();
                        charset = contentType.substring(charsetStart, charsetEnd);
                        // Check that charset is recognized by Java
                        try {
                            byte[] test = "test".getBytes(charset);
                        } catch (UnsupportedEncodingException codingEx) {
                            // default to UTF-8
                            charset = "UTF-8";
                        }
                    }
                }
                // Parse the feed
                SyndFeedInput feedInput = new SyndFeedInput();
                feed = feedInput.build(new InputStreamReader(connection.getInputStream(), charset));
            } catch (Exception e1) {
                log.info("Error parsing RSS: " + feedUrl);
            }
        }
    } catch (Exception ioe) {
        if (log.isDebugEnabled()) {
            log.debug("Newsfeed: Unexpected exception", ioe);
        }
    }
    return feed;
}
From source file:eu.faircode.netguard.DownloadTask.java
@Override
protected Object doInBackground(Object... args) {
    Log.i(TAG, "Downloading " + url + " into " + file);
    InputStream in = null;
    OutputStream out = null;
    URLConnection connection = null;
    try {
        connection = url.openConnection();
        connection.connect();
        if (connection instanceof HttpURLConnection) {
            HttpURLConnection httpConnection = (HttpURLConnection) connection;
            if (httpConnection.getResponseCode() != HttpURLConnection.HTTP_OK)
                throw new IOException(
                        httpConnection.getResponseCode() + " " + httpConnection.getResponseMessage());
        }
        int contentLength = connection.getContentLength();
        Log.i(TAG, "Content length=" + contentLength);
        in = connection.getInputStream();
        out = new FileOutputStream(file);
        long size = 0;
        byte buffer[] = new byte[4096];
        int bytes;
        while (!isCancelled() && (bytes = in.read(buffer)) != -1) {
            out.write(buffer, 0, bytes);
            size += bytes;
            if (contentLength > 0)
                publishProgress((int) (size * 100 / contentLength));
        }
        Log.i(TAG, "Downloaded size=" + size);
        return null;
    } catch (Throwable ex) {
        return ex;
    } finally {
        try {
            if (out != null)
                out.close();
        } catch (IOException ex) {
            Log.e(TAG, ex.toString() + "\n" + Log.getStackTraceString(ex));
        }
        try {
            if (in != null)
                in.close();
        } catch (IOException ex) {
            Log.e(TAG, ex.toString() + "\n" + Log.getStackTraceString(ex));
        }
        if (connection instanceof HttpURLConnection)
            ((HttpURLConnection) connection).disconnect();
    }
}
From source file:ubic.gemma.core.loader.entrez.pubmed.ExpressionExperimentBibRefFinder.java
private int locatePubMedId(String geoSeries) {
    if (!geoSeries.matches("GSE\\d+")) {
        ExpressionExperimentBibRefFinder.log.warn(geoSeries + " is not a GEO Series Accession");
        return -1;
    }
    URL url;
    Pattern pat = Pattern.compile(ExpressionExperimentBibRefFinder.PUBMEDREF_REGEX);
    URLConnection conn;
    try {
        url = new URL(ExpressionExperimentBibRefFinder.GEO_SERIES_URL_BASE + geoSeries);
        conn = url.openConnection();
        conn.connect();
    } catch (IOException e1) {
        ExpressionExperimentBibRefFinder.log.error(e1, e1);
        throw new RuntimeException("Could not get data from remote server", e1);
    }
    try (InputStream is = conn.getInputStream();
            BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
        String line;
        while ((line = br.readLine()) != null) {
            Matcher mat = pat.matcher(line);
            ExpressionExperimentBibRefFinder.log.debug(line);
            if (mat.find()) {
                String capturedAccession = mat.group(1);
                if (StringUtils.isBlank(capturedAccession))
                    return -1;
                return Integer.parseInt(capturedAccession);
            }
        }
    } catch (IOException e) {
        ExpressionExperimentBibRefFinder.log.error(e, e);
        throw new RuntimeException("Could not get data from remote server", e);
    } catch (NumberFormatException e) {
        ExpressionExperimentBibRefFinder.log.error(e, e);
        throw new RuntimeException("Could not determine valid pubmed id");
    }
    return -1;
}
From source file:org.ambraproject.solr.SolrHttpServiceImpl.java
@Override
public Document makeSolrRequest(Map<String, String> params) throws SolrException {
    if (solrUrl == null || solrUrl.isEmpty()) {
        setSolrUrl(config.getString(URL_CONFIG_PARAM));
    }
    //make sure the return type is xml
    if (!params.keySet().contains(RETURN_TYPE_PARAM) || !params.get(RETURN_TYPE_PARAM).equals(XML)) {
        params.put(RETURN_TYPE_PARAM, XML);
    }
    //make sure that we include a 'q' parameter
    if (!params.keySet().contains(Q_PARAM)) {
        params.put(Q_PARAM, NO_FILTER);
    }
    String queryString = "?";
    for (String param : params.keySet()) {
        String value = params.get(param);
        if (queryString.length() > 1) {
            queryString += "&";
        }
        queryString += (cleanInput(param) + "=" + cleanInput(value));
    }
    URL url;
    String urlString = solrUrl + queryString;
    log.debug("Making Solr http request to " + urlString);
    try {
        url = new URL(urlString);
    } catch (MalformedURLException e) {
        throw new SolrException("Bad Solr Url: " + urlString, e);
    }
    InputStream urlStream = null;
    Document doc = null;
    try {
        URLConnection connection = url.openConnection();
        connection.setConnectTimeout(CONNECTION_TIMEOUT);
        connection.connect();
        urlStream = connection.getInputStream();
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        DocumentBuilder builder = factory.newDocumentBuilder();
        doc = builder.parse(urlStream);
    } catch (IOException e) {
        throw new SolrException("Error connecting to the Solr server at " + solrUrl, e);
    } catch (ParserConfigurationException e) {
        throw new SolrException("Error configuring parser xml parser for solr response", e);
    } catch (SAXException e) {
        throw new SolrException("Solr Returned bad XML for url: " + urlString, e);
    } finally {
        //Close the input stream
        if (urlStream != null) {
            try {
                urlStream.close();
            } catch (IOException e) {
                log.error("Error closing url stream to Solr", e);
            }
        }
    }
    return doc;
}