Usage examples for java.net.URL#getPort()
public int getPort()
From source file:net.nightwhistler.pageturner.PageTurnerModule.java
/** * Binds the HttpClient interface to the DefaultHttpClient implementation. * /*from w ww. jav a2 s . c o m*/ * In testing we'll use a stub. * * @return */ @Provides @Inject public HttpClient getHttpClient(Configuration config) { HttpParams httpParams = new BasicHttpParams(); DefaultHttpClient client; if (config.isAcceptSelfSignedCertificates()) { client = new SSLHttpClient(httpParams); } else { client = new DefaultHttpClient(httpParams); } for (CustomOPDSSite site : config.getCustomOPDSSites()) { if (site.getUserName() != null && site.getUserName().length() > 0) { try { URL url = new URL(site.getUrl()); client.getCredentialsProvider().setCredentials(new AuthScope(url.getHost(), url.getPort()), new UsernamePasswordCredentials(site.getUserName(), site.getPassword())); } catch (MalformedURLException mal) { //skip to the next } } } return client; }
From source file:org.uiautomation.ios.server.grid.SelfRegisteringRemote.java
/**
 * Asks the hub's proxy API whether this node is already registered.
 *
 * @return true if the hub reports the node as registered
 * @throws GridException if the hub is unreachable, responds with a non-200 status,
 *         or the response cannot be parsed (any failure is wrapped here)
 */
private boolean isAlreadyRegistered() {
    HttpClient client = httpClientFactory.getHttpClient();
    try {
        URL registration = new URL(nodeConfig.getRegistrationURL());
        // The proxy-query API lives on the same host/port as the registration endpoint.
        URL apiUrl = new URL(
                "http://" + registration.getHost() + ":" + registration.getPort() + "/grid/api/proxy");
        HttpHost hubHost = new HttpHost(apiUrl.getHost(), apiUrl.getPort());

        String nodeId = "http://" + nodeConfig.getHost() + ":" + nodeConfig.getPort();
        BasicHttpRequest request =
                new BasicHttpRequest("GET", apiUrl.toExternalForm() + "?id=" + nodeId);

        HttpResponse response = client.execute(hubHost, request);
        if (response.getStatusLine().getStatusCode() != 200) {
            // Note: this is caught below and re-wrapped, matching the original behavior.
            throw new GridException("hub down or not responding. Reason : "
                    + response.getStatusLine().getReasonPhrase());
        }
        JSONObject json = extractObject(response);
        return (Boolean) json.get("success");
    } catch (Exception e) {
        throw new GridException("Problem registering with hub", e);
    }
}
From source file:com.adito.reverseproxy.ReverseProxyMethodHandler.java
/**
 * Encodes a URL, percent-encoding user info, path and query components.
 * If the input is not a parseable absolute URL, only the path and query
 * portions of the raw string are encoded.
 *
 * @param location the raw URL string to encode
 * @return the encoded URL string
 */
public static final String encodeURL(String location) {
    try {
        URL url = new URL(location);
        StringBuffer encoded = new StringBuffer();
        encoded.append(url.getProtocol());
        encoded.append("://");

        // Credentials, when present, use their own encoding scheme.
        if (!Util.isNullOrTrimmedBlank(url.getUserInfo())) {
            encoded.append(DAVUtilities.encodeURIUserInfo(url.getUserInfo()));
            encoded.append("@");
        }

        encoded.append(url.getHost());
        // getPort() is -1 when the URL carries no explicit port.
        if (url.getPort() != -1) {
            encoded.append(":");
            encoded.append(url.getPort());
        }
        if (!Util.isNullOrTrimmedBlank(url.getPath())) {
            encoded.append(URLUTF8Encoder.encode(url.getPath(), false));
        }
        if (!Util.isNullOrTrimmedBlank(url.getQuery())) {
            encoded.append("?");
            encoded.append(encodeQuery(url.getQuery()));
        }
        return encoded.toString();
    } catch (MalformedURLException e) {
        // Relative or malformed input: split on the first '?' and encode each part.
        int queryStart = location.indexOf('?');
        if (queryStart > -1 && queryStart < location.length() - 1) {
            return URLUTF8Encoder.encode(location.substring(0, queryStart), false)
                    + "?" + encodeQuery(location.substring(queryStart + 1));
        }
        return URLUTF8Encoder.encode(location, false);
    }
}
From source file:com.nanocrawler.robotstxt.RobotstxtServer.java
/**
 * Fetches and parses robots.txt for the host of the given URL, then stores the
 * resulting directives in the cache, evicting the least-recently-accessed entry
 * when the cache is full.
 *
 * @param url a URL on the host whose robots.txt should be fetched
 * @return the parsed directives; an empty {@code HostDirectives} if the fetch
 *         or parse failed (so the fetch time is still tracked)
 */
private HostDirectives fetchDirectives(URL url) {
    WebURL robotsTxtUrl = new WebURL();
    String host = getHost(url);
    // Omit the port when it is the protocol default or unspecified (-1).
    String port = (url.getPort() == url.getDefaultPort() || url.getPort() == -1) ? "" : ":" + url.getPort();
    robotsTxtUrl.setURL("http://" + host + port + "/robots.txt");
    HostDirectives directives = null;
    PageFetchResult fetchResult = null;
    try {
        fetchResult = pageFetcher.fetchHeader(robotsTxtUrl);
        // TO_DO: Does this work on redirects e.g. http://news.ycombinator.com/robots.txt -> https://news.ycombinator.com/robots.txt
        if (fetchResult.getStatusCode() == HttpStatus.SC_OK) {
            Page page = new Page(robotsTxtUrl);
            fetchResult.fetchContent(page);
            if (ContentTypeUtil.hasPlainTextContent(page.getContentType())) {
                try {
                    String content;
                    if (page.getContentCharset() == null) {
                        // No declared charset: falls back to the platform default charset.
                        content = new String(page.getContentData());
                    } else {
                        content = new String(page.getContentData(), page.getContentCharset());
                    }
                    directives = RobotstxtParser.parse(content, config.getUserAgentName());
                } catch (Exception e) {
                    // Parse/charset failure is swallowed; empty directives are used below.
                    e.printStackTrace();
                }
            }
        }
    } finally {
        // Release the connection even when no content was consumed.
        if (fetchResult != null) {
            fetchResult.discardContentIfNotConsumed();
        }
    }
    if (directives == null) {
        // Cache a placeholder so the fetch time for this host is still recorded.
        directives = new HostDirectives();
    }
    synchronized (host2directivesCache) {
        if (host2directivesCache.size() == config.getCacheSize()) {
            // Cache full: evict the entry with the oldest last-access time.
            String minHost = null;
            long minAccessTime = Long.MAX_VALUE;
            for (Entry<String, HostDirectives> entry : host2directivesCache.entrySet()) {
                if (entry.getValue().getLastAccessTime() < minAccessTime) {
                    minAccessTime = entry.getValue().getLastAccessTime();
                    minHost = entry.getKey();
                }
            }
            host2directivesCache.remove(minHost);
        }
        host2directivesCache.put(host, directives);
    }
    return directives;
}
From source file:frame.crawler4j.robotstxt.RobotstxtServer.java
/**
 * Fetches and parses robots.txt for the host of the given URL, then stores the
 * resulting directives in the cache, evicting the least-recently-accessed entry
 * when the cache is full.
 *
 * @param url a URL on the host whose robots.txt should be fetched
 * @return the parsed directives; an empty {@code HostDirectives} if the fetch
 *         or parse failed (so the fetch time is still tracked)
 */
private HostDirectives fetchDirectives(URL url) {
    WebURL robotsTxtUrl = new WebURL();
    String host = getHost(url);
    // Omit the port when it is the protocol default or unspecified (-1).
    String port = (url.getPort() == url.getDefaultPort() || url.getPort() == -1) ? "" : ":" + url.getPort();
    robotsTxtUrl.setURL("http://" + host + port + "/robots.txt");
    HostDirectives directives = null;
    PageFetchResult fetchResult = null;
    try {
        fetchResult = pageFetcher.fetchHeader(robotsTxtUrl);
        if (fetchResult.getStatusCode() == HttpStatus.SC_OK) {
            Page page = new Page(robotsTxtUrl);
            fetchResult.fetchContent(page);
            if (Util.hasPlainTextContent(page.getContentType())) {
                try {
                    String content;
                    if (page.getContentCharset() == null) {
                        // No declared charset: falls back to the platform default charset.
                        content = new String(page.getContentData());
                    } else {
                        content = new String(page.getContentData(), page.getContentCharset());
                    }
                    directives = RobotstxtParser.parse(content, config.getUserAgentName());
                } catch (Exception e) {
                    // Parse/charset failure is swallowed; empty directives are used below.
                    e.printStackTrace();
                }
            }
        }
    } finally {
        // Release the connection even when no content was consumed.
        if (fetchResult != null) {
            fetchResult.discardContentIfNotConsumed();
        }
    }
    if (directives == null) {
        // We still need to have this object to keep track of the time we
        // fetched it
        directives = new HostDirectives();
    }
    synchronized (host2directivesCache) {
        if (host2directivesCache.size() == config.getCacheSize()) {
            // Cache full: evict the entry with the oldest last-access time.
            String minHost = null;
            long minAccessTime = Long.MAX_VALUE;
            for (Entry<String, HostDirectives> entry : host2directivesCache.entrySet()) {
                if (entry.getValue().getLastAccessTime() < minAccessTime) {
                    minAccessTime = entry.getValue().getLastAccessTime();
                    minHost = entry.getKey();
                }
            }
            host2directivesCache.remove(minHost);
        }
        host2directivesCache.put(host, directives);
    }
    return directives;
}
From source file:org.dasein.cloud.azure.platform.AzureSQLDatabaseSupportRequests.java
/**
 * Re-encodes the given URL string by decomposing it with {@link URL} and
 * rebuilding it via the multi-argument {@link URI} constructor, which
 * percent-encodes illegal characters in each component.
 *
 * @param urlString the raw URL to encode
 * @return the encoded URI string
 * @throws InternalException if the URL cannot be parsed or rebuilt
 */
private String getEncodedUri(String urlString) throws InternalException {
    try {
        URL parsed = new URL(urlString);
        URI rebuilt = new URI(parsed.getProtocol(), parsed.getUserInfo(), parsed.getHost(),
                parsed.getPort(), parsed.getPath(), parsed.getQuery(), parsed.getRef());
        return rebuilt.toString();
    } catch (Exception e) {
        // NOTE(review): only the message is propagated here, the original cause is
        // lost — consider a cause-carrying InternalException overload if one exists.
        throw new InternalException(e.getMessage());
    }
}
From source file:com.openteach.diamond.metadata.ServiceURL.java
/**
 * Parses {@code strURL} into protocol, host, port, service name and query.
 *
 * The custom protocol prefix is temporarily swapped for "http" so that
 * {@link java.net.URL} can perform the structural parsing; the original
 * protocol text before "://" is kept separately.
 *
 * @throws MalformedURLException if the URL lacks the "://" separator or the
 *         remainder cannot be parsed as a URL
 */
private void parse() throws MalformedURLException {
    int index = strURL.indexOf("://");
    if (index < 0) {
        // Previously a missing separator fell through to substring(-1) and threw an
        // unrelated StringIndexOutOfBoundsException; report the declared exception instead.
        throw new MalformedURLException("Missing protocol separator \"://\" in: " + strURL);
    }
    URL url = new URL(String.format("http%s", strURL.substring(index)));
    protocol = strURL.substring(0, index);
    host = url.getHost();
    port = url.getPort();
    serviceName = url.getFile();
    query = url.getQuery();
}
From source file:com.tremolosecurity.scale.user.ScaleSession.java
/**
 * Session initialisation: builds the shared HTTP client from the common
 * configuration and records the authenticated user of the current request.
 * Any failure is logged and leaves the session partially initialised.
 */
@PostConstruct
public void init() {
    try {
        HttpClientInfo httpci = this.commonConfig.createHttpClientInfo();
        // NOTE(review): AllowAllHostnameVerifier disables TLS hostname verification —
        // confirm this is intentional for the target deployment.
        http = HttpClients.custom().setConnectionManager(httpci.getCm())
                .setDefaultRequestConfig(httpci.getGlobalConfig())
                .setHostnameVerifier(new AllowAllHostnameVerifier()).build();
        URL uurl = new URL(commonConfig.getScaleConfig().getServiceConfiguration().getUnisonURL());
        // NOTE(review): 'port' is never read afterwards; parsing the URL here also acts
        // as validation of the configured Unison URL (a bad value lands in the catch).
        int port = uurl.getPort();
        HttpServletRequest request = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext()
                .getRequest();
        this.login = request.getRemoteUser();
    } catch (Exception e) {
        logger.error("Could not initialize ScaleSession", e);
    }
}
From source file:de.comlineag.snc.webcrawler.robotstxt.RobotstxtServer.java
/**
 * Fetches and parses robots.txt for the host of the given URL, then stores the
 * resulting directives in the cache, evicting the least-recently-accessed entry
 * when the cache is full.
 *
 * @param url a URL on the host whose robots.txt should be fetched
 * @return the parsed directives; an empty {@code HostDirectives} if the fetch
 *         or parse failed (so the fetch time is still tracked)
 */
private HostDirectives fetchDirectives(URL url) {
    WebURL robotsTxtUrl = new WebURL();
    String host = getHost(url);
    // Omit the port when it is the protocol default or unspecified (-1).
    String port = (url.getPort() == url.getDefaultPort() || url.getPort() == -1) ? "" : ":" + url.getPort();
    robotsTxtUrl.setURL("http://" + host + port + "/robots.txt");
    HostDirectives directives = null;
    PageFetchResult fetchResult = null;
    try {
        fetchResult = pageFetcher.fetchHeader(robotsTxtUrl);
        if (fetchResult.getStatusCode() == HttpStatus.SC_OK) {
            Page page = new Page(robotsTxtUrl);
            fetchResult.fetchContent(page);
            if (WebCrawlerUtil.hasPlainTextContent(page.getContentType())) {
                try {
                    String content;
                    if (page.getContentCharset() == null) {
                        // No declared charset: falls back to the platform default charset.
                        content = new String(page.getContentData());
                    } else {
                        content = new String(page.getContentData(), page.getContentCharset());
                    }
                    directives = RobotstxtParser.parse(content, config.getUserAgentName());
                } catch (Exception e) {
                    logger.error("Error occurred while fetching (robots) url: " + robotsTxtUrl.getURL(), e);
                }
            }
        }
    } finally {
        // Release the connection even when no content was consumed.
        if (fetchResult != null) {
            fetchResult.discardContentIfNotConsumed();
        }
    }
    if (directives == null) {
        // We still need to have this object to keep track of the time we
        // fetched it
        directives = new HostDirectives();
    }
    synchronized (host2directivesCache) {
        if (host2directivesCache.size() == config.getCacheSize()) {
            // Cache full: evict the entry with the oldest last-access time.
            String minHost = null;
            long minAccessTime = Long.MAX_VALUE;
            for (Entry<String, HostDirectives> entry : host2directivesCache.entrySet()) {
                if (entry.getValue().getLastAccessTime() < minAccessTime) {
                    minAccessTime = entry.getValue().getLastAccessTime();
                    minHost = entry.getKey();
                }
            }
            host2directivesCache.remove(minHost);
        }
        host2directivesCache.put(host, directives);
    }
    return directives;
}
From source file:org.fourthline.cling.bridge.BridgeUpnpServiceConfiguration.java
public BridgeUpnpServiceConfiguration(URL localBaseURL, String contextPath, HttpClient httpClient) { super(localBaseURL == null ? 0 : localBaseURL.getPort(), false); this.localBaseURL = localBaseURL; this.contextPath = contextPath; this.actionProcessor = createFormActionProcessor(); this.combinedDescriptorBinder = createCombinedDescriptorBinder(); if (httpClient == null) { StreamClientConfigurationImpl streamConfiguration = new StreamClientConfigurationImpl(); HttpParams params = new BasicHttpParams(); HttpConnectionParams.setConnectionTimeout(params, streamConfiguration.getConnectionTimeoutSeconds() * 1000); HttpConnectionParams.setSoTimeout(params, streamConfiguration.getDataReadTimeoutSeconds() * 1000); HttpProtocolParams.setContentCharset(params, streamConfiguration.getContentCharset()); HttpProtocolParams.setUseExpectContinue(params, false); ThreadSafeClientConnManager clientConnectionManager = new ThreadSafeClientConnManager(); clientConnectionManager.setMaxTotal(streamConfiguration.getMaxTotalConnections()); clientConnectionManager.setDefaultMaxPerRoute(100); // do not request zipped response as Multicast2Unicast sends buggy data otherwise httpClient = new DefaultHttpClient(clientConnectionManager, params); }/*from w w w . java 2 s . c o m*/ this.httpClient = httpClient; }