List of usage examples for java.net MalformedURLException printStackTrace
public void printStackTrace()
From source file:org.scigap.iucig.controller.ScienceDisciplineController.java
/**
 * Persists the science-discipline selection of the authenticated remote user by
 * POSTing it as form parameters to the external science-discipline service.
 *
 * @param discipline the submitted discipline selection; when null, nothing is sent
 * @param request    the servlet request supplying the remote user name
 * @throws Exception if the request (and therefore the remote user) is null
 */
@ResponseBody
@RequestMapping(value = "/updateScienceDiscipline", method = RequestMethod.POST)
public void updateScienceDiscipline(@RequestBody ScienceDiscipline discipline, HttpServletRequest request)
        throws Exception {
    if (request == null) {
        // Preserved contract: a missing request (no authenticated user) is an error.
        throw new Exception("Remote user is null");
    }
    String remoteUser = request.getRemoteUser();
    try {
        if (discipline != null) {
            String urlParameters = "user=" + remoteUser;
            // The primary/secondary/tertiary sections were three near-identical
            // copy-pasted blocks; they now share one helper.
            urlParameters += disciplineParam("discipline1", discipline.getPrimarySubDisc(),
                    discipline.getPrimaryDisc());
            urlParameters += disciplineParam("discipline2", discipline.getSecondarySubDisc(),
                    discipline.getSecondaryDisc());
            urlParameters += disciplineParam("discipline3", discipline.getTertiarySubDisc(),
                    discipline.getTertiaryDisc());
            urlParameters += "&date=" + discipline.getDate() + "&source=cybergateway&commit=Update&cluster="
                    + discipline.getCluster();

            URL obj = new URL(SCIENCE_DISCIPLINE_URL + "discipline/");
            HttpURLConnection con = (HttpURLConnection) obj.openConnection();
            con.setRequestMethod("POST");
            con.setDoInput(true);
            con.setDoOutput(true);
            con.setUseCaches(false);
            // try-with-resources fixes the stream leak the old code had when
            // writeBytes() threw before close() was reached.
            try (DataOutputStream wr = new DataOutputStream(con.getOutputStream())) {
                wr.writeBytes(urlParameters);
                wr.flush();
            }
            int responseCode = con.getResponseCode();
            System.out.println("\nSending 'POST' request to URL : " + SCIENCE_DISCIPLINE_URL);
            System.out.println("Post parameters : " + urlParameters);
            System.out.println("Response Code : " + responseCode);
        }
    } catch (MalformedURLException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

/**
 * Builds the "&amp;name=id" request parameter for one discipline slot.
 *
 * If an explicit sub-discipline was selected (map contains an "id" key), that id
 * is used. Otherwise the discipline map's "subdisciplines" list is scanned for
 * the "Other / Unspecified" entry; as in the original code, the parameter is
 * still appended (with id 0) whenever the list is present but no entry matches.
 *
 * @param name    the request parameter name (discipline1/2/3)
 * @param subDisc the explicitly selected sub-discipline, may be null/empty
 * @param disc    the discipline fallback map, may be null/empty
 * @return the parameter fragment to append, or "" when nothing applies
 */
private static String disciplineParam(String name, Map<String, String> subDisc, Map<String, Object> disc) {
    if (subDisc != null && !subDisc.isEmpty()) {
        String id = subDisc.get("id");
        // Only append when an explicit sub-discipline id was provided.
        return id != null ? "&" + name + "=" + Integer.valueOf(id) : "";
    }
    if (disc != null && !disc.isEmpty()) {
        Object subdisciplines = disc.get("subdisciplines");
        if (subdisciplines instanceof ArrayList) {
            int fallbackId = 0;
            for (Object entry : (ArrayList) subdisciplines) {
                // Constant-first equals() avoids the NPE the old code risked on a
                // missing "name" value.
                if (entry instanceof HashMap
                        && "Other / Unspecified".equals(((HashMap) entry).get("name"))) {
                    fallbackId = Integer.valueOf(((HashMap) entry).get("id").toString());
                }
            }
            return "&" + name + "=" + fallbackId;
        }
    }
    return "";
}
From source file:de.ifgi.lodum.sparqlfly.SparqlFly.java
/** * Returns jena model from URL/*from w ww .j a va 2 s . c om*/ * @throws ParseException */ private Model getModelFromURL(String urlString, String format) throws Exception { Model m = ModelFactory.createDefaultModel(ReificationStyle.Standard); if (format.equals("HTML")) { StatementSink sink = new JenaStatementSink(m); XMLReader parser = ParserFactory.createReaderForFormat(sink, Format.HTML); parser.parse(urlString); } else { URL url = null; try { url = new URL(urlString); } catch (MalformedURLException e1) { e1.printStackTrace(); } URLConnection c = null; try { c = url.openConnection(); c.setConnectTimeout(10000); } catch (IOException e) { e.printStackTrace(); } BufferedReader in = null; try { in = new BufferedReader(new InputStreamReader(c.getInputStream(), Charset.forName("UTF-8"))); } catch (IOException e) { e.printStackTrace(); } m.read(in, "", format); try { in.close(); } catch (IOException e) { e.printStackTrace(); } RSIterator it = m.listReifiedStatements(); while (it.hasNext()) { m.add(it.nextRS().getStatement()); } } return m; }
From source file:bbcdataservice.BBCDataService.java
/**
 * Downloads and stores BBC programme data for every channel over the requested
 * day range. For each channel/day the schedule XML is fetched to a temp file,
 * parsed into day programs, and the collected programs are handed to the
 * update manager once the channel is done.
 *
 * @param updateManager receives the finished day programs
 * @param channels      channels to update
 * @param startDate     first day to fetch
 * @param days          number of consecutive days to fetch
 * @param monitor       progress feedback (one step per channel)
 */
public void updateTvData(final TvDataUpdateManager updateManager, final Channel[] channels, final Date startDate,
        final int days, final ProgressMonitor monitor) throws TvBrowserException {
    // Connection check left disabled by the original author:
    // if (!updateManager.checkConnection()) {
    //     return;
    // }
    monitor.setMessage(mLocalizer.msg("update", "Updating BBC data"));
    monitor.setMaximum(channels.length);
    int progress = 0;
    for (Channel channel : channels) {
        // Programs collected for this channel before being committed below.
        HashMap<Date, MutableChannelDayProgram> dayPrograms = new HashMap<Date, MutableChannelDayProgram>();
        monitor.setValue(progress++);
        for (int i = 0; i < days; i++) {
            Date date = startDate.addDays(i);
            String year = String.valueOf(date.getYear());
            String month = String.valueOf(date.getMonth());
            String day = String.valueOf(date.getDayOfMonth());
            // Schedule URL shape: <channel webpage>/<year>/<month>/<day>.xml
            String schedulePath = "/" + year + "/" + month + "/" + day + ".xml";
            String url = channel.getWebpage() + schedulePath;
            File file = new File(mWorkingDir, "bbc.xml");
            try {
                IOUtilities.download(new URL(url), file);
            } catch (MalformedURLException e) {
                // NOTE(review): download failures are only printed, so parsing
                // proceeds with a stale or missing file — confirm this
                // best-effort behavior is intended.
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            }
            boolean continueWithNextDay = false;
            try {
                continueWithNextDay = BBCProgrammesParser.parse(dayPrograms, file, channel, date);
            } catch (Exception e) {
                e.printStackTrace();
            }
            file.delete();
            if (!continueWithNextDay) {
                // Parser signalled there is no further data for this channel.
                break;
            }
        }
        // Store the received programs for this channel.
        for (MutableChannelDayProgram dayProgram : dayPrograms.values()) {
            updateManager.updateDayProgram(dayProgram);
        }
    }
}
From source file:com.redhat.rhn.frontend.action.systems.SystemSearchSetupAction.java
/**
 * Runs a system search with the request's search string, view mode, scope and
 * invert flag, translating search-server failures into Struts action errors.
 *
 * @param context the request context carrying the search parameters
 * @return the search results, or null when the search failed or matched nothing
 */
protected DataResult performSearch(RequestContext context) {
    HttpServletRequest request = context.getRequest();
    String searchString = context.getParam(SEARCH_STRING, false);
    String viewMode = context.getParam(VIEW_MODE, false);
    String whereToSearch = context.getParam(WHERE_TO_SEARCH, false);
    // Fix: String.equals() returns a primitive boolean, so the old boxed
    // Boolean could never be null — the "if (invertResults == null)" guard was
    // dead code and has been removed along with the boxing.
    boolean invertResults = StringUtils.defaultString(context.getParam(INVERT_RESULTS, false)).equals("on");
    ActionErrors errs = new ActionErrors();
    DataResult dr = null;
    try {
        dr = SystemSearchHelper.systemSearch(context, searchString, viewMode, invertResults, whereToSearch);
    } catch (MalformedURLException e) {
        // Fix: stack traces now go through the logger instead of printStackTrace().
        log.info("Caught Exception :" + e, e);
        errs.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage("packages.search.connection_error"));
    } catch (XmlRpcFault e) {
        log.info("Caught Exception :" + e, e);
        log.info("ErrorCode = " + e.getErrorCode());
        if (e.getErrorCode() == 100) {
            // Search server could not parse the query string.
            log.error("Invalid search query", e);
            errs.add(ActionMessages.GLOBAL_MESSAGE,
                    new ActionMessage("packages.search.could_not_parse_query", searchString));
        } else if (e.getErrorCode() == 200) {
            log.error("Index files appear to be missing: ", e);
            errs.add(ActionMessages.GLOBAL_MESSAGE,
                    new ActionMessage("packages.search.index_files_missing", searchString));
        } else {
            errs.add(ActionMessages.GLOBAL_MESSAGE,
                    new ActionMessage("packages.search.could_not_execute_query", searchString));
        }
    } catch (XmlRpcException e) {
        log.info("Caught Exception :" + e, e);
        errs.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage("packages.search.connection_error"));
    }
    if (dr == null) {
        // No results: surface the standard "no matches" message.
        ActionMessages messages = new ActionMessages();
        messages.add(ActionMessages.GLOBAL_MESSAGE, new ActionMessage("systemsearch_no_matches_found"));
        getStrutsDelegate().saveMessages(request, messages);
    }
    if (!errs.isEmpty()) {
        addErrors(request, errs);
    }
    return dr;
}
From source file:com.example.httpjson.AppEngineClient.java
public void put(URL uri, Map<String, List<String>> headers, byte[] body) { PUT put = new PUT(uri, headers, body); URL url = null;/* w w w . j av a 2 s. c o m*/ try { url = new URL("http://www.example.com/resource"); } catch (MalformedURLException e) { // TODO Auto-generated catch block e.printStackTrace(); } HttpURLConnection httpCon = null; try { httpCon = (HttpURLConnection) url.openConnection(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } try { httpCon.setDoOutput(true); httpCon.setRequestMethod("PUT"); OutputStreamWriter out = new OutputStreamWriter(httpCon.getOutputStream()); out.write("Resource content from PUT!!! "); out.close(); httpCon.getInputStream(); } catch (ProtocolException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } }
From source file:com.github.hobbe.android.openkarotz.karotz.OpenKarotz.java
/**
 * Initialize a new OpenKarotz instance.
 *
 * @param hostname the hostname or IP of the Karotz device
 * @throws IllegalArgumentException if the hostname yields an invalid API URL
 */
public OpenKarotz(String hostname) {
    this.hostname = hostname;
    try {
        this.api = new URL(PROTOCOL + "://" + hostname + ":" + PORT);
    } catch (MalformedURLException e) {
        // Fail fast: the old code printed and swallowed the exception, leaving
        // "api" null and deferring the failure to a later NullPointerException
        // on first use. An invalid hostname is a caller error, reported as such.
        throw new IllegalArgumentException("Invalid OpenKarotz host: " + hostname, e);
    }
}
From source file:com.example.android.threadsample.RSSPullService.java
/**
 * In an IntentService, onHandleIntent is run on a background thread. As it
 * runs, it broadcasts its current status using the LocalBroadcastManager.
 *
 * Flow: read the feed URL from the Intent, ask the content provider for the
 * URL's stored modification date (to send If-Modified-Since), fetch and parse
 * the RSS feed, bulk-insert the parsed images, then insert or update the
 * stored modification date.
 *
 * @param workIntent The Intent that starts the IntentService; its "data" value
 *                   is the URL of the web site the RSS parser reads.
 */
@Override
protected void onHandleIntent(Intent workIntent) {
    // Gets the URL to read from the incoming Intent's "data" value.
    String localUrlString = workIntent.getDataString();
    // Projection for querying the modification-date table in the provider.
    final String[] dateProjection = new String[] { DataProviderContract.ROW_ID,
            DataProviderContract.DATA_DATE_COLUMN };
    URL localURL;
    // Closed in the finally block below to avoid leaking it on any exit path.
    Cursor cursor = null;
    try {
        // Convert the incoming data string to a URL (may throw MalformedURLException).
        localURL = new URL(localUrlString);
        // Open a connection; throws IOException on I/O errors.
        URLConnection localURLConnection = localURL.openConnection();
        // Only HTTP connections are processed; anything else is silently ignored.
        if ((localURLConnection instanceof HttpURLConnection)) {
            // Broadcast that processing has started.
            mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_STARTED);
            HttpURLConnection localHttpURLConnection = (HttpURLConnection) localURLConnection;
            localHttpURLConnection.setRequestProperty("User-Agent", Constants.USER_AGENT);
            // Query whether (and when) this URL was read previously; the
            // provider throws if the URI is invalid.
            cursor = getContentResolver().query(DataProviderContract.DATE_TABLE_CONTENTURI, dateProjection, null,
                    null, null);
            // Flag: true when no stored modification date exists yet.
            boolean newMetadataRetrieved;
            if (null != cursor && cursor.moveToFirst()) {
                long storedModifiedDate = cursor
                        .getLong(cursor.getColumnIndex(DataProviderContract.DATA_DATE_COLUMN));
                if (0 != storedModifiedDate) {
                    // Only download if the feed changed since the stored date
                    // (RFC1123-formatted If-Modified-Since header).
                    // NOTE(review): org.apache.http.impl.cookie.DateUtils is a
                    // deprecated Apache HttpClient internal — consider replacing.
                    localHttpURLConnection.setRequestProperty("If-Modified-Since",
                            org.apache.http.impl.cookie.DateUtils.formatDate(new Date(storedModifiedDate),
                                    org.apache.http.impl.cookie.DateUtils.PATTERN_RFC1123));
                }
                newMetadataRetrieved = false;
            } else {
                // No stored modification date; new metadata must be recorded.
                newMetadataRetrieved = true;
            }
            // Broadcast that the service is about to connect to the RSS feed.
            mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_CONNECTING);
            int responseCode = localHttpURLConnection.getResponseCode();
            switch (responseCode) {
            // Anything other than 200 OK (e.g. 304 Not Modified) falls through
            // to the completion broadcast without parsing.
            case HttpStatus.SC_OK:
                long lastModifiedDate = localHttpURLConnection.getLastModified();
                mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_PARSING);
                // Parse the feed XML; mBroadcaster lets the parser report progress.
                RSSPullParser localPicasaPullParser = new RSSPullParser();
                try {
                    localPicasaPullParser.parseXml(localURLConnection.getInputStream(), mBroadcaster);
                } catch (JSONException e) {
                    // NOTE(review): a JSON error mid-parse is only printed and the
                    // service continues with whatever was parsed — confirm intended.
                    e.printStackTrace();
                }
                // Broadcast that parsed data is being written to the provider.
                mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_WRITING);
                Vector<ContentValues> imageValues = localPicasaPullParser.getImages();
                int imageVectorSize = imageValues.size();
                // One ContentValues per image, bulk-inserted in a single call.
                ContentValues[] imageValuesArray = new ContentValues[imageVectorSize];
                imageValuesArray = imageValues.toArray(imageValuesArray);
                getContentResolver().bulkInsert(DataProviderContract.PICTUREURL_TABLE_CONTENTURI,
                        imageValuesArray);
                // Record the URL's last-modified date for the next run.
                ContentValues dateValues = new ContentValues();
                dateValues.put(DataProviderContract.DATA_DATE_COLUMN, lastModifiedDate);
                if (newMetadataRetrieved) {
                    // No previous metadata existed, so insert the date row.
                    getContentResolver().insert(DataProviderContract.DATE_TABLE_CONTENTURI, dateValues);
                } else {
                    // Previous metadata existed, so update the matching row.
                    getContentResolver().update(DataProviderContract.DATE_TABLE_CONTENTURI, dateValues,
                            DataProviderContract.ROW_ID + "="
                                    + cursor.getString(cursor.getColumnIndex(DataProviderContract.ROW_ID)),
                            null);
                }
                break;
            }
            // Broadcast that the feed retrieval is complete.
            mBroadcaster.broadcastIntentWithState(Constants.STATE_ACTION_COMPLETE);
        }
        // Possible exceptions: all are printed and the service exits quietly.
    } catch (MalformedURLException localMalformedURLException) {
        localMalformedURLException.printStackTrace();
    } catch (IOException localIOException) {
        localIOException.printStackTrace();
    } catch (XmlPullParserException localXmlPullParserException) {
        localXmlPullParserException.printStackTrace();
    } finally {
        // Close the cursor on every path to prevent a resource leak.
        if (null != cursor) {
            cursor.close();
        }
    }
}
From source file:es.tekniker.framework.ktek.questionnaire.mng.server.EventServiceClient.java
private HttpURLConnection getEventServiceSEConnection(String method, String service, String contentType) { HttpURLConnection conn = null; URL url = null;//from w ww. j ava 2 s . com String urlStr = protocol + "://" + service + method; log.debug(urlStr); try { url = new URL(urlStr); conn = (HttpURLConnection) url.openConnection(); conn.setDoOutput(true); conn.setRequestMethod(methodPOST); conn.setRequestProperty(headerContentType, contentType); conn.setRequestProperty(headerAuthorization, headerAuthorizationOAuth + " " + headerToken); conn.setConnectTimeout(timeout); log.debug(headerContentType + " " + contentType); log.debug(headerAuthorizationOAuth + " " + headerToken); } catch (MalformedURLException e) { log.error("MalformedURLException " + e.getMessage()); e.printStackTrace(); } catch (IOException e) { log.error("IOException " + e.getMessage()); e.printStackTrace(); } return conn; }
From source file:edu.usf.cutr.gtfsrtvalidator.background.BackgroundTask.java
/**
 * One monitoring iteration for the current GTFS-rt feed: fetch the feed,
 * deduplicate it against the previously stored iteration via an MD5 digest,
 * persist the iteration, combine all entities stored for the GTFS feed into a
 * single FeedMessage, and run every validation rule against it.
 */
@Override
public void run() {
    try {
        long startTimeNanos = System.nanoTime();
        GtfsRealtime.FeedMessage currentFeedMessage;
        GtfsRealtime.FeedMessage previousFeedMessage = null;
        GtfsDaoImpl gtfsData;
        GtfsMetadata gtfsMetadata;
        // Holds data needed in the database under each iteration.
        GtfsRtFeedIterationModel feedIteration;
        // Get the GTFS feed from the GtfsDaoMap using the gtfsFeedId of the current feed.
        gtfsData = GtfsFeed.GtfsDaoMap.get(mCurrentGtfsRtFeed.getGtfsFeedModel().getFeedId());
        // Create the GTFS metadata lazily, once per feed id.
        gtfsMetadata = mGtfsMetadata.computeIfAbsent(mCurrentGtfsRtFeed.getGtfsFeedModel().getFeedId(),
                k -> new GtfsMetadata(mCurrentGtfsRtFeed.getGtfsFeedModel().getGtfsUrl(),
                        TimeZone.getTimeZone(mCurrentGtfsRtFeed.getGtfsFeedModel().getAgency()), gtfsData));
        // Read the GTFS-rt feed from the feed URL.
        URL gtfsRtFeedUrl;
        Session session;
        try {
            gtfsRtFeedUrl = new URL(mCurrentGtfsRtFeed.getGtfsUrl());
        } catch (MalformedURLException e) {
            // A bad URL ends this iteration; nothing can be fetched.
            _log.error("Malformed Url: " + mCurrentGtfsRtFeed.getGtfsUrl(), e);
            e.printStackTrace();
            return;
        }
        try {
            // Fetch the raw protobuf bytes for this iteration.
            InputStream in = gtfsRtFeedUrl.openStream();
            byte[] gtfsRtProtobuf = IOUtils.toByteArray(in);
            boolean isUniqueFeed = true;
            // MD5 digest is used only as a cheap change detector, not for security.
            MessageDigest md = MessageDigest.getInstance("MD5");
            byte[] prevFeedDigest = null;
            byte[] currentFeedDigest = md.digest(gtfsRtProtobuf);
            session = GTFSDB.initSessionBeginTrans();
            // Load the most recent stored iteration for this rt feed.
            feedIteration = (GtfsRtFeedIterationModel) session
                    .createQuery("FROM GtfsRtFeedIterationModel" + " WHERE rtFeedId = "
                            + mCurrentGtfsRtFeed.getGtfsRtId() + " ORDER BY IterationId DESC")
                    .setMaxResults(1).uniqueResult();
            if (feedIteration != null) {
                prevFeedDigest = feedIteration.getFeedHash();
            }
            if (MessageDigest.isEqual(currentFeedDigest, prevFeedDigest)) {
                // Same digest as last time: the identical feed was received again.
                isUniqueFeed = false;
            }
            InputStream is = new ByteArrayInputStream(gtfsRtProtobuf);
            currentFeedMessage = GtfsRealtime.FeedMessage.parseFrom(is);
            long feedTimestamp = TimeUnit.SECONDS.toMillis(currentFeedMessage.getHeader().getTimestamp());
            // Create a new iteration row; the protobuf blob is stored only for
            // unique feeds (null otherwise, to save space).
            if (isUniqueFeed) {
                if (feedIteration != null && feedIteration.getFeedprotobuf() != null) {
                    // Reconstruct the previous feed message for rule comparisons.
                    InputStream previousIs = new ByteArrayInputStream(feedIteration.getFeedprotobuf());
                    previousFeedMessage = GtfsRealtime.FeedMessage.parseFrom(previousIs);
                }
                feedIteration = new GtfsRtFeedIterationModel(System.currentTimeMillis(), feedTimestamp,
                        gtfsRtProtobuf, mCurrentGtfsRtFeed, currentFeedDigest);
            } else {
                feedIteration = new GtfsRtFeedIterationModel(System.currentTimeMillis(), feedTimestamp, null,
                        mCurrentGtfsRtFeed, currentFeedDigest);
            }
            session.save(feedIteration);
            GTFSDB.commitAndCloseSession(session);
            if (!isUniqueFeed) {
                // Duplicate feed: iteration is recorded but rules are not re-run.
                return;
            }
        } catch (Exception e) {
            _log.error("The URL '" + gtfsRtFeedUrl + "' does not contain valid Gtfs-Rt data", e);
            return;
        }
        // Cache the freshly parsed message and combine all entities stored for
        // the parent GTFS feed (there may be several rt feeds per GTFS feed).
        mGtfsRtFeedMap.put(feedIteration.getGtfsRtFeedModel().getGtfsRtId(), currentFeedMessage);
        session = GTFSDB.initSessionBeginTrans();
        List<GtfsRealtime.FeedEntity> allEntitiesArrayList = new ArrayList<>();
        List<GtfsRtFeedModel> gtfsRtFeedModelList;
        gtfsRtFeedModelList = session.createQuery("FROM GtfsRtFeedModel" + " WHERE gtfsFeedID = "
                + mCurrentGtfsRtFeed.getGtfsFeedModel().getFeedId()).list();
        GTFSDB.closeSession(session);
        GtfsRealtime.FeedHeader header = null;
        if (gtfsRtFeedModelList.size() < 1) {
            _log.error("The URL '" + gtfsRtFeedUrl + "' is not stored properly into the database");
            return;
        }
        for (GtfsRtFeedModel gtfsRtFeedModel : gtfsRtFeedModelList) {
            GtfsRealtime.FeedMessage message = mGtfsRtFeedMap.get(gtfsRtFeedModel.getGtfsRtId());
            // NOTE(review): message.getHeader() is dereferenced here before the
            // "message != null" check below — a missing map entry would NPE.
            if (header == null) {
                // Save one header to use in our combined feed below.
                header = message.getHeader();
            } else {
                if (message.getHeader() != null && message.getHeader().getTimestamp() > header.getTimestamp()) {
                    // Use largest header timestamp with multiple feeds - see #239.
                    header = message.getHeader();
                }
            }
            if (message != null) {
                allEntitiesArrayList.addAll(message.getEntityList());
            }
        }
        // Build the combined feed from the chosen header plus all entities.
        GtfsRealtime.FeedMessage.Builder feedMessageBuilder = GtfsRealtime.FeedMessage.newBuilder();
        feedMessageBuilder.setHeader(header);
        feedMessageBuilder.addAllEntity(allEntitiesArrayList);
        GtfsRealtime.FeedMessage combinedFeed = feedMessageBuilder.build();
        // Use the same current time for all rules for consistency.
        long currentTimeMillis = System.currentTimeMillis();
        // Run validation rules.
        for (FeedEntityValidator rule : mValidationRules) {
            validateEntity(currentTimeMillis, combinedFeed, previousFeedMessage, gtfsData, gtfsMetadata,
                    feedIteration, rule);
        }
        logDuration(_log, "Processed " + mCurrentGtfsRtFeed.getGtfsUrl() + " in ", startTimeNanos);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:app.HadoopImporterWindowTopComponent.java
public static String[] loadQueriesForNetworkLayer(String networkName, String layerName, HadoopImporterWindowTopComponent comp) { String[] q = new String[2]; CloseableHttpClient httpClient = null; HttpGet httpGet = null;/*from w w w . j a v a 2 s. co m*/ CloseableHttpResponse response = null; try { httpClient = HttpClients.createDefault(); httpGet = new HttpGet(); String query = "[[Modification date::+]]|?Modification date|sort=Modification date|order=Ddesc"; String queryForLayerSQL = "[[Category:TMN_layer]][[TMN_layer_name::" + layerName + "]][[belongs_to_TMN::" + networkName + "]]|?TMN_layer_nlq|?TMN_layer_elq"; URI uri = new URI("http://semanpix.de/oldtimer/wiki/api.php?action=ask&format=json&query=" + encodeQuery(queryForLayerSQL)); //"); String uri2 = "http://semanpix.de/oldtimer/wiki/api.php?action=ask&format=json&query=" + queryForLayerSQL; httpGet.setURI(uri); System.out.println("[Request:]\n" + uri2); if (comp != null) { comp.setQuery(uri2); } // ArrayList<NameValuePair> nvps; // nvps = new ArrayList<NameValuePair>(); // nvps.add(new BasicNameValuePair("content-type", "application/json")); // nvps.add(new BasicNameValuePair("x-kii-appid", "xxxxx")); // nvps.add(new BasicNameValuePair("x-kii-appkey", "xxxxxxxxxxxxxx")); // StringEntity input = new StringEntity("{\"username\": \"dummyuser\",\"password\": \"dummypassword\"}"); // input.setContentType("application/json"); // // httpPost.setEntity(input); // // for (NameValuePair h : nvps) // { // httpPost.addHeader(h.getName(), h.getValue()); // } response = httpClient.execute(httpGet); if (response.getStatusLine().getStatusCode() != 200) { throw new RuntimeException( "Failed : HTTP error code : " + response.getStatusLine().getStatusCode()); } BufferedReader br = new BufferedReader(new InputStreamReader((response.getEntity().getContent()))); StringBuffer sb = new StringBuffer(); String output; System.out.println("Output from Server .... 
\n"); while ((output = br.readLine()) != null) { System.out.println(output); sb.append(output); } if (comp != null) { comp.setResponse(sb.toString()); } JSONObject obj1 = new JSONObject(sb.toString()); JSONObject obj2 = obj1.getJSONObject("query"); JSONObject obj3 = obj2.getJSONObject("results"); JSONObject obj4 = obj3.getJSONObject(layerName); JSONObject obj5 = obj4.getJSONObject("printouts"); String nlq = obj5.optString("TMN layer nlq"); String elq = obj5.optString("TMN layer elq"); System.out.println(nlq); System.out.println(elq); q[0] = URLDecoder.decode(nlq.substring(2, nlq.length() - 2)); q[1] = URLDecoder.decode(elq.substring(2, elq.length() - 2)); } catch (MalformedURLException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } catch (URISyntaxException ex) { Exceptions.printStackTrace(ex); } catch (JSONException ex) { Exceptions.printStackTrace(ex); } finally { try { if (response != null) { response.close(); } httpClient.close(); } catch (Exception ex) { ex.printStackTrace(); } } return q; }