List of usage examples for org.json.simple JSONArray size
public int size()
From source file:crossbear.convergence.ConvergenceConnector.java
/** * Transfer the Notary's answer from a JSON-representation into a HashSet of ConvergenceCertObservation * /* w w w. j a va 2 s . c o m*/ * @param notaryAnswer The Response-String that the Notary sent as an answer. It should contain a JSON-encoded list of ConvergenceCertificateObservations * @param hostPort The Hostname and port of the server on which the information about the certificate observations is desired. * @return The Notary's answer as a Set of ConvergenceCertObservations * @throws ParseException */ private static HashSet<ConvergenceCertObservation> parseNotaryAnswer(String notaryAnswer, String hostPort) throws ParseException { // Create a empty Set of ConvergenceCertObservations HashSet<ConvergenceCertObservation> re = new HashSet<ConvergenceCertObservation>(); // Try to decode the Notary's answer as a JSONObject JSONParser parser = new JSONParser(); JSONObject obj = (JSONObject) parser.parse(notaryAnswer); // If that worked extract the field called fingerprintList (which is basically a list of ConvergenceCertObservations in JSON encoding) JSONArray array = (JSONArray) obj.get("fingerprintList"); // Go through the list ... for (int i = 0; i < array.size(); i++) { // ... read each entry ... JSONObject entry = (JSONObject) array.get(i); // .. extract its content ... byte[] fingerprint = Message.hexStringToByteArray(((String) entry.get("fingerprint")).replace(":", "")); JSONObject ts = (JSONObject) entry.get("timestamp"); Timestamp firstObservation = new Timestamp(1000 * Long.valueOf((String) ts.get("start"))); Timestamp lastObservation = new Timestamp(1000 * Long.valueOf((String) ts.get("finish"))); Timestamp lastUpdate = new Timestamp(System.currentTimeMillis()); // ... and create a new ConvergenceCertObservation-object based on that content. 
re.add(new ConvergenceCertObservation(hostPort, Message.byteArrayToHexString(fingerprint), firstObservation, lastObservation, lastUpdate)); } // Finally return the Set containing all of the extracted ConvergenceCertObservations. return re; }
From source file:com.nubits.nubot.options.ParseOptions.java
private static ArrayList parseBackupFeeds(JSONObject optionsJSON) throws NuBotConfigException { ArrayList backupFeeds = new ArrayList<>(); //Iterate on backupFeeds JSONArray bfeeds = null; try {/*w w w .jav a 2 s . c om*/ bfeeds = (JSONArray) getIgnoreCase(optionsJSON, backupfeeds); } catch (Exception e) { throw new NuBotConfigException("can't parse array " + e); } if (bfeeds.size() < 2) { throw new NuBotConfigException("The bot requires at least two backup data feeds to run"); } for (int i = 0; i < bfeeds.size(); i++) { try { String feedname = (String) bfeeds.get(i); if (!FeedFacade.isValidFeed(feedname)) throw new NuBotConfigException("invalid feed configured"); else backupFeeds.add(feedname); } catch (JSONException ex) { throw new NuBotConfigException("parse feeds json error" + ex); } } return backupFeeds; }
From source file:com.netbase.insightapi.bestpractice.TopicDownloader.java
/**
 * Download all the documents of a topic, in sequential order, by
 * publication date.
 *
 * @param masterQuery template query; cloned before use, never modified
 * @param user channel that executes the queries against the Insight API
 * @param startTimestamp inclusive
 * @param endTimestamp exclusive
 * @param handler receives each batch of downloaded documents
 * @throws InterruptedException
 * @throws InsightAPIQueryException
 */
public static void downloadHistory(InsightAPIQuery masterQuery, UserChannel user, int startTimestamp,
        int endTimestamp, ResultHandler handler) throws InterruptedException, InsightAPIQueryException {

    // clone the original query so we don't change it
    InsightAPIQuery query = new InsightAPIQuery(masterQuery);

    // force the parameters we rely on, leaving the others set by caller
    query.setParameter("sort", "timestamp");
    query.setOp("retrieveDocuments");

    // The caller can set "sizeNeeded" to any legal value, particularly for
    // testing purposes. In production, bigger is better unless we start
    // experiencing timeout or communication reliability issues.
    if (query.getParameters("sizeNeeded") == null)
        query.setParameter("sizeNeeded", 2000);

    /*
     * each call to the Insight API will return a (typically small) number
     * of documents that we already received in the prior call. This is
     * because we start the time range for call "n+1" with the highest
     * timestamp received during call "n". We do this because we are not
     * guaranteed to have received *all* of the documents containing the
     * highest timestamp.
     *
     * Timestamp resolution is 1/10 second; so, typically, we'll receive
     * exactly one document at the end of call "n" and the beginning of
     * "n+1".
     *
     * This set arranges for us to ignore the overlapped documents.
     */
    Set<String> docIdsAlreadySeen = new HashSet<String>();

    /*
     * the query for the first request covers the entire span for the
     * download. Since we're sorting and filtering by timestamp, we'll get
     * the earliest documents in the range.
     */
    query.setPublishedTimestampRange(startTimestamp, endTimestamp);

    while (true) {
        InsightAPIQuery q = new InsightAPIQuery(query);

        // run the query and toss an exception if it didn't work
        user.run(q);
        q.checkSuccess();

        // get the parsed json result
        JSONObject jsonResult = (JSONObject) q.getParsedContent();

        // get the array of documents
        JSONArray docs = (JSONArray) jsonResult.get("documents");

        // no documents at all? We're done
        if (docs == null || docs.size() == 0)
            break;

        // traverse the beginning of the list, counting up the duplicates
        // (documents whose docIDs we already processed in the prior batch)
        int first = 0;
        while (first < docs.size()) {
            JSONObject doc = (JSONObject) docs.get(first);
            String docID = (String) getDocProperty(doc, "docID");
            if (!docIdsAlreadySeen.contains(docID))
                break;
            first++;
        }

        // all duplicates? we're done.
        if (first >= docs.size())
            break;

        // call the ResultHandler to process the documents, beginning
        // with the first unique one
        handler.handleResult(docs, first);

        int last = docs.size() - 1;
        docIdsAlreadySeen.clear();

        // get the timestamp of the last document received
        int lastTimestamp = ((Number) getDocProperty(docs.get(last), "timestamp")).intValue();

        // if it's later than (shouldn't be) or equal to (could be) the
        // end of the requested range, we're done
        if (lastTimestamp >= endTimestamp)
            break;

        /*
         * traverse backwards through the list from the end, looking for the
         * next-lower timestamp. Write down all the docIDs of these
         * documents, because we're going to see them again at the beginning
         * of the next query
         */
        while (last >= 0
                && ((Number) getDocProperty(docs.get(last), "timestamp")).intValue() == lastTimestamp) {
            docIdsAlreadySeen.add((String) getDocProperty(docs.get(last), "docID"));
            last--;
        }

        /*
         * If we get through this loop with last < 0, it means that the
         * entire block of documents we received had the same timestamp.
         * This is a failure of this algorithm.
         *
         * For this to happen, it means that the topic contains more than
         * query.sizeNeeded (current max: 2000) documents with publication
         * timestamps in the same 1/10 second.
         *
         * We have no choice but to increment the timestamp by 1/10 of a
         * second and move on. If we don't, we'll keep getting the same
         * result in an infinite loop.
         */
        if (last < 0) {
            user.logWarning(query.getSerial() + " too many docs with same timestamp=" + lastTimestamp
                    + ", num of docs=" + docs.size());
            docIdsAlreadySeen.clear();
            lastTimestamp++;
        }

        // set the query's timestamp range to start with the last timestamp
        // we received, and rinse and repeat
        query.setPublishedTimestampRange(lastTimestamp, endTimestamp);
    }
}
From source file:at.ac.tuwien.dsg.rSybl.planningEngine.staticData.ActionEffects.java
public static HashMap<String, List<ActionEffect>> getActionConditionalEffects() { if (applicationSpecificActionEffects.isEmpty() && defaultActionEffects.isEmpty()) { PlanningLogger.logger.info("~~~~~~~~~~Action effects is empty, reading the effects ! "); JSONParser parser = new JSONParser(); try {// ww w. ja va2s . c o m InputStream inputStream = Configuration.class.getClassLoader() .getResourceAsStream(Configuration.getEffectsPath()); Object obj = parser.parse(new InputStreamReader(inputStream)); JSONObject jsonObject = (JSONObject) obj; for (Object actionName : jsonObject.keySet()) { String myaction = (String) actionName; JSONObject object = (JSONObject) jsonObject.get(myaction); for (Object actions : object.keySet()) { ActionEffect actionEffect = new ActionEffect(); actionEffect.setActionType((String) myaction); actionEffect.setActionName((String) actions); JSONObject scaleinDescription = (JSONObject) object.get(actions); if (scaleinDescription.containsKey("conditions")) { JSONArray conditions = (JSONArray) jsonObject.get("conditions"); for (int i = 0; i < conditions.size(); i++) { actionEffect.addCondition((String) conditions.get(i)); } } String targetUnit = (String) scaleinDescription.get("targetUnit"); actionEffect.setTargetedEntityID(targetUnit); JSONObject effects = (JSONObject) scaleinDescription.get("effects"); for (Object effectPerUnit : effects.keySet()) { //System.out.println(effects.toString()); String affectedUnit = (String) effectPerUnit; JSONObject metriceffects = (JSONObject) effects.get(affectedUnit); for (Object metric : metriceffects.keySet()) { String metricName = (String) metric; try { actionEffect.setActionEffectForMetric(metricName, (Double) metriceffects.get(metricName), affectedUnit); } catch (Exception e) { actionEffect.setActionEffectForMetric(metricName, ((Long) metriceffects.get(metricName)).doubleValue(), affectedUnit); } } } if (applicationSpecificActionEffects.get(actionEffect.getTargetedEntityID()) == null) { List<ActionEffect> l 
= new ArrayList<ActionEffect>(); l.add(actionEffect); applicationSpecificActionEffects.put(actionEffect.getTargetedEntityID(), l); } else { applicationSpecificActionEffects.get(actionEffect.getTargetedEntityID()) .add(actionEffect); } } } } catch (Exception e) { PlanningLogger.logger.info("~~~~~~~~~~Retrying reading the effects "); parser = new JSONParser(); try { InputStream inputStream = Configuration.class.getClassLoader() .getResourceAsStream(Configuration.getEffectsPath()); Object obj = parser.parse(new InputStreamReader(inputStream)); JSONObject jsonObject = (JSONObject) obj; for (Object actionName : jsonObject.keySet()) { String myaction = (String) actionName; ActionEffect actionEffect = new ActionEffect(); actionEffect.setActionType((String) myaction); actionEffect.setActionName((String) myaction); JSONObject object = (JSONObject) jsonObject.get(myaction); JSONObject metrics = (JSONObject) object.get("effects"); for (Object me : metrics.keySet()) { String metric = (String) me; Double metricEffect = (Double) metrics.get(metric); actionEffect.setActionEffectForMetric(metric, metricEffect, ""); } defaultActionEffects.put(myaction, actionEffect); } } catch (Exception ex) { PlanningLogger.logger .error("Error when reading the effects!!!!!!!!!!!!!!!!!!" + ex.getMessage()); } } } return applicationSpecificActionEffects; }
From source file:de.minestar.minestarlibrary.utils.PlayerUtils.java
public static String getPlayerNameFromMojang(String uuid) { JSONArray array = getHTTPGetRequestAsArray("https://api.mojang.com/user/profiles/" + uuid + "/names"); JSONObject object = (JSONObject) array.get(array.size() - 1); return (String) object.get("name"); }
From source file:net.drgnome.virtualpack.util.Util.java
public static boolean hasUpdate(int projectID, String version) { try {/*from w w w . j a v a2s . c o m*/ HttpURLConnection con = (HttpURLConnection) (new URL( "https://api.curseforge.com/servermods/files?projectIds=" + projectID)).openConnection(); con.setConnectTimeout(5000); con.setRequestMethod("GET"); con.setRequestProperty("User-Agent", "Mozilla/4.0 (compatible; JVM)"); con.setRequestProperty("Pragma", "no-cache"); con.connect(); JSONArray json = (JSONArray) JSONValue.parse(new InputStreamReader(con.getInputStream())); String[] cdigits = ((String) ((JSONObject) json.get(json.size() - 1)).get("name")).toLowerCase() .split("\\."); String[] vdigits = version.toLowerCase().split("\\."); int max = vdigits.length > cdigits.length ? cdigits.length : vdigits.length; int a; int b; for (int i = 0; i < max; i++) { a = b = 0; try { a = Integer.parseInt(cdigits[i]); } catch (Exception e1) { char[] c = cdigits[i].toCharArray(); for (int j = 0; j < c.length; j++) { a += (c[j] << ((c.length - (j + 1)) * 8)); } } try { b = Integer.parseInt(vdigits[i]); } catch (Exception e1) { char[] c = vdigits[i].toCharArray(); for (int j = 0; j < c.length; j++) { b += (c[j] << ((c.length - (j + 1)) * 8)); } } if (a > b) { return true; } else if (a < b) { return false; } else if ((i == max - 1) && (cdigits.length > vdigits.length)) { return true; } } } catch (Exception e) { } return false; }
From source file:IrqaQuery.java
/**
 * Builds a retrieval-based train/eval file for one WikiQA split.
 *
 * Reads the split's tab-separated question/sentence/label file and its raw
 * answer list, runs each question as a BM25 query against the index under
 * {@code basedir/index_all<indexpath>}, and writes one tab-separated line
 * (question, sentence, label) per kept sentence to
 * {@code newsplit<indexpath>_<set>.txt}: sentences from the gold paragraph keep
 * their gold labels, sentences from other retrieved documents are labeled "0".
 *
 * @param basedir root directory holding the index, stopwords and data files
 * @param indexpath suffix selecting which index directory to query
 * @param set split name (e.g. train/dev/test) used to build file names
 * @param lookup_sent maps document IDs to their JSON array of sentences
 * @throws Exception on any I/O, parse, or query failure
 */
public static void pipeline(String basedir, String indexpath, String set, JSONObject lookup_sent)
        throws Exception {
    System.out.println(set + " started...");
    String index = basedir + "/index_all" + indexpath + "/";
    String stopwords = basedir + "/stopwords.txt";
    IrqaQuery lp = new IrqaQuery();
    String answer_filename = String.format(basedir + "/stats/data_for_analysis/newTACL/%s_raw_list.json", set);
    String file = String.format(basedir + "/stats/data_for_analysis/newTACL/WikiQASent-%s.txt", set);
    // String lookup_8kfn = basedir+"/data/wikilookup_8k.json";
    String documents2_fn = basedir + "/data/documents2.json";

    JSONParser parser = new JSONParser();
    JSONArray answer_list = (JSONArray) parser.parse(new FileReader(answer_filename));
    // Object obj2 = parser.parse(new FileReader(lookup_8kfn));
    // JSONObject lookup_8k = (JSONObject) obj2;
    Object obj3 = parser.parse(new FileReader(documents2_fn));
    JSONArray documents2 = (JSONArray) obj3;

    List<String> questions = new ArrayList<>();
    BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
    String outfilename = String.format(basedir + "/stats/data_for_analysis/newTACL/newsplit%s_%s.txt",
            indexpath, set);
    BufferedWriter outfile = new BufferedWriter(new FileWriter(outfilename));

    int numline = 0;
    // Per-question sentence lists and label lists, parallel to `questions`.
    ArrayList<ArrayList<String>> sentlistAll = new ArrayList<ArrayList<String>>();
    ArrayList<ArrayList<String>> alistAll = new ArrayList<ArrayList<String>>();
    try {
        String r;
        String cquestion = "";
        ArrayList<String> sentlist = new ArrayList<>();
        ArrayList<String> alist = new ArrayList<>();
        // Each input line is: question \t sentence \t label. Consecutive lines
        // with the same question are grouped into one sentlist/alist pair.
        while ((r = br.readLine()) != null) {
            numline++;
            String[] line = r.split("\t");
            if (cquestion.compareTo(line[0]) != 0) {
                // New question begins: flush the previous group (if any).
                if (cquestion.compareTo("") != 0) {
                    sentlistAll.add(sentlist);
                    alistAll.add(alist);
                    questions.add(cquestion);
                }
                sentlist = new ArrayList<>();
                alist = new ArrayList<>();
                sentlist.add(line[1]);
                alist.add(line[2]);
                cquestion = line[0];
            } else {
                sentlist.add(line[1]);
                alist.add(line[2]);
            }
        }
        // Flush the final group.
        sentlistAll.add(sentlist);
        alistAll.add(alist);
        questions.add(cquestion);
    } finally {
        br.close();
    }

    System.out.println(questions.size());
    for (int i = 0; i < questions.size(); i++) {
        String query = questions.get(i);
        // Retrieve the top-5 documents for this question with BM25.
        List<Document> docs = lp.query(index, stopwords, query, 5, "BM25");
        // Object o = (Object) answer_list.get(0);
        JSONObject rl = (JSONObject) answer_list.get(i);
        String gold_pid = (String) rl.get("paragraph_id");
        // String gold_q =(String) rl.get("question");
        for (Document d : docs) {
            String docid = d.get("docid");
            if (gold_pid.compareTo(docid) == 0) {
                // Retrieved the gold paragraph: emit its sentences with gold labels,
                // skipping empty/whitespace-only/quote-only sentences.
                for (int j = 0; j < sentlistAll.get(i).size(); j++) {
                    if (sentlistAll.get(i).get(j).length() < 1
                            || sentlistAll.get(i).get(j).compareTo(" ") == 0
                            || sentlistAll.get(i).get(j).compareTo(" ") == 0
                            || sentlistAll.get(i).get(j).compareTo("''") == 0
                            || sentlistAll.get(i).get(j).compareTo(" ") == 0)
                        continue;
                    String outstring = String.format("%s\t%s\t%s\n", query, sentlistAll.get(i).get(j),
                            alistAll.get(i).get(j));
                    outfile.write(outstring);
                }
            } else {
                // Non-gold document: look up its sentences and emit them as negatives ("0").
                // JSONArray sents = (JSONArray) lookup_sent.get("Timeline_of_classical_mechanics-Abstract");
                JSONArray sents = (JSONArray) lookup_sent.get(docid);
                if (sents == null) {
                    System.out.println("noway, " + docid + "\n");
                } else {
                    for (int kk = 0; kk < sents.size(); kk++) {
                        if (sents.get(kk).toString().length() < 1
                                || sents.get(kk).toString().compareTo(" ") == 0
                                || sents.get(kk).toString().compareTo(" ") == 0
                                || sents.get(kk).toString().compareTo("''") == 0
                                || sents.get(kk).toString().compareTo(" ") == 0)
                            continue;
                        String outstring = String.format("%s\t%s\t%s\n", query, sents.get(kk).toString(), "0");
                        outfile.write(outstring);
                        // System.out.printf("%s\t%s\t%s\n", query, sents.get(kk).toString(), "0");
                        // System.out.println(sents.get(kk));
                    }
                }
            }
        }
    }
    outfile.close();
    // System.out.println(raw_list.size());
    System.out.println(numline);
}
From source file:mas.MAS_TOP_PAPERS.java
public static String getConferenceName(int id) { String url = "https://api.datamarket.azure.com/MRC/MicrosoftAcademic/v2/Conference?$filter=ID%20eq%20" + id + "&$format=json"; while (true) { try {/*from ww w. jav a 2 s.c o m*/ StringBuilder csv_str = new StringBuilder(); final String json = getData2(url, 0); JSONParser parser = new JSONParser(); JSONObject jsonObj = (JSONObject) parser.parse(json); final JSONObject dObj = (JSONObject) jsonObj.get("d"); final JSONArray results = (JSONArray) dObj.get("results"); if (results.size() == 0) { System.out.println("results is Empty, break."); break; } else { // System.out.println("Conf: results# = " + results.size()); for (Object conf : results) { JSONObject confObj = (JSONObject) conf; String shortName = normalized((String) confObj.get("ShortName")); if (!shortName.equals("")) { return shortName; } else { String fullName = normalized((String) confObj.get("FullName")); return fullName; } } } // System.out.println("json= " + jsonObj); } catch (ParseException ex) { System.out.println(ex.getMessage() + " Cause: " + ex.getCause()); Logger.getLogger(MAS_TOP_PAPERS.class.getName()).log(Level.SEVERE, null, ex); try { Thread.sleep(5000L); } catch (InterruptedException ex1) { Logger.getLogger(MAS_TOP_PAPERS.class.getName()).log(Level.SEVERE, null, ex1); } } } return null; }
From source file:mas.MAS_TOP_PAPERS.java
public static void extractConference(int start) { String file_prefix = "conferences"; String csv_file_path = "data/" + file_prefix + ".csv"; String json_dump_file_path = "data/" + file_prefix + "_dump.json"; String url = "https://api.datamarket.azure.com/MRC/MicrosoftAcademic/v2/Conference?"; url += "$format=json"; while (true) { try {/* w ww .j a v a 2 s . c o m*/ StringBuilder csv_str = new StringBuilder(); final String json = getData2(url, start); JSONParser parser = new JSONParser(); JSONObject jsonObj = (JSONObject) parser.parse(json); final JSONObject dObj = (JSONObject) jsonObj.get("d"); final JSONArray results = (JSONArray) dObj.get("results"); if (results.size() == 0) { System.out.println("results is Empty, break."); break; } else { System.out.println("Conference: start = " + start + " results# = " + results.size()); for (Object paper : results) { JSONObject paperObj = (JSONObject) paper; Long id = (Long) paperObj.get("ID"); String shortName = normalized((String) paperObj.get("ShortName")); String fullName = normalized((String) paperObj.get("FullName")); String homepage = normalized((String) paperObj.get("Homepage")); csv_str.append(id).append(SEPERATOR).append(shortName).append(SEPERATOR).append(fullName) .append(SEPERATOR).append(homepage).append(NEWLINE); } IOUtils.writeDataIntoFile(json + "\n", json_dump_file_path); IOUtils.writeDataIntoFile(csv_str.toString(), csv_file_path); start += 100; Thread.sleep(300L); } // System.out.println("json= " + jsonObj); } catch (ParseException ex) { Logger.getLogger(MAS_TOP_PAPERS.class.getName()).log(Level.SEVERE, null, ex); } catch (InterruptedException ex) { Logger.getLogger(MAS_TOP_PAPERS.class.getName()).log(Level.SEVERE, null, ex); } } }
From source file:mas.MAS_TOP_PAPERS.java
public static void extractJournal(int start) { String file_prefix = "journals"; String csv_file_path = "data/" + file_prefix + ".csv"; String json_dump_file_path = "data/" + file_prefix + "_dump.json"; String url = "https://api.datamarket.azure.com/MRC/MicrosoftAcademic/v2/Journal?"; url += "$format=json"; while (true) { try {/* www . j av a2 s .co m*/ StringBuilder csv_str = new StringBuilder(); final String json = getData2(url, start); JSONParser parser = new JSONParser(); JSONObject jsonObj = (JSONObject) parser.parse(json); final JSONObject dObj = (JSONObject) jsonObj.get("d"); final JSONArray results = (JSONArray) dObj.get("results"); if (results.size() == 0) { System.out.println("results is Empty, break."); break; } else { System.out.println("Journals: start = " + start + " results# = " + results.size()); for (Object paper : results) { JSONObject paperObj = (JSONObject) paper; Long id = (Long) paperObj.get("ID"); String shortName = normalized((String) paperObj.get("ShortName")); String fullName = normalized((String) paperObj.get("FullName")); String homepage = normalized((String) paperObj.get("Homepage")); csv_str.append(id).append(SEPERATOR).append(shortName).append(SEPERATOR).append(fullName) .append(SEPERATOR).append(homepage).append(NEWLINE); } IOUtils.writeDataIntoFile(json + "\n", json_dump_file_path); IOUtils.writeDataIntoFile(csv_str.toString(), csv_file_path); start += 100; Thread.sleep(300L); } // System.out.println("json= " + jsonObj); } catch (ParseException ex) { Logger.getLogger(MAS_TOP_PAPERS.class.getName()).log(Level.SEVERE, null, ex); } catch (InterruptedException ex) { Logger.getLogger(MAS_TOP_PAPERS.class.getName()).log(Level.SEVERE, null, ex); } } }