List of usage examples for java.util.LinkedList.size()
public int size()
Returns the number of elements in this list.
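Before the project examples below, here is a minimal, self-contained sketch of size() itself (the class and variable names are illustrative only, not taken from any of the projects that follow). size() runs in O(1): LinkedList maintains its element count as items are added and removed, rather than walking the list.

import java.util.LinkedList;

public class LinkedListSizeDemo {
    public static void main(String[] args) {
        LinkedList<String> keys = new LinkedList<>();
        System.out.println(keys.size()); // 0: a new list is empty

        keys.add("alpha");
        keys.add("beta");
        System.out.println(keys.size()); // 2: size() reflects every addition

        keys.poll(); // removes the head element ("alpha")
        System.out.println(keys.size()); // 1: and every removal; the count is maintained in O(1)
    }
}

The real-world examples below mostly use size() the same way: to compare element counts, to test for emptiness, or to decide when a batch is full.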
From source file:com.funambol.json.api.dao.FunambolJSONApiDAO.java
/**
 * Get all the keys of the updated items since a specified time.
 *
 * @return the server response containing the keys
 */
public String getUpdatedSyncItemKeys() throws OperationFailureException {
    if (log.isInfoEnabled()) {
        log.info("Executing method: getUpdatedSyncItemKeys() from:"
                + configLoader.getServerTimeStr(lastSyncTime) + " to :"
                + configLoader.getServerTimeStr(beginSyncTime));
    }
    try {
        String req = configLoader.getServerProperty(Def.SERVER_URI) + "/" + getSourceName() + "/keys/updated";
        String response = sendGetRequest(req, configLoader.getServerTime(lastSyncTime),
                configLoader.getServerTime(beginSyncTime));
        if (log.isTraceEnabled()) {
            log.trace("RESPONSE getUpdatedSyncItemKeys: \n" + response);
        }
        JSONArray arrayOfKeys = JSONObject.fromObject(response).getJSONObject("data").getJSONArray("keys");
        LinkedList<String> items = configLoader.getUpdatedItems(sourceName, testName);
        if (expectItemsFromServer) {
            if (items.size() != arrayOfKeys.size()) {
                log.error("------------- ERROR [" + sourceName + ":" + testName + "]");
                log.error("ERROR Expected :" + items.size() + " found " + arrayOfKeys.size());
                log.error("ERROR Expected :" + items);
                log.error("ERROR Found :" + arrayOfKeys);
            }
        } else {
            if (arrayOfKeys.size() != 0) {
                log.error("------------- ERROR [" + sourceName + ":" + testName + "]");
                log.error("ERROR : the syncType specified does not expect any keys returned");
            }
        }
        return response;
    } catch (IOOperationException ex) {
        throw new OperationFailureException("Error retrieving item ", ex);
    }
}
From source file:com.funambol.json.api.dao.FunambolJSONApiDAO.java
/**
 * Get all the keys of the deleted items since a specified time.
 *
 * @return the server response containing the keys
 */
public String getDeletedSyncItemKeys() throws OperationFailureException {
    if (log.isInfoEnabled()) {
        log.info("Executing method: getDeletedSyncItemKeys() from:"
                + configLoader.getServerTimeStr(lastSyncTime) + " to :"
                + configLoader.getServerTimeStr(beginSyncTime));
    }
    try {
        String req = configLoader.getServerProperty(Def.SERVER_URI) + "/" + getSourceName() + "/keys/deleted";
        //String response = sendGetRequest(req, configLoader.getServerTime(startTestTime), configLoader.getServerTime(beginSyncTestTime));
        String response = sendGetRequest(req, configLoader.getServerTime(lastSyncTime),
                configLoader.getServerTime(beginSyncTime));
        if (log.isTraceEnabled()) {
            log.trace("RESPONSE getDeletedSyncItemKeys: \n" + response);
        }
        JSONArray arrayOfKeys = JSONObject.fromObject(response).getJSONObject("data").getJSONArray("keys");
        LinkedList<String> items = configLoader.getDeletedItems(sourceName, testName);
        if (expectItemsFromServer) {
            if (items.size() != arrayOfKeys.size()) {
                log.error("------------- ERROR [" + sourceName + ":" + testName + "]");
                log.error("ERROR Expected :" + items.size() + " found " + arrayOfKeys.size());
                log.error("ERROR client :" + items);
                log.error("ERROR server :" + arrayOfKeys);
            }
        } else {
            if (arrayOfKeys.size() != 0) {
                log.error("------------- ERROR [" + sourceName + ":" + testName + "]");
                log.error("ERROR : the syncType specified does not expect any keys returned");
            }
        }
        return response;
    } catch (IOOperationException ex) {
        throw new OperationFailureException("Error retrieving item ", ex);
    }
}
From source file:com.att.nsa.cambria.service.impl.MMServiceImpl.java
private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition,
        boolean chunked, String mediaType)
        throws ConfigDbException, AccessDeniedException, TopicExistsException, CambriaApiException, IOException {
    final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();

    // setup the event set
    final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);

    // start processing, building a batch to push to the backend
    final long startMs = System.currentTimeMillis();
    long count = 0;
    long maxEventBatch = 1024 * 16;
    String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
    if (null != batchlen)
        maxEventBatch = Long.parseLong(batchlen);
    // long maxEventBatch = ctx.getConfigReader().getSettings().getLong(BATCH_LENGTH, 1024 * 16);
    final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
    final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
    try {
        // for each message...
        Publisher.message m = null;
        while ((m = events.next()) != null) {
            // add the message to the batch
            batch.add(m);
            final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
                    m.getMessage());
            kms.add(data);
            // check if the batch is full
            final int sizeNow = batch.size();
            if (sizeNow > maxEventBatch) {
                ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                kms.clear();
                batch.clear();
                metricsSet.publishTick(sizeNow);
                count += sizeNow;
            }
        }

        // send the pending batch
        final int sizeNow = batch.size();
        if (sizeNow > 0) {
            ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
            kms.clear();
            batch.clear();
            metricsSet.publishTick(sizeNow);
            count += sizeNow;
        }

        final long endMs = System.currentTimeMillis();
        final long totalMs = endMs - startMs;
        LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);

        // build a response
        final JSONObject response = new JSONObject();
        response.put("count", count);
        response.put("serverTimeMs", totalMs);
        // DMaaPResponseBuilder.respondOk(ctx, response);
    } catch (Exception excp) {
        int status = HttpStatus.SC_NOT_FOUND;
        String errorMsg = null;
        if (excp instanceof CambriaApiException) {
            status = ((CambriaApiException) excp).getStatus();
            JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
            JSONObject errObject = new JSONObject(jsonTokener);
            errorMsg = (String) errObject.get("message");
        }
        ErrorResponse errRes = new ErrorResponse(status,
                DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount()
                        + count + "." + errorMsg,
                null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(), null,
                null);
        LOG.info(errRes.toString());
        throw new CambriaApiException(errRes);
    }
}
From source file:com.rapidminer.operator.preprocessing.discretization.MinimalEntropyDiscretization.java
private Double getMinEntropySplitpoint(LinkedList<double[]> truncatedExamples, Attribute label) {
    HashSet<Double> candidateSplitpoints = new HashSet<Double>();
    Iterator<double[]> it = truncatedExamples.iterator();
    int[] totalLabelDistribution = new int[label.getMapping().size()]; // Label distribution for all examples.
    while (it.hasNext()) { // Get splitpoint candidates and total label distribution.
        double[] attributeLabelPair = it.next();
        candidateSplitpoints.add(attributeLabelPair[0]);
        int labelIndex = (int) attributeLabelPair[1];
        totalLabelDistribution[labelIndex]++;
    }
    double[] totalFrequencies = new double[label.getMapping().size()];
    for (int i = 0; i < label.getMapping().size(); i++) {
        totalFrequencies[i] = (double) totalLabelDistribution[i] / (double) truncatedExamples.size();
    }
    double totalEntropy = 0.0d;
    for (int i = 0; i < label.getMapping().size(); i++) {
        totalEntropy -= totalFrequencies[i] * MathFunctions.ld(totalFrequencies[i]);
    }
    double minClassInformationEntropy = totalEntropy;
    double bestSplitpoint = Double.NaN;
    double bestSplitpointEntropy1 = Double.POSITIVE_INFINITY;
    double bestSplitpointEntropy2 = Double.POSITIVE_INFINITY;
    int k1 = 0; // Number of different class labels in class 1.
    int k2 = 0; // Number of different class labels in class 2.

    for (double currentSplitpoint : candidateSplitpoints) { // Test every value as splitpoint.
        // Initialize.
        int s1 = 0; // Instances in partition 1.
        int s2 = 0; // Instances in partition 2.
        k1 = 0;
        k2 = 0;
        int[] labelDistribution1 = new int[label.getMapping().size()]; // Label distribution in class 1.
        int[] labelDistribution2 = new int[label.getMapping().size()]; // Label distribution in class 2.

        // Determine the class of each instance and the corresponding label distribution.
        for (double[] attributeLabelPair : truncatedExamples) {
            double valueToCompare = attributeLabelPair[0];
            int labelIndex = (int) attributeLabelPair[1];
            if (valueToCompare <= currentSplitpoint) {
                // Partition 1 gets all instances with values less or equal to the current splitpoint.
                s1++;
                labelDistribution1[labelIndex]++;
            } else {
                // Partition 2 gets all instances with values greater than the current split point.
                s2++;
                labelDistribution2[labelIndex]++;
            }
        }

        // Calculate frequencies and number of different labels for this splitpoint in each class.
        double[] frequencies1 = new double[label.getMapping().size()];
        double[] frequencies2 = new double[label.getMapping().size()];
        for (int i = 0; i < label.getMapping().size(); i++) {
            frequencies1[i] = (double) labelDistribution1[i] / (double) s1;
            frequencies2[i] = (double) labelDistribution2[i] / (double) s2;
            if (labelDistribution1[i] > 0) { // Label value i exists in class 1.
                k1++;
            }
            if (labelDistribution2[i] > 0) { // Label value i exists in class 2.
                k2++;
            }
        }

        /*
         * Calculate entropies.
         *
         * In the case of p(x_i) = 0 for some i, the value of the corresponding summand
         * 0 * ld(0) is taken to be 0, which is consistent with the well-known limit:
         *
         * lim_(p -> 0+) p * log(p) = 0
         */
        double entropy1 = 0.0d;
        for (int i = 0; i < label.getMapping().size(); i++) {
            double frequency = frequencies1[i];
            // if frequency is zero, skip label
            if (frequency != 0.0d) {
                entropy1 -= frequency * MathFunctions.ld(frequency);
            }
        }
        double entropy2 = 0.0d;
        for (int i = 0; i < label.getMapping().size(); i++) {
            double frequency = frequencies2[i];
            // if frequency is zero, skip label
            if (frequency != 0.0d) {
                entropy2 -= frequency * MathFunctions.ld(frequency);
            }
        }
        double classInformationEntropy = ((double) s1 / (double) truncatedExamples.size()) * entropy1
                + ((double) s2 / (double) truncatedExamples.size()) * entropy2;
        if (classInformationEntropy < minClassInformationEntropy) {
            minClassInformationEntropy = classInformationEntropy;
            bestSplitpoint = currentSplitpoint;
            bestSplitpointEntropy1 = entropy1;
            bestSplitpointEntropy2 = entropy2;
        }
    }

    // Calculate the termination criterion. Return null if the termination criterion is met.
    double gain = totalEntropy - minClassInformationEntropy;
    double delta = MathFunctions.ld(Math.pow(3.0, label.getMapping().size()) - 2)
            - (label.getMapping().size() * totalEntropy - k1 * bestSplitpointEntropy1
                    - k2 * bestSplitpointEntropy2);
    if (gain >= MathFunctions.ld(truncatedExamples.size() - 1) / truncatedExamples.size()
            + delta / truncatedExamples.size()) {
        return Double.valueOf(bestSplitpoint);
    } else {
        return null;
    }
}
From source file:CB_Core.Api.SearchForGeocaches_Core.java
String ParseJsonResult(Search search, CB_List<Cache> cacheList, ArrayList<LogEntry> logList,
        ArrayList<ImageEntry> imageList, long gpxFilenameId, String result, byte apiStatus, boolean isLite) {
    // Parse JSON Result
    try {
        JSONTokener tokener = new JSONTokener(result);
        JSONObject json = (JSONObject) tokener.nextValue();
        JSONObject status = json.getJSONObject("Status");
        if (status.getInt("StatusCode") == 0) {
            result = "";
            JSONArray caches = json.getJSONArray("Geocaches");
            // Log.debug(log, "got " + caches.length() + " Caches from gc");
            for (int i = 0; i < caches.length(); i++) {
                JSONObject jCache = (JSONObject) caches.get(i);
                String gcCode = jCache.getString("Code");
                // Log.debug(log, "handling " + gcCode);
                String name = jCache.getString("Name");
                result += gcCode + " - " + name + "\n";
                Boolean CacheERROR = false;

                Cache cache = new Cache(true);
                cache.setArchived(jCache.getBoolean("Archived"));
                cache.setAttributesPositive(new DLong(0, 0));
                cache.setAttributesNegative(new DLong(0, 0));
                JSONArray jAttributes = jCache.getJSONArray("Attributes");
                for (int j = 0; j < jAttributes.length(); j++) {
                    JSONObject jAttribute = jAttributes.getJSONObject(j);
                    int AttributeTypeId = jAttribute.getInt("AttributeTypeID");
                    Boolean isOn = jAttribute.getBoolean("IsOn");
                    Attributes att = Attributes.getAttributeEnumByGcComId(AttributeTypeId);
                    if (isOn) {
                        cache.addAttributePositive(att);
                    } else {
                        cache.addAttributeNegative(att);
                    }
                }
                cache.setAvailable(jCache.getBoolean("Available"));
                cache.setDateHidden(new Date());
                try {
                    String dateCreated = jCache.getString("DateCreated");
                    int date1 = dateCreated.indexOf("/Date(");
                    int date2 = dateCreated.lastIndexOf("-");
                    String date = (String) dateCreated.subSequence(date1 + 6, date2);
                    cache.setDateHidden(new Date(Long.valueOf(date)));
                } catch (Exception exc) {
                    Log.err(log, "SearchForGeocaches_ParseDate", exc);
                }
                cache.setDifficulty((float) jCache.getDouble("Difficulty"));

                // Do not overwrite a "Found" flag possibly already stored in the database
                Boolean Favorite = LoadBooleanValueFromDB(
                        "select Favorit from Caches where GcCode = \"" + gcCode + "\"");
                cache.setFavorite(Favorite);

                // Do not overwrite a "Found" flag possibly already stored in the database
                Boolean Found = LoadBooleanValueFromDB(
                        "select found from Caches where GcCode = \"" + gcCode + "\"");
                if (!Found) {
                    cache.setFound(jCache.getBoolean("HasbeenFoundbyUser"));
                } else {
                    cache.setFound(true);
                }
                cache.setGcCode(jCache.getString("Code"));
                try {
                    cache.setGcId(jCache.getString("ID"));
                } catch (Exception e) {
                    // CacheERROR = true; would raise an error for every cache ???
                }
                cache.setGPXFilename_ID(gpxFilenameId);

                // Do not overwrite a "HasUserData" flag possibly already stored in the database
                Boolean userData = LoadBooleanValueFromDB(
                        "select HasUserData from Caches where GcCode = \"" + gcCode + "\"");
                cache.setHasUserData(userData);
                if (!isLite) {
                    try {
                        cache.setHint(jCache.getString("EncodedHints"));
                    } catch (Exception e1) {
                        cache.setHint("");
                    }
                }
                cache.Id = Cache.GenerateCacheId(cache.getGcCode());
                cache.setListingChanged(false);
                if (!isLite) {
                    try {
                        cache.setLongDescription(jCache.getString("LongDescription"));
                    } catch (Exception e1) {
                        Log.err(log, "SearchForGeocaches_LongDescription:" + cache.getGcCode(), e1);
                        cache.setLongDescription("");
                    }
                    if (!jCache.getBoolean("LongDescriptionIsHtml")) {
                        cache.setLongDescription(
                                cache.getLongDescription().replaceAll("(\r\n|\n\r|\r|\n)", "<br />"));
                    }
                }
                cache.setName(jCache.getString("Name"));
                cache.setTourName("");
                cache.setNoteChecksum(0);
                cache.NumTravelbugs = jCache.getInt("TrackableCount");
                JSONObject jOwner = jCache.getJSONObject("Owner");
                cache.setOwner(jOwner.getString("UserName"));
                cache.setPlacedBy(cache.getOwner());
                try {
                    cache.Pos = new CoordinateGPS(jCache.getDouble("Latitude"), jCache.getDouble("Longitude"));
                } catch (Exception e) {
                }
                cache.Rating = 0;
                if (!isLite) {
                    try {
                        cache.setShortDescription(jCache.getString("ShortDescription"));
                    } catch (Exception e) {
                        Log.err(log, "SearchForGeocaches_shortDescription:" + cache.getGcCode(), e);
                        cache.setShortDescription("");
                    }
                    if (!jCache.getBoolean("ShortDescriptionIsHtml")) {
                        cache.setShortDescription(
                                cache.getShortDescription().replaceAll("(\r\n|\n\r|\r|\n)", "<br />"));
                    }
                }
                JSONObject jContainer = jCache.getJSONObject("ContainerType");
                int jSize = jContainer.getInt("ContainerTypeId");
                cache.Size = CacheSizes.parseInt(GroundspeakAPI.getCacheSize(jSize));
                cache.setSolverChecksum(0);
                cache.setTerrain((float) jCache.getDouble("Terrain"));
                cache.Type = CacheTypes.Traditional;
                try {
                    JSONObject jCacheType = jCache.getJSONObject("CacheType");
                    cache.Type = GroundspeakAPI.getCacheType(jCacheType.getInt("GeocacheTypeId"));
                } catch (Exception e) {
                    if (gcCode.equals("GC4K089")) {
                        cache.Type = CacheTypes.Giga;
                    } else {
                        cache.Type = CacheTypes.Undefined;
                    }
                }
                cache.setUrl(jCache.getString("Url"));
                cache.setApiStatus(apiStatus);

                // Do not overwrite a "Favorit" flag possibly already stored in the database
                Boolean fav = LoadBooleanValueFromDB(
                        "select favorit from Caches where GcCode = \"" + gcCode + "\"");
                cache.setFavorite(fav);

                // Check if own or found
                Boolean exclude = false;
                if (search.excludeFounds && cache.isFound())
                    exclude = true;
                if (search.excludeHides && cache.getOwner().equalsIgnoreCase(CB_Core_Settings.GcLogin.getValue()))
                    exclude = true;
                if (search.available && (cache.isArchived() || !cache.isAvailable()))
                    exclude = true;
                if (!CacheERROR && !exclude) {
                    cacheList.add(cache);

                    // insert Logs
                    JSONArray logs = jCache.getJSONArray("GeocacheLogs");
                    for (int j = 0; j < logs.length(); j++) {
                        JSONObject jLogs = (JSONObject) logs.get(j);
                        JSONObject jFinder = (JSONObject) jLogs.get("Finder");
                        JSONObject jLogType = (JSONObject) jLogs.get("LogType");
                        LogEntry logEntry = new LogEntry();
                        logEntry.CacheId = cache.Id;
                        logEntry.Comment = jLogs.getString("LogText");
                        logEntry.Finder = jFinder.getString("UserName");
                        logEntry.Id = jLogs.getInt("ID");
                        logEntry.Timestamp = new Date();
                        try {
                            String dateCreated = jLogs.getString("VisitDate");
                            int date1 = dateCreated.indexOf("/Date(");
                            int date2 = dateCreated.indexOf("-");
                            String date = (String) dateCreated.subSequence(date1 + 6, date2);
                            logEntry.Timestamp = new Date(Long.valueOf(date));
                        } catch (Exception exc) {
                            Log.err(log, "API", "SearchForGeocaches_ParseLogDate", exc);
                        }
                        logEntry.Type = LogTypes.GC2CB_LogType(jLogType.getInt("WptLogTypeId"));
                        logList.add(logEntry);
                    }

                    // insert Images
                    int imageListSizeOrg = imageList.size();
                    JSONArray images = jCache.getJSONArray("Images");
                    for (int j = 0; j < images.length(); j++) {
                        JSONObject jImage = (JSONObject) images.get(j);
                        ImageEntry image = new ImageEntry();
                        image.CacheId = cache.Id;
                        image.GcCode = cache.getGcCode();
                        image.Name = jImage.getString("Name");
                        image.Description = jImage.getString("Description");
                        // remove "/gc" to match the url used in the description
                        image.ImageUrl = jImage.getString("Url").replace("img.geocaching.com/gc/cache",
                                "img.geocaching.com/cache");
                        image.IsCacheImage = true;
                        imageList.add(image);
                    }
                    int imageListSizeGC = images.length();

                    // insert images from the cache description
                    LinkedList<String> allImages = null;
                    if (!search.isLite)
                        allImages = DescriptionImageGrabber.GetAllImages(cache);
                    int imageListSizeGrabbed = 0;
                    if (allImages != null && allImages.size() > 0) {
                        imageListSizeGrabbed = allImages.size();
                    }
                    while (allImages != null && allImages.size() > 0) {
                        String url = allImages.poll();
                        boolean found = false;
                        for (ImageEntry im : imageList) {
                            if (im.ImageUrl.equalsIgnoreCase(url)) {
                                found = true;
                                break;
                            }
                        }
                        if (!found) {
                            ImageEntry image = new ImageEntry();
                            image.CacheId = cache.Id;
                            image.GcCode = cache.getGcCode();
                            image.Name = url.substring(url.lastIndexOf("/") + 1);
                            image.Description = "";
                            image.ImageUrl = url;
                            image.IsCacheImage = true;
                            imageList.add(image);
                        }
                    }
                    log.debug("Merged imageList has " + imageList.size() + " Entrys (" + imageListSizeOrg + "/"
                            + imageListSizeGC + "/" + imageListSizeGrabbed + ")");

                    // insert Waypoints
                    JSONArray waypoints = jCache.getJSONArray("AdditionalWaypoints");
                    for (int j = 0; j < waypoints.length(); j++) {
                        JSONObject jWaypoints = (JSONObject) waypoints.get(j);
                        Waypoint waypoint = new Waypoint(true);
                        waypoint.CacheId = cache.Id;
                        try {
                            waypoint.Pos = new CoordinateGPS(jWaypoints.getDouble("Latitude"),
                                    jWaypoints.getDouble("Longitude"));
                        } catch (Exception ex) {
                            // no Coordinates -> Lat/Lon = 0/0
                            waypoint.Pos = new CoordinateGPS(0, 0);
                        }
                        waypoint.setTitle(jWaypoints.getString("Description"));
                        waypoint.setDescription(jWaypoints.getString("Comment"));
                        waypoint.Type = GroundspeakAPI.getCacheType(jWaypoints.getInt("WptTypeID"));
                        waypoint.setGcCode(jWaypoints.getString("Code"));
                        cache.waypoints.add(waypoint);
                    }

                    // User Waypoints - corrected coordinates from the Geocaching.com website
                    JSONArray userWaypoints = jCache.getJSONArray("UserWaypoints");
                    for (int j = 0; j < userWaypoints.length(); j++) {
                        JSONObject jUserWaypoint = (JSONObject) userWaypoints.get(j);
                        if (!jUserWaypoint.getString("Description").equals("Coordinate Override")) {
                            continue; // only corrected coordinates
                        }
                        Waypoint waypoint = new Waypoint(true);
                        waypoint.CacheId = cache.Id;
                        try {
                            waypoint.Pos = new CoordinateGPS(jUserWaypoint.getDouble("Latitude"),
                                    jUserWaypoint.getDouble("Longitude"));
                        } catch (Exception ex) {
                            // no Coordinates -> Lat/Lon = 0/0
                            waypoint.Pos = new CoordinateGPS(0, 0);
                        }
                        waypoint.setTitle(jUserWaypoint.getString("Description"));
                        waypoint.setDescription(jUserWaypoint.getString("Description"));
                        waypoint.Type = CacheTypes.Final;
                        waypoint.setGcCode("CO" + cache.getGcCode().substring(2, cache.getGcCode().length()));
                        cache.waypoints.add(waypoint);
                    }

                    // Update spoilers
                    actualizeSpoilerOfActualCache(cache);
                }

                // Notes
                Object note = jCache.get("GeocacheNote");
                if ((note != null) && (note instanceof String)) {
                    String s = (String) note;
                    System.out.println(s);
                    cache.setTmpNote(s);
                }
            }
            GroundspeakAPI.checkCacheStatus(json, isLite);
        } else {
            result = "StatusCode = " + status.getInt("StatusCode") + "\n";
            result += status.getString("StatusMessage") + "\n";
            result += status.getString("ExceptionDetails");
        }
    } catch (JSONException e) {
        Log.err(log, "SearchForGeocaches:ParserException: " + result, e);
    } catch (ClassCastException e) {
        Log.err(log, "SearchForGeocaches:ParserException: " + result, e);
    }
    return result;
}
From source file:net.dv8tion.jda.MessageHistory.java
/**
 * Queues the next set of Messages and returns them.
 * If the end of the chat was already reached, this function returns null.
 *
 * @param amount the amount of Messages to queue
 * @return a list of the next [amount] Messages (max), or null if at end of chat
 */
public List<Message> retrieve(int amount) {
    if (atEnd) {
        return null;
    }
    int toQueue;
    LinkedList<Message> out = new LinkedList<>();
    EntityBuilder builder = new EntityBuilder(api);
    while (amount > 0) {
        toQueue = Math.min(amount, 100);
        try {
            Requester.Response response = api.getRequester().get(Requester.DISCORD_API_PREFIX + "channels/"
                    + channelId + "/messages?limit=" + toQueue + (lastId != null ? "&before=" + lastId : ""));
            if (!response.isOk())
                throw new RuntimeException("Error fetching message-history for channel with id " + channelId
                        + "... Error: " + response.toString());
            JSONArray array = response.getArray();
            for (int i = 0; i < array.length(); i++) {
                out.add(builder.createMessage(array.getJSONObject(i)));
            }
            if (array.length() < toQueue) {
                atEnd = true;
                break;
            } else {
                lastId = out.getLast().getId();
            }
        } catch (Exception ex) {
            JDAImpl.LOG.log(ex);
            break;
        }
        amount -= toQueue;
    }
    if (out.size() == 0) {
        return null;
    }
    queued.addAll(out);
    return out;
}
From source file:fr.univlorraine.mondossierweb.views.RechercheRapideView.java
private List<ResultatDeRecherche> quickSearch(String valueString) {
    List<ResultatDeRecherche> listeReponses = new LinkedList<ResultatDeRecherche>();
    items.clear();
    String value = valueString;
    if (StringUtils.hasText(value) && value.length() > 2) {
        ///////////////////////////////////////////////////////
        // ElasticSearch call
        ///////////////////////////////////////////////////////
        // transform the search string as needed
        String valueselasticSearch = value;
        // valueselasticSearch = valueselasticSearch + "*";
        List<Map<String, Object>> lobjresult = ElasticSearchService.findObj(valueselasticSearch,
                Utils.NB_MAX_RESULT_QUICK_SEARCH * 5, true);

        // List of allowed types
        LinkedList<String> listeTypeAutorise = new LinkedList();
        if (casesAcocherComposantes.getValue()) {
            listeTypeAutorise.add(Utils.CMP);
        }
        if (casesAcocherVet.getValue()) {
            listeTypeAutorise.add(Utils.VET);
        }
        if (casesAcocherElp.getValue()) {
            listeTypeAutorise.add(Utils.ELP);
        }
        if (casesAcocherEtudiant.getValue()) {
            listeTypeAutorise.add(Utils.ETU);
        }

        ///////////////////////////////////////////////////////
        // collect the ElasticSearch objects
        ///////////////////////////////////////////////////////
        if (lobjresult != null && listeTypeAutorise.size() > 0) {
            for (Map<String, Object> obj : lobjresult) {
                if (listeReponses.size() < Utils.NB_MAX_RESULT_QUICK_SEARCH) {
                    if (obj != null) {
                        if (listeTypeAutorise.contains((String) obj.get(Utils.ES_TYPE))) {
                            if (listeReponses.size() > 0) {
                                boolean triOk = true;
                                int rang = 0;
                                // avoid duplicates
                                while (triOk && rang < listeReponses.size()) {
                                    // in quickSearch we use the description, not the label
                                    ResultatDeRecherche r = (ResultatDeRecherche) listeReponses.get(rang);
                                    if ((r.lib.toUpperCase())
                                            .equals((new ResultatDeRecherche(obj)).lib.toUpperCase())) {
                                        triOk = false;
                                    }
                                    rang++;
                                }
                                if (triOk) {
                                    // in quickSearch we use the description, not the label
                                    listeReponses.add(new ResultatDeRecherche(obj));
                                    items.add(new ResultatDeRecherche(obj));
                                }
                            } else {
                                // in quickSearch we use the description, not the label
                                listeReponses.add(new ResultatDeRecherche(obj));
                                items.add(new ResultatDeRecherche(obj));
                            }
                        }
                    }
                }
            }
        }
    }
    // return new ArrayList<Object>(listeReponses);
    return listeReponses;
}
From source file:fr.inria.soctrace.tools.importer.moca.core.MocaParser.java
/**
 * Recursively expand the hierarchy tree, following a top-down approach, by
 * creating new event producers that merge consecutive EP.
 *
 * @param eventProdToMerge
 *            the list of event producers that are consecutive
 * @param currentHierarchyDepth
 *            the current depth we are building in the hierarchy tree
 * @param ppid
 *            the parent id of the created nodes
 * @param dividingFactor
 *            the factor into which the result will be divided
 * @return the list of created event producers
 */
private List<EventProducer> createHierarchy(List<EventProducer> eventProdToMerge, double currentHierarchyDepth,
        int ppid, int dividingFactor) {
    LinkedList<EventProducer> newEventProd = new LinkedList<EventProducer>();
    int groupSize;

    // If first hierarchy depth
    if (currentHierarchyDepth == 0.0)
        // Do not split, just create a super producer representing the whole group
        groupSize = eventProdToMerge.size();
    else
        // Compute the size of a new group
        groupSize = eventProdToMerge.size() / dividingFactor;

    if (groupSize <= 1)
        return eventProdToMerge;

    int mergedProducers = 0;
    int i;
    // Compute new groups of EP
    for (i = 0; i < eventProdToMerge.size() - groupSize; i = i + groupSize) {
        EventProducer newNode = createProducer(
                eventProdToMerge.get(i).getName() + "_" + (int) currentHierarchyDepth, ppid);
        newEventProd.add(newNode);
        LinkedList<EventProducer> newSubGroup = new LinkedList<EventProducer>();
        // Update the parent of leaf event producers
        for (int j = i; j < i + groupSize; j++) {
            eventProdToMerge.get(j).setParentId(newNode.getId());
            newSubGroup.add(eventProdToMerge.get(j));
        }
        // Keep merging?
        if (currentHierarchyDepth + 1 < maxHierarchyDepth && newSubGroup.size() >= dividingFactor
                && newSubGroup.size() > 1) {
            newEventProd.addAll(
                    createHierarchy(newSubGroup, currentHierarchyDepth + 1, newNode.getId(), dividingFactor));
        } else {
            newEventProd.addAll(newSubGroup);
        }
        mergedProducers = i + groupSize;
    }
    int remainingEP = eventProdToMerge.size() - mergedProducers;
    if (remainingEP == 1) {
        newEventProd.add(eventProdToMerge.get(eventProdToMerge.size() - 1));
    } else if (mergedProducers < eventProdToMerge.size()) {
        // Check if some producers remain
        EventProducer newNode = createProducer(
                eventProdToMerge.get(i).getName() + "_" + (int) currentHierarchyDepth, ppid);
        newEventProd.add(newNode);
        LinkedList<EventProducer> newSubGroup = new LinkedList<EventProducer>();
        for (i = mergedProducers; i < eventProdToMerge.size(); i++) {
            if (currentHierarchyDepth > 0.0 || newNode.getName().matches("^\\d+$") || newNode.getName()
                    .compareTo(eventProdToMerge.get(i).getName() + "_" + (int) currentHierarchyDepth) != 0) {
                // Do not copy data structure at depth 0
                eventProdToMerge.get(i).setParentId(newNode.getId());
                newSubGroup.add(eventProdToMerge.get(i));
            }
        }
        if (currentHierarchyDepth + 1 < maxHierarchyDepth && newSubGroup.size() >= dividingFactor
                && newSubGroup.size() > 1 && dividingFactor > 1) {
            newEventProd.addAll(
                    createHierarchy(newSubGroup, currentHierarchyDepth + 1, newNode.getId(), dividingFactor));
        } else {
            newEventProd.addAll(newSubGroup);
        }
    }
    logger.debug(currentHierarchyDepth + ", " + newEventProd.size());
    return newEventProd;
}
From source file:com.oltpbenchmark.benchmarks.seats.SEATSWorker.java
private boolean executeUpdateReservation(UpdateReservation proc) throws SQLException {
    LinkedList<Reservation> cache = CACHE_RESERVATIONS.get(CacheType.PENDING_UPDATES);
    assert (cache != null) : "Unexpected " + CacheType.PENDING_UPDATES;

    if (LOG.isTraceEnabled())
        LOG.trace("Let's look for a Reservation that we can update");

    // Pull off the first pending seat change and throw that at the server
    Reservation r = null;
    try {
        r = cache.poll();
    } catch (Throwable ex) {
        // Nothing
    }
    if (r == null) {
        if (LOG.isDebugEnabled())
            LOG.warn(String.format("Failed to find Reservation to update [cache=%d]", cache.size()));
        return (false);
    }
    if (LOG.isTraceEnabled())
        LOG.trace("Ok let's try to update " + r);

    long value = rng.number(1, 1 << 20);
    long attribute_idx = rng.nextInt(UpdateReservation.NUM_UPDATES);
    long seatnum = rng.number(0, SEATSConstants.FLIGHTS_NUM_SEATS - 1);

    if (LOG.isTraceEnabled())
        LOG.trace("Calling " + proc);
    proc.run(conn, r.id, r.flight_id.encode(), r.customer_id.encode(), seatnum, attribute_idx, value);
    conn.commit();

    SEATSWorker.this.requeueReservation(r);
    return (true);
}
From source file:fi.ni.IFC_ClassModel.java
/**
 * Fill java class instance values.
 *
 * @param name
 *            the name
 * @param vo
 *            the value object representing the IFC file line
 * @param level_up_vo
 *            the IFC line pointing to this line
 * @param level
 *            the iteration count in the recursive run
 */
private void fillJavaClassInstanceValues(String name, IFC_X3_VO vo, IFC_X3_VO level_up_vo, int level) {
    EntityVO evo = entities.get(ExpressReader.formatClassName(vo.name));
    if (evo == null)
        System.err.println("Does not exist: " + vo.name);

    String subject = null;
    if (vo.getGid() != null) {
        subject = "gref_" + filter_extras(vo.getGid());
    } else {
        subject = "iref_" + ifc_filename + "_i" + vo.line_num;
    }

    // Somebody has pointed here from above:
    if (vo != level_up_vo) {
        String level_up_subject;
        if (level_up_vo.getGid() != null) {
            level_up_subject = "gref_" + filter_extras(level_up_vo.getGid());
        } else {
            level_up_subject = "iref_" + ifc_filename + "_i" + level_up_vo.line_num;
        }
        addLiteralValue(level_up_vo.getLine_num(), vo.getLine_num(), level_up_subject, name);
    }
    if (vo.is_touched())
        return;

    int attribute_pointer = 0;
    for (int i = 0; i < vo.list.size(); i++) {
        Object o = vo.list.get(i);
        if (String.class.isInstance(o)) {
            if (!((String) o).equals("$")) { // Do not print out empty values
                if (types.get(ExpressReader.formatClassName((String) o)) == null) {
                    if ((evo != null) && (evo.getDerived_attribute_list() != null)
                            && (evo.getDerived_attribute_list().size() > attribute_pointer)) {
                        addLiteralValue(vo.getLine_num(), subject,
                                evo.getDerived_attribute_list().get(attribute_pointer).getName(),
                                "\'" + filter_extras((String) o) + "'");
                    }
                    attribute_pointer++;
                }
            } else
                attribute_pointer++;
        } else if (IFC_X3_VO.class.isInstance(o)) {
            if ((evo != null) && (evo.getDerived_attribute_list() != null)
                    && (evo.getDerived_attribute_list().size() > attribute_pointer)) {
                fillJavaClassInstanceValues(evo.getDerived_attribute_list().get(attribute_pointer).getName(),
                        (IFC_X3_VO) o, vo, level + 1);
                addIFCAttribute(vo, evo.getDerived_attribute_list().get(attribute_pointer), (IFC_X3_VO) o);
            } else {
                fillJavaClassInstanceValues("-", (IFC_X3_VO) o, vo, level + 1);
                System.out.println("1!" + evo);
            }
            attribute_pointer++;
        } else if (LinkedList.class.isInstance(o)) {
            @SuppressWarnings("unchecked")
            LinkedList<Object> tmp_list = (LinkedList<Object>) o;
            StringBuffer local_txt = new StringBuffer();
            for (int j = 0; j < tmp_list.size(); j++) {
                Object o1 = tmp_list.get(j);
                if (String.class.isInstance(o1)) {
                    if (j > 0)
                        local_txt.append(", ");
                    local_txt.append(filter_extras((String) o1));
                }
                if (IFC_X3_VO.class.isInstance(o1)) {
                    if ((evo != null) && (evo.getDerived_attribute_list() != null)
                            && (evo.getDerived_attribute_list().size() > attribute_pointer)) {
                        fillJavaClassInstanceValues(
                                evo.getDerived_attribute_list().get(attribute_pointer).getName(),
                                (IFC_X3_VO) o1, vo, level + 1);
                        addIFCAttribute(vo, evo.getDerived_attribute_list().get(attribute_pointer),
                                (IFC_X3_VO) o1);
                    } else {
                        fillJavaClassInstanceValues("-", (IFC_X3_VO) o1, vo, level + 1);
                        System.out.println("2!" + evo);
                    }
                }
            }
            if (local_txt.length() > 0) {
                if ((evo != null) && (evo.getDerived_attribute_list() != null)
                        && (evo.getDerived_attribute_list().size() > attribute_pointer)) {
                    addLiteralValue(vo.getLine_num(), subject,
                            evo.getDerived_attribute_list().get(attribute_pointer).getName(),
                            "'" + local_txt.toString() + "\'");
                }
            }
            attribute_pointer++;
        }
    }
}