Usage examples for java.util.LinkedList#addLast(E)
Signature: public void addLast(E e) — appends the specified element to the end of this list (equivalent to add(E)).
From source file:org.hyperic.hq.measurement.server.session.AvailabilityManagerImpl.java
/**
 * Converts a list of availability RLE (run-length-encoded) segments into a paged list of
 * high/low metric values, one entry per time slot of width {@code interval} in [begin, end].
 *
 * @param availInfo        RLE availability segments, assumed ordered by start time
 * @param begin            start of the overall time range (inclusive)
 * @param end              end of the overall time range (inclusive)
 * @param interval         width of each time slot
 * @param prependUnknowns  if true, slots before the first data point are filled with
 *                         AVAIL_UNKNOWN; if false those slots are skipped
 * @return page list of per-slot values; a default historical-availability list when empty
 */
private PageList<HighLowMetricValue> getPageList(List<AvailabilityDataRLE> availInfo, long begin, long end,
        long interval, boolean prependUnknowns) {
    PageList<HighLowMetricValue> rtn = new PageList<HighLowMetricValue>();
    for (Iterator<AvailabilityDataRLE> it = availInfo.iterator(); it.hasNext();) {
        AvailabilityDataRLE rle = it.next();
        long availStartime = rle.getStartime();
        long availEndtime = rle.getEndtime();
        // Skip measurements that end before the first time slot.
        if (availEndtime < begin) {
            continue;
        }
        // Sliding window of RLE segments covering the current slot; newest at the front.
        LinkedList<AvailabilityDataRLE> queue = new LinkedList<AvailabilityDataRLE>();
        queue.add(rle);
        int i = 0;
        for (long curr = begin; curr <= end; curr += interval) {
            long next = curr + interval;
            next = (next > end) ? end : next;
            long endtime = ((AvailabilityDataRLE) queue.getFirst()).getEndtime();
            // Pull in further segments while the next slot boundary is past the newest segment.
            while (next > endtime) {
                // It should not be the case that there are no more
                // avails in the array, but we need to handle it.
                if (it.hasNext()) {
                    AvailabilityDataRLE tmp = (AvailabilityDataRLE) it.next();
                    queue.addFirst(tmp);
                    endtime = tmp.getEndtime();
                } else {
                    // NOTE(review): if availEndtime < next this assignment does not make the
                    // loop condition false, so this branch can log and spin indefinitely —
                    // TODO confirm callers guarantee the last segment covers `end`.
                    endtime = availEndtime;
                    int measId = rle.getMeasurement().getId().intValue();
                    String msg = "Measurement, " + measId + ", for interval " + begin + " - " + end
                            + " did not return a value for range " + curr + " - " + (curr + interval);
                    log.warn(msg);
                }
            }
            endtime = availEndtime;
            // Drop segments that ended before the current slot start.
            while (curr > endtime) {
                queue.removeLast();
                // This should not happen unless the above !it.hasNext()
                // else condition is true.
                if (queue.size() == 0) {
                    rle = new AvailabilityDataRLE(rle.getMeasurement(), rle.getEndtime(), next, AVAIL_UNKNOWN);
                    queue.addLast(rle);
                }
                rle = (AvailabilityDataRLE) queue.getLast();
                availStartime = rle.getStartime();
                availEndtime = rle.getEndtime();
                endtime = availEndtime;
            }
            HighLowMetricValue val;
            if (curr >= availStartime) {
                // Slot is covered by real data.
                val = getMetricValue(queue, curr);
            } else if (prependUnknowns) {
                // Slot precedes the data; synthesize an "unknown" value.
                val = new HighLowMetricValue(AVAIL_UNKNOWN, curr);
                val.incrementCount();
            } else {
                i++;
                continue;
            }
            // First measurement for slot i adds a value; later ones merge into it.
            if (rtn.size() <= i) {
                rtn.add(round(val));
            } else {
                updateMetricValue(val, (HighLowMetricValue) rtn.get(i));
            }
            i++;
        }
    }
    if (rtn.size() == 0) {
        rtn.addAll(getDefaultHistoricalAvail(end));
    }
    return rtn;
}
From source file:org.deeplearning4j.nn.multilayer.MultiLayerNetwork.java
/**
 * Calculate gradients and errors. Used in two places:
 * (a) backprop (for standard multi layer network learning)
 * (b) backpropGradient (layer method, for when MultiLayerNetwork is used as a layer)
 *
 * @param epsilon         Errors (technically errors .* activations). Not used if withOutputLayer = true
 * @param withOutputLayer if true: assume last layer is output layer, and calculate errors based on labels.
 *                        In this case, the epsilon input is not used (may/should be null).
 *                        If false: calculate backprop gradients
 * @return Gradients and the error (epsilon) at the input; null if withOutputLayer is true
 *         but the final layer is not an IOutputLayer
 */
protected Pair<Gradient, INDArray> calcBackpropGradients(INDArray epsilon, boolean withOutputLayer) {
    if (flattenedGradients == null)
        initGradientsView();
    String multiGradientKey;
    Gradient gradient = new DefaultGradient(flattenedGradients);
    Layer currLayer;
    // Iterate the layers backwards, applying each layer's backward gradient in turn.
    // Gradients are staged in a LinkedList (not directly in the Gradient map) to ensure
    // iteration order in the DefaultGradient linked hash map — i.e. layer 0 first
    // instead of the output layer first. Keys are "<layerIndex>_<variableName>".
    int numLayers = getnLayers();
    LinkedList<Triple<String, INDArray, Character>> gradientList = new LinkedList<>();
    int layerFrom;
    Pair<Gradient, INDArray> currPair;
    if (withOutputLayer) {
        if (!(getOutputLayer() instanceof IOutputLayer)) {
            log.warn("Warning: final layer isn't output layer. You cannot use backprop without an output layer.");
            return null;
        }
        IOutputLayer outputLayer = (IOutputLayer) getOutputLayer();
        if (labels == null)
            throw new IllegalStateException("No labels found");
        outputLayer.setLabels(labels);
        // Output-layer errors come from the labels, not from the epsilon parameter.
        currPair = outputLayer.backpropGradient(null);
        for (Map.Entry<String, INDArray> entry : currPair.getFirst().gradientForVariable().entrySet()) {
            String origName = entry.getKey();
            multiGradientKey = String.valueOf(numLayers - 1) + "_" + origName;
            gradientList.addLast(new Triple<>(multiGradientKey, entry.getValue(),
                    currPair.getFirst().flatteningOrderForVariable(origName)));
        }
        // Back-propagate epsilon through the output layer's input pre-processor, if any.
        if (getLayerWiseConfigurations().getInputPreProcess(numLayers - 1) != null)
            currPair = new Pair<>(currPair.getFirst(), this.layerWiseConfigurations
                    .getInputPreProcess(numLayers - 1).backprop(currPair.getSecond(), getInputMiniBatchSize()));
        layerFrom = numLayers - 2;
    } else {
        currPair = new Pair<>(null, epsilon);
        layerFrom = numLayers - 1;
    }
    // Calculate gradients for the remaining layers, from layerFrom down to 0.
    for (int j = layerFrom; j >= 0; j--) {
        currLayer = getLayer(j);
        // Frozen layers (and everything below them) receive no gradient updates.
        if (currLayer instanceof FrozenLayer)
            break;
        currPair = currLayer.backpropGradient(currPair.getSecond());
        // addFirst into tempList reverses this layer's variable order; prepending the
        // whole tempList to gradientList then restores it while keeping layers ordered 0..N.
        LinkedList<Triple<String, INDArray, Character>> tempList = new LinkedList<>();
        for (Map.Entry<String, INDArray> entry : currPair.getFirst().gradientForVariable().entrySet()) {
            String origName = entry.getKey();
            multiGradientKey = String.valueOf(j) + "_" + origName;
            tempList.addFirst(new Triple<>(multiGradientKey, entry.getValue(),
                    currPair.getFirst().flatteningOrderForVariable(origName)));
        }
        for (Triple<String, INDArray, Character> triple : tempList)
            gradientList.addFirst(triple);
        // Pass epsilon through input processor before passing to next layer (if applicable).
        if (getLayerWiseConfigurations().getInputPreProcess(j) != null)
            currPair = new Pair<>(currPair.getFirst(), getLayerWiseConfigurations().getInputPreProcess(j)
                    .backprop(currPair.getSecond(), getInputMiniBatchSize()));
    }
    // Add gradients to the Gradient map, in correct (layer 0 first) order.
    for (Triple<String, INDArray, Character> triple : gradientList) {
        gradient.setGradientFor(triple.getFirst(), triple.getSecond(), triple.getThird());
    }
    return new Pair<>(gradient, currPair.getSecond());
}
From source file:android.net.http.Connection.java
/**
 * Process requests in queue; pipelines requests over a single HTTP connection.
 *
 * State machine: SEND fills the pipeline (up to maxPipe in-flight requests),
 * READ drains responses, DRAIN reads remaining responses when no new requests
 * are available, DONE exits. On any send/read failure the pipeline degrades to
 * one request at a time (minPipe = maxPipe = 1).
 *
 * @param firstRequest request to send before consulting the feeder; may be null
 */
void processRequests(Request firstRequest) {
    Request req = null;
    boolean empty;
    int error = EventHandler.OK;
    Exception exception = null;
    // Requests sent but whose responses have not yet been read, in send order.
    LinkedList<Request> pipe = new LinkedList<Request>();
    int minPipe = MIN_PIPE, maxPipe = MAX_PIPE;
    int state = SEND;
    while (state != DONE) {
        if (HttpLog.LOGV)
            HttpLog.v(states[state] + " pipe " + pipe.size());
        /* If a request was cancelled, give other cancel requests
           some time to go through so we don't uselessly restart
           connections */
        if (mActive == STATE_CANCEL_REQUESTED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException x) {
                /* ignore */
            }
            mActive = STATE_NORMAL;
        }
        switch (state) {
        case SEND: {
            // Pipeline full: stop sending, start reading responses.
            if (pipe.size() == maxPipe) {
                state = READ;
                break;
            }
            /* get a request */
            if (firstRequest == null) {
                req = mRequestFeeder.getRequest(mHost);
            } else {
                req = firstRequest;
                firstRequest = null;
            }
            if (req == null) {
                state = DRAIN;
                break;
            }
            req.setConnection(this);
            /* Don't work on cancelled requests. */
            if (req.mCancelled) {
                if (HttpLog.LOGV)
                    HttpLog.v("processRequests(): skipping cancelled request " + req);
                req.complete();
                break;
            }
            if (mHttpClientConnection == null || !mHttpClientConnection.isOpen()) {
                /* If this call fails, the address is bad or the net is down.
                   Punt for now.
                   FIXME: blow out entire queue here on connection failure if net up? */
                if (!openHttpConnection(req)) {
                    state = DONE;
                    break;
                }
            }
            try {
                /* FIXME: don't increment failure count if old connection?
                   There should not be a penalty for attempting to reuse an old connection */
                req.sendRequest(mHttpClientConnection);
            } catch (HttpException e) {
                exception = e;
                error = EventHandler.ERROR;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /* retry request if not permanent failure or cancelled */
                    pipe.addLast(req);
                }
                exception = null;
                // Requeue anything in flight; drop to non-pipelined mode.
                state = clearPipe(pipe) ? DONE : SEND;
                minPipe = maxPipe = 1;
                break;
            }
            pipe.addLast(req);
            if (!mCanPersist)
                state = READ;
            break;
        }
        case DRAIN:
        case READ: {
            empty = !mRequestFeeder.haveRequest(mHost);
            int pipeSize = pipe.size();
            // Prefer sending while the pipeline is shallow and more work exists.
            if (state != DRAIN && pipeSize < minPipe && !empty && mCanPersist) {
                state = SEND;
                break;
            } else if (pipeSize == 0) {
                /* Done if no other work to do */
                state = empty ? DONE : SEND;
                break;
            }
            req = (Request) pipe.removeFirst();
            if (HttpLog.LOGV)
                HttpLog.v("processRequests() reading " + req);
            try {
                req.readResponse(mHttpClientConnection);
            } catch (ParseException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /* retry request if not permanent failure or cancelled */
                    req.reset();
                    pipe.addFirst(req);
                }
                exception = null;
                mCanPersist = false;
            }
            if (!mCanPersist) {
                if (HttpLog.LOGV)
                    HttpLog.v("processRequests(): no persist, closing " + mHost);
                closeConnection();
                mHttpContext.removeAttribute(HTTP_CONNECTION);
                clearPipe(pipe);
                minPipe = maxPipe = 1;
                state = SEND;
            }
            break;
        }
        }
    }
}
From source file:org.mycard.net.network.Connection.java
/**
 * Process requests in queue; pipelines requests over a single HTTP connection.
 *
 * State machine: SEND fills the pipeline (up to maxPipe in-flight requests),
 * READ drains responses, DRAIN reads remaining responses when no new requests
 * are available, DONE exits. On any send/read failure the pipeline degrades to
 * one request at a time (minPipe = maxPipe = 1).
 *
 * @param firstRequest request to send before consulting the feeder; may be null
 */
void processRequests(Request firstRequest) {
    Request req = null;
    boolean empty;
    int error = EventHandler.OK;
    Exception exception = null;
    // Requests sent but whose responses have not yet been read, in send order.
    LinkedList<Request> pipe = new LinkedList<Request>();
    int minPipe = MIN_PIPE, maxPipe = MAX_PIPE;
    int state = SEND;
    while (state != DONE) {
        /** If a request was cancelled, give other cancel requests
            some time to go through so we don't uselessly restart
            connections */
        if (mActive == STATE_CANCEL_REQUESTED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException x) {
                /** ignore */
            }
            mActive = STATE_NORMAL;
        }
        switch (state) {
        case SEND: {
            // Pipeline full: stop sending, start reading responses.
            if (pipe.size() == maxPipe) {
                state = READ;
                break;
            }
            /** get a request */
            if (firstRequest == null) {
                req = mRequestFeeder.getRequest(mHost);
            } else {
                req = firstRequest;
                firstRequest = null;
            }
            if (req == null) {
                state = DRAIN;
                break;
            }
            req.setConnection(this);
            /** Don't work on cancelled requests. */
            if (req.mCancelled) {
                req.complete();
                break;
            }
            if (mHttpClientConnection == null || !mHttpClientConnection.isOpen()) {
                /** If this call fails, the address is bad or the net is down.
                    Punt for now.
                    FIXME: blow out entire queue here on connection failure if net up? */
                if (!openHttpConnection(req)) {
                    state = DONE;
                    break;
                }
            }
            /** we have a connection, let the event handler
             * know of any associated certificate,
             * potentially none.
             */
            //req.mEventHandler.certificate(mCertificate);
            try {
                /** FIXME: don't increment failure count if old connection?
                    There should not be a penalty for attempting to reuse an old connection */
                req.sendRequest(mHttpClientConnection);
            } catch (HttpException e) {
                exception = e;
                error = EventHandler.ERROR;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /** retry request if not permanent failure or cancelled */
                    pipe.addLast(req);
                }
                exception = null;
                // Requeue anything in flight; drop to non-pipelined mode.
                state = clearPipe(pipe) ? DONE : SEND;
                minPipe = maxPipe = 1;
                break;
            }
            pipe.addLast(req);
            if (!mCanPersist)
                state = READ;
            break;
        }
        case DRAIN:
        case READ: {
            empty = !mRequestFeeder.haveRequest(mHost);
            int pipeSize = pipe.size();
            // Prefer sending while the pipeline is shallow and more work exists.
            if (state != DRAIN && pipeSize < minPipe && !empty && mCanPersist) {
                state = SEND;
                break;
            } else if (pipeSize == 0) {
                /** Done if no other work to do */
                state = empty ? DONE : SEND;
                break;
            }
            req = (Request) pipe.removeFirst();
            try {
                req.readResponse(mHttpClientConnection);
            } catch (ParseException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /** retry request if not permanent failure or cancelled */
                    req.reset();
                    pipe.addFirst(req);
                }
                exception = null;
                mCanPersist = false;
            }
            if (!mCanPersist) {
                closeConnection();
                mHttpContext.removeAttribute(HTTP_CONNECTION);
                clearPipe(pipe);
                minPipe = maxPipe = 1;
                state = SEND;
            }
            break;
        }
        }
    }
}
From source file:android.net.http.Connection.java
/**
 * Process requests in queue; pipelines requests over a single HTTP connection.
 *
 * State machine: SEND fills the pipeline (up to maxPipe in-flight requests),
 * READ drains responses, DRAIN reads remaining responses when no new requests
 * are available, DONE exits. On any send/read failure the pipeline degrades to
 * one request at a time (minPipe = maxPipe = 1).
 *
 * @param firstRequest request to send before consulting the feeder; may be null
 */
void processRequests(Request firstRequest) {
    Request req = null;
    boolean empty;
    int error = EventHandler.OK;
    Exception exception = null;
    // Requests sent but whose responses have not yet been read, in send order.
    LinkedList<Request> pipe = new LinkedList<Request>();
    int minPipe = MIN_PIPE, maxPipe = MAX_PIPE;
    int state = SEND;
    while (state != DONE) {
        if (HttpLog.LOGV)
            HttpLog.v(states[state] + " pipe " + pipe.size());
        /* If a request was cancelled, give other cancel requests
           some time to go through so we don't uselessly restart
           connections */
        if (mActive == STATE_CANCEL_REQUESTED) {
            try {
                Thread.sleep(100);
            } catch (InterruptedException x) {
                /* ignore */
            }
            mActive = STATE_NORMAL;
        }
        switch (state) {
        case SEND: {
            // Pipeline full: stop sending, start reading responses.
            if (pipe.size() == maxPipe) {
                state = READ;
                break;
            }
            /* get a request */
            if (firstRequest == null) {
                req = mRequestFeeder.getRequest(mHost);
            } else {
                req = firstRequest;
                firstRequest = null;
            }
            if (req == null) {
                state = DRAIN;
                break;
            }
            req.setConnection(this);
            /* Don't work on cancelled requests. */
            if (req.mCancelled) {
                if (HttpLog.LOGV)
                    HttpLog.v("processRequests(): skipping cancelled request " + req);
                req.complete();
                break;
            }
            if (mHttpClientConnection == null || !mHttpClientConnection.isOpen()) {
                /* If this call fails, the address is bad or the net is down.
                   Punt for now.
                   FIXME: blow out entire queue here on connection failure if net up? */
                if (!openHttpConnection(req)) {
                    state = DONE;
                    break;
                }
            }
            /* we have a connection, let the event handler
             * know of any associated certificate,
             * potentially none.
             */
            req.mEventHandler.certificate(mCertificate);
            try {
                /* FIXME: don't increment failure count if old connection?
                   There should not be a penalty for attempting to reuse an old connection */
                req.sendRequest(mHttpClientConnection);
            } catch (HttpException e) {
                exception = e;
                error = EventHandler.ERROR;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /* retry request if not permanent failure or cancelled */
                    pipe.addLast(req);
                }
                exception = null;
                // Requeue anything in flight; drop to non-pipelined mode.
                state = clearPipe(pipe) ? DONE : SEND;
                minPipe = maxPipe = 1;
                break;
            }
            pipe.addLast(req);
            if (!mCanPersist)
                state = READ;
            break;
        }
        case DRAIN:
        case READ: {
            empty = !mRequestFeeder.haveRequest(mHost);
            int pipeSize = pipe.size();
            // Prefer sending while the pipeline is shallow and more work exists.
            if (state != DRAIN && pipeSize < minPipe && !empty && mCanPersist) {
                state = SEND;
                break;
            } else if (pipeSize == 0) {
                /* Done if no other work to do */
                state = empty ? DONE : SEND;
                break;
            }
            req = (Request) pipe.removeFirst();
            if (HttpLog.LOGV)
                HttpLog.v("processRequests() reading " + req);
            try {
                req.readResponse(mHttpClientConnection);
            } catch (ParseException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IOException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            } catch (IllegalStateException e) {
                exception = e;
                error = EventHandler.ERROR_IO;
            }
            if (exception != null) {
                if (httpFailure(req, error, exception) && !req.mCancelled) {
                    /* retry request if not permanent failure or cancelled */
                    req.reset();
                    pipe.addFirst(req);
                }
                exception = null;
                mCanPersist = false;
            }
            if (!mCanPersist) {
                if (HttpLog.LOGV)
                    HttpLog.v("processRequests(): no persist, closing " + mHost);
                closeConnection();
                mHttpContext.removeAttribute(HTTP_CONNECTION);
                clearPipe(pipe);
                minPipe = maxPipe = 1;
                state = SEND;
            }
            break;
        }
        }
    }
}
From source file:org.seasr.meandre.components.analytics.socialnetworking.AbstractLinkCreationComponent.java
@Override public void executeCallBack(ComponentContext cc) throws Exception { Strings inMetaTuple = (Strings) cc.getDataComponentFromInput(IN_META_TUPLE); SimpleTuplePeer tuplePeer = new SimpleTuplePeer(inMetaTuple); console.fine("Input meta tuple: " + tuplePeer.toString()); StringsArray inTuples = (StringsArray) cc.getDataComponentFromInput(IN_TUPLES); Strings[] tuples = BasicDataTypesTools.stringsArrayToJavaArray(inTuples); int SENTENCE_ID_IDX = tuplePeer.getIndexForFieldName(OpenNLPNamedEntity.SENTENCE_ID_FIELD); int TYPE_IDX = tuplePeer.getIndexForFieldName(OpenNLPNamedEntity.TYPE_FIELD); int TEXT_IDX = tuplePeer.getIndexForFieldName(OpenNLPNamedEntity.TEXT_FIELD); // Linked list of sentences keyed by sentence id - the HashSet is the set of entities in that sentence LinkedList<KeyValuePair<Integer, HashSet<Entity>>> _sentencesWindow = new LinkedList<KeyValuePair<Integer, HashSet<Entity>>>(); // Note: The algorithm used to mark entities as adjacent if they fall within the specified sentence distance // relies on a sliding-window of sentences that are within the 'adjacency' range. As new sentences are // considered, the window moves to the right and old sentences that are now too far fall out of scope. 
SimpleTuple tuple = tuplePeer.createTuple(); for (Strings t : tuples) { tuple.setValues(t);/*from w ww .j a va 2 s .c o m*/ Integer sentenceId = Integer.parseInt(tuple.getValue(SENTENCE_ID_IDX)); String tupleType = tuple.getValue(TYPE_IDX); String tupleValue = tuple.getValue(TEXT_IDX); // If the entity is of the type we're interested in if (_entityTypes.contains(tupleType)) { if (_normalizeEntities) { // Normalize whitespaces StringBuilder sb = new StringBuilder(); Matcher nonWhitespaceMatcher = REGEXP_NONWHITESPACE.matcher(tupleValue); while (nonWhitespaceMatcher.find()) sb.append(" ").append(nonWhitespaceMatcher.group(1)); if (sb.length() > 0) tupleValue = sb.substring(1); else continue; // Normalize people's names if (tupleType.toLowerCase().equals("person")) { sb = new StringBuilder(); Matcher personMatcher = REGEXP_PERSON.matcher(tupleValue); while (personMatcher.find()) sb.append(" ").append(personMatcher.group(1)); if (sb.length() > 0) tupleValue = sb.substring(1); else continue; // ignore names with 1 character if (tupleValue.length() == 1) continue; } tupleValue = WordUtils.capitalizeFully(tupleValue); } // ... 
create an object for it Entity entity = new Entity(tupleType, tupleValue); // Check if we already recorded this entity before Entity oldEntity = _entities.get(entity); if (oldEntity == null) // If not, record it _entities.put(entity, entity); else // Otherwise retrieve the entity we used before entity = oldEntity; HashSet<Entity> sentenceEntities; // Remove all sentences (together with any entities they contained) from the set // of sentences that are too far from the current sentence of this entity while (_sentencesWindow.size() > 0 && sentenceId - _sentencesWindow.peek().getKey() > _offset) _sentencesWindow.remove(); if (_sentencesWindow.size() > 0) { // If this sentence is different from the last sentence in the window if (_sentencesWindow.getLast().getKey() != sentenceId) { // Create an entry for it and add it at the end of the window sentenceEntities = new HashSet<Entity>(); _sentencesWindow .addLast(new KeyValuePair<Integer, HashSet<Entity>>(sentenceId, sentenceEntities)); } else sentenceEntities = _sentencesWindow.getLast().getValue(); } else { // If there are no sentences in the window, create an entry for this sentence and add it sentenceEntities = new HashSet<Entity>(); _sentencesWindow .addLast(new KeyValuePair<Integer, HashSet<Entity>>(sentenceId, sentenceEntities)); } // Iterate through all the sentences in the window for (KeyValuePair<Integer, HashSet<Entity>> kvp : _sentencesWindow) // ... and all the entities in each sentence for (Entity e : kvp.getValue()) { // ignore self-references if (e.equals(entity)) continue; // ... and mark the new entity as being adjacent to all the entities in the window e.addOutwardLink(entity); entity.addInwardLink(e); } // Add the new entity to the window sentenceEntities.add(entity); } } if (!_isStreaming) generateAndPushOutputInternal(); }
From source file:org.gcaldaemon.core.ldap.ContactLoader.java
/**
 * Downloads the Gmail contact CSV for each configured account, merges and parses it,
 * writes 'contacts.csv' and per-contact vCard files under the vCard work directory,
 * deletes vCards for contacts that no longer exist, and publishes the resulting
 * array into the {@code contacts} field.
 *
 * @throws Exception on download, parse, or file I/O failure
 */
private final void loadContacts() throws Exception {
    // Loading contact list.
    log.debug("Loading Gmail contact list...");
    GmailPool pool = configurator.getGmailPool();
    LinkedList contactList = new LinkedList();
    // Keys "email\tname" already handled, for de-duplication.
    HashSet processedEntries = new HashSet();
    // Revision stamp written into each vCard.
    String rev = new DateTime().toString();
    // File names of vCards written this run; everything else gets deleted below.
    HashSet cardFiles = new HashSet();
    GmailEntry entry = null;
    GmailContact contact;
    String key, csv;
    boolean found;
    char[] chars;
    int i, m, n;
    char c;
    // Loop on accounts, concatenating each account's CSV (minus its header line).
    QuickWriter buffer = new QuickWriter();
    for (n = 0; n < usernames.length; n++) {
        try {
            // Download CSV from Gmail.
            entry = pool.borrow(usernames[n], passwords[n]);
            csv = entry.downloadCSV();
            if (csv == null) {
                continue;
            }
            // Remove header: skip up to and including the first line break.
            chars = csv.toCharArray();
            found = false;
            i = -1;
            for (m = 0; m < chars.length; m++) {
                c = chars[m];
                if (c == '\r' || c == '\n') {
                    found = true;
                    continue;
                }
                if (found) {
                    i = m;
                    break;
                }
            }
            if (i != -1) {
                buffer.write(chars, i, chars.length - i);
            }
        } finally {
            // NOTE(review): if borrow() throws, entry is still null here —
            // presumably recycle() tolerates null; verify.
            pool.recycle(entry);
        }
        // Throttle between accounts.
        if (n < usernames.length - 1) {
            Thread.sleep(1000);
        }
    }
    // Parse CSV to GmailContact array.
    csv = buffer.toString();
    GmailContact[] contactArray = parseCSV(csv);
    // Save 'contacts.csv' in UTF8 into the 'work/vcard' dir.
    File file = new File(vcardDirectory, "contacts.csv");
    byte[] bytes = StringUtils.encodeString(csv, StringUtils.UTF_8);
    saveFile(file, bytes);
    // Process contacts.
    if (contactArray == null) {
        contactArray = new GmailContact[0];
    }
    for (i = 0; i < contactArray.length; i++) {
        // Verify email address and name field.
        contact = contactArray[i];
        if (contact.email.length() == 0) {
            // Use the secondary email address.
            contact.email = contact.mail;
        }
        if (contact.name.length() == 0) {
            // Create name from the email address (local part before '@').
            contact.name = contact.email;
            m = contact.name.indexOf('@');
            if (m != -1) {
                contact.name = contact.name.substring(0, m);
            }
        }
        // Fix MS Address Book bug: skip entries without a usable address.
        if (contact.email.indexOf('@') == -1) {
            continue;
        }
        key = contact.email + '\t' + contact.name;
        if (processedEntries.contains(key)) {
            continue;
        }
        processedEntries.add(key);
        if (contact.email.length() != 0) {
            // Save vcard with name and email address.
            contactList.addLast(contact);
        } else {
            // NOTE(review): unreachable — the indexOf('@') check above already
            // skipped empty emails, so this else branch is dead code.
            if (contact.name.length() != 0) {
                // Save vcard without email address.
                cardFiles.add(saveVCard(contact, rev));
            }
        }
    }
    GmailContact[] array = new GmailContact[contactList.size()];
    contactList.toArray(array);
    // Save contacts withall email addresses.
    for (i = 0; i < array.length; i++) {
        cardFiles.add(saveVCard(array[i], rev));
    }
    // Save contact in other formats (eg. HTML).
    saveContacts(vcardDirectory, array, buffer);
    // Remove deleted contacts: any vCard file not (re)written this run.
    String[] currentFiles = vcardDirectory.list();
    String fileName;
    for (i = 0; i < currentFiles.length; i++) {
        fileName = currentFiles[i];
        if (fileName.endsWith(VCARD_EXTENSION) && !cardFiles.contains(fileName)) {
            (new File(vcardDirectory, fileName)).delete();
        }
    }
    // Contact list loaded; publish atomically.
    synchronized (this) {
        contacts = array;
    }
    log.debug(array.length + " contacts loaded successfully.");
}
From source file:org.slc.sli.dashboard.manager.impl.PopulationManagerImpl.java
/** * Retrieves attendance in a sorted order, removes all events where the * student is present. Returns a GenericEntity with startDate, endDate, and * attendanceList.//from w w w . j a v a 2s .c o m */ @Override public GenericEntity getStudentAttendanceForCalendar(String token, Object studentId, Data config) { String schoolId = null; GenericEntity ge = new GenericEntity(); List<GenericEntity> enrollments = getApiClient().getEnrollmentForStudent(token, (String) studentId); if (enrollments == null || enrollments.size() < 1) { return ge; } GenericEntity firstEnrollment = enrollments.get(0); Map<String, Object> school = (Map<String, Object>) firstEnrollment.get(Constants.ATTR_SCHOOL); if (school != null) { schoolId = (String) school.get(Constants.ATTR_ID); } else { return ge; } List<String> currentYearDates = null; try { // get begin/end dates for the current school year currentYearDates = getCurrentYearDates(token, schoolId); } catch (ParseException e) { LOG.error(e.getMessage(), e); } LinkedList<Map> absentList = new LinkedList<Map>(); ge.put(Constants.ATTR_ATTENDANCE_LIST, absentList); ge.put(Constants.ATTR_START_DATE, currentYearDates.get(0)); ge.put(Constants.ATTR_END_DATE, currentYearDates.get(1)); List<GenericEntity> attendanceList = getStudentAttendanceForSchool(token, (String) studentId, schoolId, null, null); if (attendanceList == null || attendanceList.size() < 1) { return ge; } GenericEntity firstWrapper = attendanceList.get(0); List<Map<String, Object>> schoolYearAttendance = (List<Map<String, Object>>) firstWrapper .get(Constants.ATTR_ATTENDANCE_SCHOOLYEAR_ATTENDANCE); if (schoolYearAttendance == null || schoolYearAttendance.size() < 1) { return ge; } // Comparator, sort by "schoolYear" descending order Comparator<Map<String, Object>> schoolYearAttendanceComparator = new Comparator<Map<String, Object>>() { @Override public int compare(Map<String, Object> arg0, Map<String, Object> arg1) { Object schoolYearObj0 = arg0.get(Constants.ATTR_SCHOOL_YEAR); 
Object schoolYearObj1 = arg1.get(Constants.ATTR_SCHOOL_YEAR); if (schoolYearObj0 == null || schoolYearObj1 == null) { return 0; } String schoolYear0 = schoolYearObj0.toString(); String schoolYear1 = schoolYearObj1.toString(); return schoolYear1.compareTo(schoolYear0); } }; Collections.sort(schoolYearAttendance, schoolYearAttendanceComparator); Map<String, Object> secondWrapper = schoolYearAttendance.get(0); List<Map> attList = (List<Map>) secondWrapper.get(Constants.ATTR_ATTENDANCE_ATTENDANCE_EVENT); if (attList == null) { return ge; } // filter out 'In Attendance' events, remove whitespace // LinkedList<Map> absentList = new LinkedList<Map>(); for (Map attEvent : attList) { String event = (String) attEvent.get(Constants.ATTR_ATTENDANCE_EVENT_CATEGORY); if (!event.equals(Constants.ATTR_ATTENDANCE_IN_ATTENDANCE)) { String strippedWhiteSpaceEvent = ((String) attEvent.get(Constants.ATTR_ATTENDANCE_EVENT_CATEGORY)) .replace(" ", ""); attEvent.put(Constants.ATTR_ATTENDANCE_EVENT_CATEGORY, strippedWhiteSpaceEvent); absentList.addLast(attEvent); } } return ge; }
From source file:de.tudarmstadt.ukp.wikipedia.parser.mediawiki.ModularParser.java
/**
 * Parses a MediaWiki table ("{|" ... "|}") from the given line spans into a Table.
 *
 * Lines starting with '|' or '!' delimit cells/rows at the top table level; nested
 * tables ("{|" / "|}") are tracked via the subTables counter and passed through as
 * cell content. Accumulated cell spans are parsed via parseSections when the next
 * delimiter is seen.
 *
 * @param sm        span manager backing the source text; tableDataSpans is registered
 *                  with it so positions stay valid during parsing
 * @param cepp      shared content-element parsing parameters
 * @param lineSpans remaining lines; the leading "{|" line is consumed first, and lines
 *                  are consumed up to and including the closing "|}"
 * @return the parsed table, with source spans filled in when calculateSrcSpans is set
 */
private Table buildTable(SpanManager sm, ContentElementParsingParameters cepp, LinkedList<Span> lineSpans) {
    Table result = new Table();
    int col = -1;
    int row = 0;
    // Nesting depth of inner tables; cell delimiters only apply at depth 0.
    int subTables = 0;
    // Spans belonging to the current (not yet emitted) table cell.
    LinkedList<Span> tableDataSpans = new LinkedList<Span>();
    sm.manageList(tableDataSpans);
    if (calculateSrcSpans) {
        result.setSrcSpan(new SrcSpan(sm.getSrcPos(lineSpans.getFirst().getStart()), -1));
    }
    // Consume the opening "{|" line.
    lineSpans.removeFirst();
    while (!lineSpans.isEmpty()) {
        Span s = lineSpans.removeFirst();
        int pos = s.nonWSCharPos(sm);
        char c0 = s.charAt(pos, sm);
        char c1 = s.charAt(pos + 1, sm);
        if (subTables == 0 && (c0 == '!' || c0 == '|')) {
            // A new delimiter ends the pending cell: emit it as a TableElement.
            if (!tableDataSpans.isEmpty()) {
                lineSpans.addFirst(s);
                SrcSpan ei = null;
                if (calculateSrcSpans) {
                    ei = new SrcSpan(sm.getSrcPos(tableDataSpans.getFirst().getStart() - 1) + 1, -1);
                }
                TableElement te = new TableElement(parseSections(sm, cepp, tableDataSpans), row, col);
                te.setSrcSpan(ei);
                result.addTableElement(te);
                lineSpans.removeFirst();
            }
            col++;
            if (c1 == '-') {
                // "|-": row separator.
                row++;
                col = -1;
                continue;
            } else if (c0 == '|' && c1 == '}') {
                // "|}": end of this table.
                sm.removeManagedList(tableDataSpans);
                if (calculateSrcSpans) {
                    result.getSrcSpan().setEnd(sm.getSrcPos(s.getEnd()));
                }
                return result;
            } else if (c0 == '|' && c1 == '+') {
                // "|+": table caption/title.
                result.setTitleElement(
                        parseContentElement(sm, cepp, new Span(s.getStart() + pos + 2, s.getEnd()).trim(sm)));
                continue;
            } else {
                // Cell content; "||" splits multiple cells on one line, and a single
                // '|' inside the cell separates formatting options from content.
                int multipleCols;
                if ((multipleCols = sm.indexOf("||", s.getStart() + pos + 1, s.getEnd())) != -1) {
                    lineSpans.addFirst(new Span(multipleCols + 1, s.getEnd()));
                    s.setEnd(multipleCols);
                }
                int optionTagPos = sm.indexOf("|", s.getStart() + pos + 1, s.getEnd());
                if (optionTagPos != -1) {
                    s.setStart(optionTagPos + 1).trim(sm);
                } else {
                    s.adjustStart(pos + 1).trim(sm);
                }
            }
        } else if (c0 == '|' && c1 == '}') {
            // Closing an inner table.
            subTables--;
        } else if (c0 == '{' && c1 == '|') {
            // Opening an inner table.
            subTables++;
        }
        tableDataSpans.addLast(s);
    }
    // Input exhausted without "|}": flush the final pending cell.
    if (tableDataSpans.size() != 0) {
        SrcSpan ei = null;
        if (calculateSrcSpans) {
            ei = new SrcSpan(sm.getSrcPos(tableDataSpans.getFirst().getStart() - 1) + 1, -1);
        }
        TableElement te = new TableElement(parseSections(sm, cepp, tableDataSpans), row, col);
        te.setSrcSpan(ei);
        result.addTableElement(te);
    }
    sm.removeManagedList(tableDataSpans);
    if (calculateSrcSpans) {
        // Unterminated table: end position unknown.
        result.getSrcSpan().setEnd(-1);
    }
    return result;
}