Usage examples for java.util.Collections.binarySearch
@SuppressWarnings("unchecked") public static <T> int binarySearch(List<? extends T> list, T key, Comparator<? super T> c)
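Before the project examples below, here is a minimal, self-contained sketch of the method's contract: the list must already be sorted by the same comparator you pass in; a hit returns the element's index, and a miss returns -(insertion point) - 1, so callers recover the insertion point as -index - 1. (The names and data here are invented for illustration.)

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class BinarySearchDemo {
    public static void main(String[] args) {
        Comparator<String> cmp = String.CASE_INSENSITIVE_ORDER;

        // The list must already be sorted by the SAME comparator passed to binarySearch.
        List<String> names = new ArrayList<>(List.of("Ada", "Grace", "Linus"));
        names.sort(cmp);

        // Found: returns the element's index.
        int hit = Collections.binarySearch(names, "grace", cmp);
        System.out.println(hit); // 1

        // Not found: returns -(insertion point) - 1.
        int miss = Collections.binarySearch(names, "Edsger", cmp);
        if (miss < 0) {
            int insertionPoint = -miss - 1;
            names.add(insertionPoint, "Edsger"); // keeps the list sorted
        }
        System.out.println(names); // [Ada, Edsger, Grace, Linus]
    }
}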
From source file:io.github.tavernaextras.biocatalogue.ui.filtertree.FilterTreePane.java
/**
 * This method loads filter data from API and populates the view.
 */
private void loadFiltersAndBuildTheTree() {
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            resetTreeActionToolbar();

            jpFilters.removeAll();
            jpFilters.setLayout(new BorderLayout());
            jpFilters.add(new JLabel(" Loading filters..."), BorderLayout.NORTH);
            jpFilters.add(new JLabel(ResourceManager.getImageIcon(ResourceManager.BAR_LOADER_ORANGE)),
                    BorderLayout.CENTER);
            thisPanel.validate();
            thisPanel.repaint(); // validate and repaint this component to make sure that
                                 // scroll bar around the filter tree placeholder panel disappears
        }
    });

    new Thread("Load filters") {
        public void run() {
            try {
                // load filter data
                filtersRoot = client.getBioCatalogueFilters(filtersURL);

                // Create root of the filter tree component
                FilterTreeNode root = new FilterTreeNode("root");

                // populate the tree via its root element
                for (FilterGroup fgroup : filtersRoot.getGroupList()) {
                    // attach filter group directly to the root node
                    FilterTreeNode fgroupNode = new FilterTreeNode(
                            "<html><span style=\"color: black; font-weight: bold;\">"
                                    + StringEscapeUtils.escapeHtml(fgroup.getName().toString())
                                    + "</span></html>");
                    root.add(fgroupNode);

                    // go through all filter types in this group and add them to the tree
                    for (FilterType ftype : fgroup.getTypeList()) {
                        // if there's more than one filter type in the group, add the type node as
                        // another level of nesting (otherwise, attach filters inside the single
                        // type directly to the group node)
                        FilterTreeNode filterTypeNode = fgroupNode;
                        if (fgroup.getTypeList().size() > 1) {
                            filterTypeNode = new FilterTreeNode(
                                    "<html><span style=\"color: black; font-weight: bold;\">"
                                            + StringEscapeUtils.escapeHtml(ftype.getName().toString())
                                            + "</span></html>");
                            fgroupNode.add(filterTypeNode);
                        }

                        // For some reason sorting the list of filters before inserting into tree
                        // messes up the tree nodes
                        // Collections.sort(ftype.getFilterList(), new Comparator<Filter>(){
                        //     @Override
                        //     public int compare(Filter f1, Filter f2) {
                        //         return (f1.getName().compareToIgnoreCase(f2.getName()));
                        //     }
                        // });

                        addFilterChildren(filterTypeNode, ftype.getUrlKey().toString(), ftype.getFilterList());
                    }
                }

                // Create the tree view with the populated root
                filterTree = new JFilterTree(root);
                filterTree.setRootVisible(false); // don't want the root to be visible; not a standard
                                                  // thing, so not implemented within JTriStateTree
                filterTree.setLargeModel(true); // potentially can have many filters!
                filterTree.addCheckingListener(thisPanel);

                // insert the created tree view into the filters panel
                jpFilters.removeAll();
                jpFilters.setLayout(new GridLayout(0, 1));
                jpFilters.add(filterTree);
                jpFilters.validate();

                // add actions from the contextual menu of the filter tree into the toolbar
                // that replicates those plus adds additional ones in this panel
                tbFilterTreeToolbar.removeAll();
                for (Action a : filterTree.getContextualMenuActions()) {
                    tbFilterTreeToolbar.add(a);
                }

                // enable all actions
                filterTree.enableAllContextualMenuAction(true);
            } catch (Exception e) {
                logger.error("Failed to load filter tree from the following URL: " + filtersURL, e);
            }
        }

        /**
         * Recursive method to populate a node of the filter tree with all
         * sub-filters.
         *
         * Ontological terms will be underlined.
         *
         * @param root Tree node to add children to.
         * @param filterList A list of Filters to add to "root" as children.
         */
        private void addFilterChildren(FilterTreeNode root, String filterCategory, List<Filter> filterList) {
            for (Filter f : filterList) {
                // Is this an ontological term?
                String ontology = null;
                if (FilterTreeNode.isTagWithNamespaceNode(filterCategory, f.getUrlValue())) {
                    String nameAndNamespace = f.getUrlValue().substring(1, f.getUrlValue().length() - 1);
                    String[] namePlusNamespace = nameAndNamespace.split("#");
                    ontology = JFilterTree.getOntologyFromNamespace(namePlusNamespace[0]);
                }

                FilterTreeNode fNode = new FilterTreeNode(
                        "<html><span color=\"black\""
                                /* (FilterTreeNode.isTagWithNamespaceNode(filterCategory, f.getUrlValue())
                                        ? " style=\"text-decoration: underline;\"" : "") */
                                + ">" + StringEscapeUtils.escapeHtml(f.getName())
                                + " (" + f.getCount() + ")" + "</span>"
                                /* (FilterTreeNode.isTagWithNamespaceNode(filterCategory, f.getUrlValue())
                                        ? "<span color=\"gray\"> (" + f.getCount().intValue() + ")</span></html>"
                                        : "</html>"), */
                                + (ontology != null
                                        ? "<span color=\"#3090C7\"> <" + ontology + "></span></html>"
                                        : "</html>"),
                        filterCategory, f.getUrlValue());
                addFilterChildren(fNode, filterCategory, f.getFilterList());

                // Insert the node into the (alphabetically) sorted children nodes
                List<FilterTreeNode> children = Collections.list(root.children());

                // Search for the index the new node should be inserted at
                int index = Collections.binarySearch(children, fNode, new Comparator<FilterTreeNode>() {
                    @Override
                    public int compare(FilterTreeNode o1, FilterTreeNode o2) {
                        String str1 = ((String) o1.getUserObject()).toString();
                        String str2 = ((String) o2.getUserObject()).toString();
                        return (str1.compareToIgnoreCase(str2));
                    }
                });
                if (index < 0) {
                    // not found - index will be equal to -insertion-point - 1
                    index = -index - 1;
                }
                // else node with the same name found in the array - insert it at that position
                root.insert(fNode, index);
                //root.add(fNode);
            }
        }
    }.start();
}
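The pattern above — binarySearch followed by an insert at -index - 1 — is the standard way to keep a list sorted as elements arrive. A generic sketch of the idiom (method name is illustrative, not from the example):

/**
 * Inserts item into a list that is already sorted by cmp, keeping it sorted.
 * Duplicates (per the comparator) are inserted at the position binarySearch reports.
 */
static <T> void insertSorted(List<T> list, T item, Comparator<? super T> cmp) {
    int index = Collections.binarySearch(list, item, cmp);
    if (index < 0) {
        index = -index - 1; // not found: convert to insertion point
    }
    list.add(index, item);
}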
From source file:ch.ethz.dcg.jukefox.model.player.playlog.PlayLog.java
public List<PlaylistSong<BaseArtist, BaseAlbum>> getSongByDateTag(long fromTimestamp, long toTimestamp)
        throws DataUnavailableException {
    ArrayList<DateTag> sortedDateTags = tagProvider.getSortedDateTags();
    long avgTime = (fromTimestamp + toTimestamp) / 2;
    DateTag bestTag = null;
    for (DateTag t : sortedDateTags) {
        if (toTimestamp < t.getFrom()) {
            continue;
        }
        if (fromTimestamp > t.getTo()) {
            continue;
        }
        // we have at least some overlap...
        if (bestTag == null) {
            bestTag = t;
            continue;
        }
        long bestDiff = Math.abs(bestTag.getTime() - avgTime);
        long curDiff = Math.abs(t.getTime() - avgTime);
        if (curDiff < bestDiff) {
            bestTag = t;
            continue;
        }
        if (curDiff == bestDiff && t.getRange() < bestTag.getRange()) {
            bestTag = t;
        }
    }
    if (bestTag != null) {
        return returnSongsForDateTag(bestTag);
    }

    // no tag overlaps with our time range... just take the one with best
    // mean-time fit...
    DateTag relevantDate = new DateTag();
    relevantDate.setFrom(fromTimestamp);
    relevantDate.setTo(toTimestamp);
    int idx = Collections.binarySearch(sortedDateTags, relevantDate, new Comparator<DateTag>() {
        @Override
        public int compare(DateTag t1, DateTag t2) {
            if (t1.getTime() < t2.getTime()) {
                return -1;
            }
            if (t1.getTime() > t2.getTime()) {
                return 1;
            }
            return 0;
        }
    });
    if (idx >= 0) { // exact match (the original tested idx > 0, which would crash on a match at index 0)
        DateTag tag = sortedDateTags.get(idx);
        return returnSongsForDateTag(tag);
    }
    // no exact match: idx is -(insertion point) - 1; clamp to the last element
    idx = Math.min(-idx - 1, sortedDateTags.size() - 1);
    DateTag tag = sortedDateTags.get(idx);
    return returnSongsForDateTag(tag);
}
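When binarySearch misses, the recovered insertion point can also be clamped to pick a nearest neighbor, as the example above does with its date tags. A stripped-down sketch of that "closest value" lookup (names and the Long element type are illustrative):

/** Returns the value in the sorted list closest to target (list must be non-empty). */
static long closest(List<Long> sorted, long target) {
    int idx = Collections.binarySearch(sorted, target);
    if (idx >= 0) {
        return sorted.get(idx); // exact hit
    }
    int ip = -idx - 1; // insertion point: index of the first element greater than target
    if (ip == 0) {
        return sorted.get(0);
    }
    if (ip == sorted.size()) {
        return sorted.get(sorted.size() - 1);
    }
    long before = sorted.get(ip - 1);
    long after = sorted.get(ip);
    return (target - before <= after - target) ? before : after;
}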
From source file:net.sourceforge.pmd.Report.java
/**
 * Merges the given report into this report. This might be necessary if a
 * summary over all violations is needed, as PMD creates one report per file
 * by default.
 *
 * @param r
 *            the report to be merged into this.
 * @see AbstractAccumulatingRenderer
 */
public void merge(Report r) {
    Iterator<ProcessingError> i = r.errors();
    while (i.hasNext()) {
        addError(i.next());
    }
    Iterator<ConfigurationError> ce = r.configErrors();
    while (ce.hasNext()) {
        addConfigError(ce.next());
    }
    Iterator<Metric> m = r.metrics();
    while (m.hasNext()) {
        addMetric(m.next());
    }
    Iterator<RuleViolation> v = r.iterator();
    while (v.hasNext()) {
        RuleViolation violation = v.next();
        int index = Collections.binarySearch(violations, violation, RuleViolationComparator.INSTANCE);
        violations.add(index < 0 ? -index - 1 : index, violation);
        violationTree.addRuleViolation(violation);
    }
    Iterator<SuppressedViolation> s = r.getSuppressedRuleViolations().iterator();
    while (s.hasNext()) {
        suppressedRuleViolations.add(s.next());
    }
}
From source file:org.dspace.browse.SolrBrowseDAO.java
@Override
public int doDistinctOffsetQuery(String column, String value, boolean isAscending) throws BrowseException {
    DiscoverResult resp = getSolrResponse();
    List<FacetResult> facets = resp.getFacetResult(facetField);
    Comparator comparator = new SolrBrowseDAO.FacetValueComparator();
    Collections.sort(facets, comparator);
    int x = Collections.binarySearch(facets, value, comparator);
    int ascValue = (x >= 0) ? x : -(x + 1);
    if (isAscending) {
        return ascValue;
    } else {
        return doCountQuery() - ascValue;
    }
}
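The -(x + 1) conversion in this example exploits another property of the return value: for a missing key, the insertion point equals the number of elements strictly smaller than the key, so the expression doubles as a rank/offset query (for a found key, the index is the same count as long as elements are distinct). A minimal illustration:

// Rank query: how many elements in the sorted list are smaller than the key?
List<String> sorted = List.of("apple", "banana", "cherry", "damson");
int x = Collections.binarySearch(sorted, "blueberry");
int rank = (x >= 0) ? x : -(x + 1);
System.out.println(rank); // 2 -- "apple" and "banana" precede "blueberry"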
From source file:org.pentaho.di.trans.steps.sort.SortRows.java
Object[] getBuffer() throws KettleValueException {
    Object[] retval;

    // Open all files at once and read one row from each file...
    if (data.files.size() > 0 && (data.dis.size() == 0 || data.fis.size() == 0)) {
        if (log.isBasic()) {
            logBasic(BaseMessages.getString(PKG, "SortRows.Basic.OpeningTempFiles", data.files.size()));
        }

        try {
            for (int f = 0; f < data.files.size() && !isStopped(); f++) {
                FileObject fileObject = data.files.get(f);
                String filename = KettleVFS.getFilename(fileObject);
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "SortRows.Detailed.OpeningTempFile", filename));
                }
                InputStream fi = KettleVFS.getInputStream(fileObject);
                DataInputStream di;
                data.fis.add(fi);
                if (data.compressFiles) {
                    di = getDataInputStream(new GZIPInputStream(new BufferedInputStream(fi)));
                } else {
                    di = new DataInputStream(new BufferedInputStream(fi, 50000));
                }
                data.dis.add(di);

                // How long is the buffer?
                int buffersize = data.bufferSizes.get(f);
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "SortRows.Detailed.FromFileExpectingRows",
                            filename, buffersize));
                }

                if (buffersize > 0) {
                    Object[] row = data.outputRowMeta.readData(di);
                    data.rowbuffer.add(row); // new row from input stream
                    data.tempRows.add(new RowTempFile(row, f));
                }
            }

            // Sort the data row buffer
            Collections.sort(data.tempRows, data.comparator);
        } catch (Exception e) {
            logError(BaseMessages.getString(PKG, "SortRows.Error.ErrorReadingBackTempFiles"), e);
        }
    }

    if (data.files.size() == 0) {
        // read from in-memory processing
        if (data.getBufferIndex < data.buffer.size()) {
            retval = data.buffer.get(data.getBufferIndex);
            data.getBufferIndex++;
        } else {
            retval = null;
        }
    } else {
        // read from disk processing
        if (data.rowbuffer.size() == 0) {
            retval = null;
        } else {
            // We now have "filenr" rows waiting: which one is the smallest?
            //
            if (log.isRowLevel()) {
                for (int i = 0; i < data.rowbuffer.size() && !isStopped(); i++) {
                    Object[] b = data.rowbuffer.get(i);
                    logRowlevel(BaseMessages.getString(PKG, "SortRows.RowLevel.PrintRow", i,
                            data.outputRowMeta.getString(b)));
                }
            }

            RowTempFile rowTempFile = data.tempRows.remove(0);
            retval = rowTempFile.row;
            int smallest = rowTempFile.fileNumber;

            // now get another Row for position smallest
            FileObject file = data.files.get(smallest);
            DataInputStream di = data.dis.get(smallest);
            InputStream fi = data.fis.get(smallest);

            try {
                Object[] row2 = data.outputRowMeta.readData(di);
                RowTempFile extra = new RowTempFile(row2, smallest);
                int index = Collections.binarySearch(data.tempRows, extra, data.comparator);
                if (index < 0) {
                    data.tempRows.add(index * (-1) - 1, extra);
                } else {
                    data.tempRows.add(index, extra);
                }
            } catch (KettleFileException fe) { // empty file or EOF mostly
                GZIPInputStream gzfi = (data.compressFiles) ? data.gzis.get(smallest) : null;
                try {
                    di.close();
                    fi.close();
                    if (gzfi != null) {
                        gzfi.close();
                    }
                    file.delete();
                } catch (IOException e) {
                    logError(BaseMessages.getString(PKG, "SortRows.Error.UnableToCloseFile", smallest,
                            file.toString()));
                    setErrors(1);
                    stopAll();
                    return null;
                }

                data.files.remove(smallest);
                data.dis.remove(smallest);
                data.fis.remove(smallest);
                if (gzfi != null) {
                    data.gzis.remove(smallest);
                }

                // Also update all file numbers in data.tempRows if they are larger
                // than smallest.
                //
                for (RowTempFile rtf : data.tempRows) {
                    if (rtf.fileNumber > smallest) {
                        rtf.fileNumber--;
                    }
                }
            } catch (SocketTimeoutException e) {
                throw new KettleValueException(e); // should never happen on local files
            }
        }
    }
    return retval;
}
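Here binarySearch keeps data.tempRows ordered so that index 0 always holds the smallest pending row — effectively a hand-rolled priority queue driving the k-way merge of temp files. Where the surrounding bookkeeping allows it, java.util.PriorityQueue expresses the same idea more directly; a sketch reusing the example's names, not the actual Kettle code:

// k-way merge core with a PriorityQueue instead of a sorted List + binarySearch
PriorityQueue<RowTempFile> heap = new PriorityQueue<>(data.comparator);
heap.addAll(data.tempRows);            // seed with one row per open file
RowTempFile smallestRow = heap.poll(); // O(log n), versus remove(0) which shifts the whole list
// ... read the replacement row from smallestRow.fileNumber and heap.offer(...) it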
From source file:org.apache.hadoop.hdfs.notifier.server.ServerHistory.java
/**
 * Checks what notifications are saved in history for the given event and
 * adds those notifications in the given queue. Only the notifications
 * which happened strictly after the edit log operations with the given
 * transaction id are put in the queue.
 * The notifications are put in the queue in the order of their
 * transaction id.
 *
 * @param event the event for which the notifications should be stored
 *        in the queue.
 * @param txId the given transaction id
 * @param notifications the queue in which the notifications will be placed.
 *
 * @throws TransactionIdTooOldException raised when we can't guarantee that
 *         we got all notifications that happened after the given
 *         transaction id.
 */
@Override
public void addNotificationsToQueue(NamespaceEvent event, long txId,
        Queue<NamespaceNotification> notifications) throws TransactionIdTooOldException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("Got addNotificationsToQueue for: " + NotifierUtils.asString(event) + " and txId: " + txId);
    }

    historyLock.readLock().lock();
    try {
        if (orderedHistoryList == null || orderedHistoryList.size() == 0) {
            throw new TransactionIdTooOldException("No data in history.");
        }
        if (orderedHistoryList.get(0).txnId > txId
                || orderedHistoryList.get(orderedHistoryList.size() - 1).txnId < txId) {
            throw new TransactionIdTooOldException("No data in history for txId " + txId);
        }

        int index = Collections.binarySearch(orderedHistoryList,
                new HistoryTreeEntry(0, txId, event.type), comparatorByID);
        if (index < 0) {
            // If we got here, there are 2 possibilities:
            // * The client gave us a bad transaction id.
            // * We missed one (or more) transaction(s)
            LOG.error("Potential corrupt history. Got request for: " + NotifierUtils.asString(event)
                    + " and txId: " + txId);
            throw new TransactionIdTooOldException("Potentially corrupt server history");
        }

        String dirFormatPath = event.path;
        if (!dirFormatPath.endsWith(Path.SEPARATOR)) {
            dirFormatPath += Path.SEPARATOR;
        }

        for (int i = index + 1; i < orderedHistoryList.size(); i++) {
            HistoryTreeEntry entry = orderedHistoryList.get(i);
            if (event.type != entry.type) {
                continue;
            }
            String entryPath = entry.getFullPath();
            if (entryPath.startsWith(dirFormatPath)) {
                notifications.add(new NamespaceNotification(entryPath, entry.type, entry.txnId));
            }
        }
    } finally {
        historyLock.readLock().unlock();
    }
}
From source file:de.tudarmstadt.ukp.csniper.webapp.evaluation.EvaluationRepository.java
/**
 * Persist the given items. If they already exist in the database, replace the item in the list
 * with the item from the database. Transient data (e.g. match offsets) is preserved.
 *
 * @param aCreate
 *            true = missing evaluation items are created and returned; false = missing
 *            evaluation items are not created and returned
 */
@Transactional
public List<EvaluationItem> writeEvaluationItems(List<EvaluationItem> aItems, boolean aCreate) {
    long start = System.currentTimeMillis();

    log.info("Building index on in-memory items");
    List<EvaluationItem> result = new ArrayList<EvaluationItem>(aItems.size());
    LinkedMultiValueMap<String, EvaluationItem> idx = new LinkedMultiValueMap<String, EvaluationItem>();
    for (EvaluationItem i : aItems) {
        idx.add(i.getCollectionId() + "-" + i.getDocumentId() + "-" + i.getType(), i);
    }

    TypedQuery<EvaluationItem> query = entityManager
            .createQuery("FROM EvaluationItem WHERE collectionId = :collectionId AND documentId = "
                    + ":documentId AND type = :type", EvaluationItem.class);

    log.info("Merging with in-database items in " + idx.size() + " chunks");
    ProgressMeter progress = new ProgressMeter(idx.size());
    for (List<EvaluationItem> items : idx.values()) {
        progress.next();

        EvaluationItem ref = items.get(0);
        List<EvaluationItem> pItems = query.setParameter("collectionId", ref.getCollectionId())
                .setParameter("documentId", ref.getDocumentId())
                .setParameter("type", ref.getType())
                .getResultList();

        Comparator<EvaluationItem> cmp = new Comparator<EvaluationItem>() {
            @Override
            public int compare(EvaluationItem aO1, EvaluationItem aO2) {
                if (aO1.getBeginOffset() > aO2.getBeginOffset()) {
                    return 1;
                } else if (aO1.getBeginOffset() < aO2.getBeginOffset()) {
                    return -1;
                } else if (aO1.getEndOffset() > aO2.getEndOffset()) {
                    return 1;
                } else if (aO1.getEndOffset() < aO2.getEndOffset()) {
                    return -1;
                } else {
                    return 0;
                }
            }
        };

        Collections.sort(pItems, cmp);

        for (EvaluationItem item : items) {
            int i = Collections.binarySearch(pItems, item, cmp);
            if (i < 0) {
                if (aCreate) {
                    entityManager.persist(item);
                    result.add(item);
                }
            } else {
                EvaluationItem pItem = pItems.get(i);
                pItem.copyTransientData(item);
                result.add(pItem);
            }
        }

        log.info(progress);
    }

    log.info("writeEvaluationItems for " + aItems.size() + " items completed in "
            + (System.currentTimeMillis() - start) + " ms");

    return result;

    // String query = "FROM EvaluationItem WHERE collectionId = :collectionId AND documentId = "
    //         + ":documentId AND type = :type AND beginOffset = :beginOffset AND endOffset = :endOffset";
    // for (ListIterator<EvaluationItem> li = aItems.listIterator(); li.hasNext();) {
    //     EvaluationItem item = li.next();
    //     try {
    //         EvaluationItem pItem = entityManager.createQuery(query, EvaluationItem.class)
    //                 .setParameter("collectionId", item.getCollectionId())
    //                 .setParameter("documentId", item.getDocumentId())
    //                 .setParameter("type", item.getType())
    //                 .setParameter("beginOffset", item.getBeginOffset())
    //                 .setParameter("endOffset", item.getEndOffset()).getSingleResult();
    //
    //         // if item already exists, use that instead of persisting the new
    //         pItem.copyTransientData(item);
    //         li.set(pItem);
    //     }
    //     catch (NoResultException e) {
    //         // persist item if not exists
    //         if (aCreate) {
    //             entityManager.persist(item);
    //         }
    //     }
    // }
}
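Note the precondition this example gets right: pItems is sorted with cmp immediately before Collections.binarySearch(pItems, item, cmp) is called with the very same comparator. If the list is ordered by any other criterion (or not sorted at all), the result of binarySearch is undefined.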
From source file:at.salzburgresearch.vgi.vgianalyticsframework.osm.importer.impl.VgiOperationGeneratorDataHandlerImpl.java
/**
 * Adds coordinates to operations (OpAddNode, OpModifyWayCoordinate)
 * @param refElements list of child elements (way nodes and relation members)
 */
private void addCoordinateToOperations(List<IVgiFeature> refElements) {
    log.info(refElements.size() + " ref elements found!");

    refElements.sort(VgiFeatureImpl.getFeatureComparator());

    int numFound = 0, numNotFound = 0;

    for (IVgiFeature feature : featureList) {
        TLongObjectHashMap<List<IVgiOperation>> childElementHistories = new TLongObjectHashMap<List<IVgiOperation>>();
        TLongObjectHashMap<List<IVgiOperation>> featureChildElements = new TLongObjectHashMap<List<IVgiOperation>>();

        /** Add coordinates to OpAddNode operations */
        for (IVgiOperation featureOperation : feature.getOperationList()) {

            /** Create childElementHistory (will be used for adding OpModifyWayCoordinate) */
            if (featureOperation.getVgiOperationType().equals(VgiOperationType.OP_ADD_NODE)
                    || featureOperation.getVgiOperationType().equals(VgiOperationType.OP_REMOVE_NODE)
                    || featureOperation.getVgiOperationType().equals(VgiOperationType.OP_ADD_MEMBER)
                    || featureOperation.getVgiOperationType().equals(VgiOperationType.OP_REMOVE_MEMBER)) {
                if (!childElementHistories.containsKey(featureOperation.getRefId())) {
                    /** Initialize child element history */
                    childElementHistories.put(featureOperation.getRefId(), new ArrayList<IVgiOperation>());
                }
                childElementHistories.get(featureOperation.getRefId()).add(featureOperation);
            }

            /** Only OpAddNode (and OpAddMember) */
            //TODO relations are not implemented from here to end
            if (!featureOperation.getVgiOperationType().equals(VgiOperationType.OP_ADD_NODE))
                continue;

            /** Find ref element */
            IVgiFeature refElement = null;

            IVgiFeature refElementForSearch = new VgiFeatureImpl();
            refElementForSearch.setOid(featureOperation.getRefId());
            if (featureOperation.getVgiOperationType().equals(VgiOperationType.OP_ADD_NODE)) {
                refElementForSearch.setVgiGeometryType(VgiGeometryType.POINT);
            } else if (featureOperation.getVgiOperationType().equals(VgiOperationType.OP_ADD_MEMBER)) {
                if (featureOperation.getKey().equals("n")) {
                    refElementForSearch.setVgiGeometryType(VgiGeometryType.POINT);
                } else if (featureOperation.getKey().equals("w")) {
                    refElementForSearch.setVgiGeometryType(VgiGeometryType.LINE);
                } else if (featureOperation.getKey().equals("r")) {
                    refElementForSearch.setVgiGeometryType(VgiGeometryType.RELATION);
                }
            }

            int index = Collections.binarySearch(refElements, refElementForSearch,
                    VgiFeatureImpl.getFeatureComparator());
            if (index >= 0) {
                refElement = refElements.get(index);
                numFound++;
            } else {
                numNotFound++;
                if (settings.getFilterPolygon() == null) {
                    /** only write error if no polygon filter is set */
                    if (numNotFound % 10000 == 0)
                        log.error(featureOperation.getVgiGeometryType() + "/" + featureOperation.getOid()
                                + "/v" + featureOperation.getVersion() + ": Cannot find ref element "
                                + refElementForSearch.getVgiGeometryType() + "/" + refElementForSearch.getOid()
                                + " (" + numNotFound + "/" + (numFound + numNotFound) + ")");
                }
                continue;
            }

            /** Operations for added nodes */
            featureChildElements.put(refElement.getOid(), refElement.getOperationList());

            /** Some nodes are added to way before they are created (e.g. node 20950647 in way 4848297) */
            /** we want to find a coordinate, also if the node has been created too late... */
            for (IVgiOperation childNodeOperation : refElement.getOperationList()) {
                /** only operations which have a coordinate */
                if (childNodeOperation.getCoordinate() == null)
                    continue;
                /** Find latest node coordinate */
                /** if (coordinate not found yet OR operation timestamp not after changeset timestamp */
                if (featureOperation.getCoordinate() != null
                        && childNodeOperation.getTimestamp().after(featureOperation.getTimestamp()))
                    break;
                /** Add coordinate to operation */
                featureOperation.setCoordinate(new Coordinate(childNodeOperation.getCoordinate()));
            }
        }

        /** Add OpModifyWayCoordinate operations */
        for (long childElementId : featureChildElements.keys()) {
            /** Nodes which are part of way twice are processed once */

            /** Get operations and sort them */
            List<IVgiOperation> childElementHistory = childElementHistories.get(childElementId);
            List<IVgiOperation> childNodeOperations = featureChildElements.get(childElementId);
            Collections.sort(childElementHistory, VgiOperationImpl.getVersionComparator());
            Collections.sort(childNodeOperations, VgiOperationImpl.getVersionComparator());

            /** Add operation to feature if node is in node list at this timestamp */
            for (IVgiOperation nodeOperation : childNodeOperations) {
                /** Only OpModifyCoordinate */
                if (!nodeOperation.getVgiOperationType().equals(VgiOperationType.OP_MODIFY_COORDINATE))
                    continue;

                int membershipCount = 0;
                /** Iterate through OpAddNodes and OpRemoveNodes */
                for (int i = 0; i < childElementHistory.size(); i++) {
                    /** We want the way node history before the node operation */
                    if (childElementHistory.get(i).getTimestamp().after(nodeOperation.getTimestamp()))
                        break;
                    if (childElementHistory.get(i).getVgiOperationType().equals(VgiOperationType.OP_ADD_NODE))
                        membershipCount++;
                    if (childElementHistory.get(i).getVgiOperationType().equals(VgiOperationType.OP_REMOVE_NODE))
                        membershipCount--;
                }

                /** Node should be member of way at least once */
                if (membershipCount == 0)
                    continue;

                /** Add OP_UPDATE_WAY_COORDINATE */
                IVgiOperation newOperation = new VgiOperationImpl(feature.getOid(),
                        feature.getVgiGeometryType(), VgiOperationType.OP_MODIFY_WAY_COORDINATE,
                        nodeOperation.getUid(), nodeOperation.getUser(), nodeOperation.getTimestamp(),
                        Short.MAX_VALUE, nodeOperation.getChangesetid(), nodeOperation.getCoordinate(),
                        "", "", nodeOperation.getOid(), -1);

                /** Find index and version with feature operation list */
                int index = Collections.binarySearch(feature.getOperationList(), newOperation,
                        VgiOperationImpl.getTimestampComparator());
                if (index >= 0) {
                    /** Timestamp/OpType combination already exists */
                    newOperation.setVersion(feature.getOperationList().get(index).getVersion());
                    feature.getOperationList().add(index + 1, newOperation);
                } else {
                    index = (index + 1) * -1;
                    newOperation.setVersion(feature.getOperationList().get(index - 1).getVersion());
                    feature.getOperationList().add(index, newOperation);
                }
            }
        }

        /** Sort operations */
        Collections.sort(feature.getOperationList(), VgiOperationImpl.getVersionComparator());
    }
}
From source file:com.github.pockethub.ui.gist.GistFragment.java
@Override
public void onDialogResult(int requestCode, int resultCode, Bundle arguments) {
    if (RESULT_OK != resultCode)
        return;

    switch (requestCode) {
    case COMMENT_DELETE:
        final GithubComment comment = arguments.getParcelable(EXTRA_COMMENT);
        new DeleteCommentTask(getActivity(), gist.id, comment) {

            @Override
            protected void onSuccess(GithubComment comment) throws Exception {
                super.onSuccess(comment);

                // Update comment list
                if (comments != null && comment != null) {
                    int position = Collections.binarySearch(comments, comment,
                            new Comparator<GithubComment>() {

                                public int compare(GithubComment lhs, GithubComment rhs) {
                                    return String.valueOf(lhs.id).compareTo(rhs.id);
                                }
                            });
                    comments.remove(position);
                    updateList(gist, comments);
                } else
                    refreshGist();
            }
        }.start();
        break;
    }
}
From source file:com.github.pockethub.android.ui.gist.GistFragment.java
@Override
public void onDialogResult(int requestCode, int resultCode, Bundle arguments) {
    if (RESULT_OK != resultCode) {
        return;
    }

    switch (requestCode) {
    case COMMENT_DELETE:
        final GitHubComment comment = arguments.getParcelable(EXTRA_COMMENT);
        ServiceGenerator.createService(getActivity(), GistCommentService.class)
                .deleteGistComment(gistId, comment.id())
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .compose(this.<Response<Boolean>>bindToLifecycle())
                .subscribe(new ProgressObserverAdapter<Response<Boolean>>(getActivity(),
                        R.string.deleting_comment) {

                    @Override
                    public void onSuccess(Response<Boolean> response) {
                        super.onSuccess(response);

                        // Update comment list
                        if (comments != null) {
                            int position = Collections.binarySearch(comments, comment,
                                    (lhs, rhs) -> Integer.valueOf(lhs.id()).compareTo(rhs.id()));
                            comments.remove(position);
                            updateList(gist, comments);
                        } else {
                            refreshGist();
                        }
                    }

                    @Override
                    public void onError(Throwable e) {
                        super.onError(e);
                        Log.d(TAG, "Exception deleting comment on gist", e);
                        ToastUtils.show((Activity) getContext(), e.getMessage());
                    }
                }.start());
        break;
    }
}