List of usage examples for java.util.ListIterator.set
void set(E e);
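Before the project samples, here is a minimal self-contained sketch (not taken from any of the sources below; the class name ListIteratorSetDemo is invented for illustration) of the core contract: set(E e) replaces the element last returned by next() or previous(), and throws IllegalStateException if neither has been called since the last add() or remove().

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class ListIteratorSetDemo {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>(Arrays.asList("alice", "bob", "carol"));
        ListIterator<String> it = names.listIterator();
        while (it.hasNext()) {
            String current = it.next();
            // set() replaces "current", the element last returned by next()
            it.set(current.toUpperCase());
        }
        System.out.println(names); // [ALICE, BOB, CAROL]
    }
}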
From source file:org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLAuthorizationUtils.java
/**
 * Calls getValidatedPrincipal on each principal in the list and updates the list in place.
 * @param hivePrincipals
 * @return the validated list of principals
 * @throws HiveAuthzPluginException
 */
public static List<HivePrincipal> getValidatedPrincipals(List<HivePrincipal> hivePrincipals)
        throws HiveAuthzPluginException {
    ListIterator<HivePrincipal> it = hivePrincipals.listIterator();
    while (it.hasNext()) {
        it.set(getValidatedPrincipal(it.next()));
    }
    return hivePrincipals;
}
From source file:exm.stc.ic.ICUtil.java
/**
 * Replace the current instruction with the provided sequence.
 * After this is done, next() will return the instruction after
 * the inserted sequence.
 */
public static void replaceInsts(Block block, ListIterator<Statement> it,
        List<? extends Statement> replacements) {
    for (Statement stmt : replacements) {
        stmt.setParent(block);
    }
    if (replacements.size() == 1) {
        it.set(replacements.get(0));
    } else if (replacements.size() == 0) {
        it.remove();
    } else {
        it.set(replacements.get(0));
        List<? extends Statement> rest = replacements.subList(1, replacements.size());
        for (Statement newInst : rest) {
            it.add(newInst);
        }
    }
}
From source file:Main.java
private static <E> void updateList(List<E> origList, Collection<? extends E> updateList,
        boolean add, boolean replace, boolean remove,
        BiPredicate<? super E, ? super E> equalTester,
        BiConsumer<List<E>, Collection<? extends E>> adder) {
    List<E> itemsToRemove = null;
    List<E> itemsToAdd = null;
    for (E update : updateList) {
        boolean origListContainsUpdate = false;
        ListIterator<E> origIter = origList.listIterator();
        while (origIter.hasNext()) {
            E orig = origIter.next();
            if (equalTester.test(orig, update)) {
                origListContainsUpdate = true;
                if (remove) {
                    if (itemsToRemove == null) {
                        itemsToRemove = new ArrayList<>(origList);
                    }
                    itemsToRemove.remove(orig);
                }
                if (replace) {
                    origIter.set(update);
                }
                break;
            }
        }
        if (!origListContainsUpdate && add) {
            if (itemsToAdd == null) {
                itemsToAdd = new ArrayList<>();
            }
            itemsToAdd.add(update);
        }
    }
    if (remove) {
        if (itemsToRemove != null) {
            origList.removeAll(itemsToRemove);
        } else {
            origList.clear();
        }
    }
    if (itemsToAdd != null) {
        adder.accept(origList, itemsToAdd);
    }
}
From source file:com.linuxbox.enkive.docsearch.indri.IndriQueryComposer.java
/**
 * Iterate through all the terms in the phrase and sanitize each one in place.
 *
 * @param phrase
 */
protected static void sanitizePhraseInPlace(Phrase phrase) {
    ListIterator<CharSequence> i = phrase.getTermsListIterator();
    while (i.hasNext()) {
        CharSequence charSeq = i.next();
        // convert to a StringBuffer if not one already
        StringBuffer buffer;
        if (!(charSeq instanceof StringBuffer)) {
            buffer = new StringBuffer(charSeq);
        } else {
            buffer = (StringBuffer) charSeq;
        }
        sanitizeStringBuffer(buffer);
        // replace the term in the phrase
        i.set(buffer);
    }
}
From source file:exm.stc.ic.ICUtil.java
public static void replaceVarsInList(Map<Var, Arg> replacements, List<Var> vars,
        boolean removeDupes, boolean removeMapped) {
    // Remove new duplicates
    ArrayList<Var> alreadySeen = null;
    if (removeDupes) {
        alreadySeen = new ArrayList<Var>(vars.size());
    }
    ListIterator<Var> it = vars.listIterator();
    while (it.hasNext()) {
        Var v = it.next();
        if (replacements.containsKey(v)) {
            Arg oa = replacements.get(v);
            if (oa.isVar()) {
                if (removeDupes && alreadySeen.contains(oa.getVar())) {
                    it.remove();
                } else {
                    it.set(oa.getVar());
                    if (removeDupes) {
                        alreadySeen.add(oa.getVar());
                    }
                }
            }
        } else {
            if (removeDupes) {
                if (alreadySeen.contains(v)) {
                    it.remove();
                } else {
                    alreadySeen.add(v);
                }
            }
        }
    }
}
From source file:Main.java
/**
 * Returns a list iterator that swaps all previous/next calls.
 * <p><b>Important:</b> The returned iterator violates the {@link ListIterator#nextIndex()}
 * and {@link ListIterator#previousIndex()} specifications.
 */
public static <E> ListIterator<E> reverse(ListIterator<E> iterator) {
    return new ListIterator<E>() {
        @Override
        public boolean hasNext() {
            return iterator.hasPrevious();
        }

        @Override
        public E next() {
            return iterator.previous();
        }

        @Override
        public boolean hasPrevious() {
            return iterator.hasNext();
        }

        @Override
        public E previous() {
            return iterator.next();
        }

        @Override
        public int nextIndex() {
            return iterator.previousIndex();
        }

        @Override
        public int previousIndex() {
            return iterator.nextIndex();
        }

        @Override
        public void remove() {
            iterator.remove();
        }

        @Override
        public void set(E e) {
            iterator.set(e);
        }

        @Override
        public void add(E e) {
            iterator.add(e);
        }
    };
}
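A short usage sketch for the adapter above, assuming the demo lives in the same class as the reverse(...) helper (the class name ReverseSetDemo and the call site are invented for illustration): the reversed view walks the list back to front, and set() still replaces the element most recently returned, because it delegates straight to the underlying iterator.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class ReverseSetDemo {
    public static void main(String[] args) {
        List<String> items = new ArrayList<>(Arrays.asList("a", "b", "c"));
        // start the underlying iterator at the end so the reversed view begins at "c"
        ListIterator<String> rev = reverse(items.listIterator(items.size()));
        int n = 0;
        while (rev.hasNext()) {
            String s = rev.next(); // visits "c", then "b", then "a"
            rev.set(s + (++n));    // replaces s in the backing list
        }
        System.out.println(items); // [a3, b2, c1]
    }
}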
From source file:com.ppp.prm.portal.server.service.gwt.HibernateDetachUtility.java
/**
 * @param value the object needing to be detached/scrubbed.
 * @param checkedObjectMap This maps identityHashCodes to Objects we've already detached. In that way we can
 * quickly determine if we've already done the work for the incoming value and avoid traversing it again. This
 * works well almost all of the time, but it is possible that two different objects can have the same identity hash
 * (conflicts are always possible with a hash). In that case we utilize the checkedObjectCollisionMap (see below).
 * @param checkedObjectCollisionMap checkedObjectMap maps the identity hash to the *first* object with that hash. In
 * most cases there will only be a mapping for one hash, but it is possible to encounter the same hash for multiple
 * objects, especially on 32bit or IBM JVMs. It is important to know if an object has already been detached
 * because if it is somehow self-referencing, we have to stop the recursion. This map holds the 2nd..Nth mapping
 * for a single hash and is used to ensure we never try to detach an object already processed.
 * @param depth used to stop infinite recursion, defaults to a depth we don't expect to see, but it is configurable.
 * @param serializationType
 * @throws Exception if a problem occurs
 * @throws IllegalStateException if the recursion depth limit is reached
 */
private static void nullOutUninitializedFields(Object value, Map<Integer, Object> checkedObjectMap,
        Map<Integer, List<Object>> checkedObjectCollisionMap, int depth,
        SerializationType serializationType) throws Exception {
    if (depth > depthAllowed) {
        String warningMessage = "Recursed too deep [" + depth + " > " + depthAllowed
                + "], will not attempt to detach object of type ["
                + ((value != null) ? value.getClass().getName() : "N/A")
                + "]. This may cause serialization errors later. "
                + "You can try to work around this by setting the system property ["
                + DEPTH_ALLOWED_SYSPROP + "] to a value higher than [" + depth
                + "] or you can set the system property ["
                + THROW_EXCEPTION_ON_DEPTH_LIMIT_SYSPROP + "] to 'false'";
        LOG.warn(warningMessage);
        if (throwExceptionOnDepthLimit) {
            throw new IllegalStateException(warningMessage);
        }
        return;
    }

    if (null == value) {
        return;
    }

    // System.identityHashCode is a hash code, and therefore not guaranteed to be unique. And we've seen this
    // be the case. So, we use it to try and avoid duplicating work, but handle the case when two objects may
    // have an identity crisis.
    Integer valueIdentity = hashCodeGenerator.getHashCode(value);
    Object checkedObject = checkedObjectMap.get(valueIdentity);

    if (null == checkedObject) {
        // if we have not yet encountered an object with this hash, store it in our map and start scrubbing
        checkedObjectMap.put(valueIdentity, value);

    } else if (value == checkedObject) {
        // if we have scrubbed this already, no more work to be done
        return;

    } else {
        // we have a situation where multiple objects have the same identity hashcode, work with our
        // collision map to decide whether it needs to be scrubbed and add if necessary.
        // Note that this code block is infrequently hit, it is by design that we've pushed the extra
        // work, map, etc, involved for this infrequent case into its own block. The standard cases must
        // be as fast and lean as possible.
        boolean alreadyDetached = false;
        List<Object> collisionObjects = checkedObjectCollisionMap.get(valueIdentity);

        if (null == collisionObjects) {
            // if this is the 2nd occurrence for this hash, create a new map entry
            collisionObjects = new ArrayList<Object>(1);
            checkedObjectCollisionMap.put(valueIdentity, collisionObjects);
        } else {
            // if we have scrubbed this already, no more work to be done
            for (Object collisionObject : collisionObjects) {
                if (value == collisionObject) {
                    alreadyDetached = true;
                    break;
                }
            }
        }

        if (LOG.isDebugEnabled()) {
            StringBuilder message = new StringBuilder("\n\tIDENTITY HASHCODE COLLISION [hash=");
            message.append(valueIdentity);
            message.append(", alreadyDetached=");
            message.append(alreadyDetached);
            message.append("]");
            message.append("\n\tCurrent : ");
            message.append(value.getClass().getName());
            message.append("\n\t ");
            message.append(value);
            message.append("\n\tPrevious : ");
            message.append(checkedObject.getClass().getName());
            message.append("\n\t ");
            message.append(checkedObject);
            for (Object collisionObject : collisionObjects) {
                message.append("\n\tPrevious : ");
                message.append(collisionObject.getClass().getName());
                message.append("\n\t ");
                message.append(collisionObject);
            }
            LOG.debug(message);
        }

        // now that we've done our logging, if already detached we're done. Otherwise add to the list of collision
        // objects for this hash, and start scrubbing
        if (alreadyDetached) {
            return;
        }
        collisionObjects.add(value);
    }

    // Perform the detaching
    if (value instanceof Object[]) {
        Object[] objArray = (Object[]) value;
        for (int i = 0; i < objArray.length; i++) {
            Object listEntry = objArray[i];
            Object replaceEntry = replaceObject(listEntry);
            if (replaceEntry != null) {
                objArray[i] = replaceEntry;
            }
            nullOutUninitializedFields(objArray[i], checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof List) {
        // Null out any entries in initialized collections
        ListIterator i = ((List) value).listIterator();
        while (i.hasNext()) {
            Object val = i.next();
            Object replace = replaceObject(val);
            if (replace != null) {
                val = replace;
                i.set(replace);
            }
            nullOutUninitializedFields(val, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof Collection) {
        Collection collection = (Collection) value;
        Collection itemsToBeReplaced = new ArrayList();
        Collection replacementItems = new ArrayList();
        for (Object item : collection) {
            Object replacementItem = replaceObject(item);
            if (replacementItem != null) {
                itemsToBeReplaced.add(item);
                replacementItems.add(replacementItem);
                item = replacementItem;
            }
            nullOutUninitializedFields(item, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }
        collection.removeAll(itemsToBeReplaced);
        // watch out! if this collection is a Set, HashMap$MapSet doesn't support addAll. See BZ 688000
        collection.addAll(replacementItems);

    } else if (value instanceof Map) {
        Map originalMap = (Map) value;
        HashMap<Object, Object> replaceMap = new HashMap<Object, Object>();
        for (Iterator i = originalMap.keySet().iterator(); i.hasNext();) {
            // get original key and value - these might be hibernate proxies
            Object originalKey = i.next();
            Object originalKeyValue = originalMap.get(originalKey);

            // replace with non-hibernate classes, if appropriate (will be null otherwise)
            Object replaceKey = replaceObject(originalKey);
            Object replaceValue = replaceObject(originalKeyValue);

            // if either original key or original value was a hibernate proxy object, we have to
            // remove it from the original map, and remember the replacement objects for later
            if (replaceKey != null || replaceValue != null) {
                Object newKey = (replaceKey != null) ? replaceKey : originalKey;
                Object newValue = (replaceValue != null) ? replaceValue : originalKeyValue;
                replaceMap.put(newKey, newValue);
                i.remove();
            }
        }

        // all hibernate proxies have been removed, we need to replace them with their
        // non-proxy object representations that we got from replaceObject() calls
        originalMap.putAll(replaceMap);

        // now go through each item in the map and null out their internal fields
        for (Object key : originalMap.keySet()) {
            nullOutUninitializedFields(originalMap.get(key), checkedObjectMap, checkedObjectCollisionMap,
                    depth + 1, serializationType);
            nullOutUninitializedFields(key, checkedObjectMap, checkedObjectCollisionMap, depth + 1,
                    serializationType);
        }

    } else if (value instanceof Enum) {
        // don't need to detach enums, treat them as special objects
        return;
    }

    if (serializationType == SerializationType.JAXB) {
        XmlAccessorType at = value.getClass().getAnnotation(XmlAccessorType.class);
        if (at != null && at.value() == XmlAccessType.FIELD) {
            nullOutFieldsByFieldAccess(value, checkedObjectMap, checkedObjectCollisionMap, depth,
                    serializationType);
        } else {
            nullOutFieldsByAccessors(value, checkedObjectMap, checkedObjectCollisionMap, depth,
                    serializationType);
        }
    } else if (serializationType == SerializationType.SERIALIZATION) {
        nullOutFieldsByFieldAccess(value, checkedObjectMap, checkedObjectCollisionMap, depth,
                serializationType);
    }
}
From source file:org.apache.tajo.engine.planner.physical.RadixSort.java
/**
 * Entry method.
 *
 * @param list
 * @param schema input schema
 * @param sortSpecs sort specs
 * @param comp comparator for Tim sort
 * @return a sorted list of tuples
 */
public static List<UnSafeTuple> sort(QueryContext queryContext, UnSafeTupleList list, Schema schema,
        SortSpec[] sortSpecs, Comparator<UnSafeTuple> comp) {
    UnSafeTuple[] in = list.toArray(new UnSafeTuple[list.size()]);
    RadixSortContext context = new RadixSortContext(in, schema, sortSpecs, comp,
            queryContext.getInt(SessionVars.TEST_TIM_SORT_THRESHOLD_FOR_RADIX_SORT));

    long before = System.currentTimeMillis();
    recursiveCallForNextKey(context, 0, context.in.length, 0);
    context.msdRadixSortTime += System.currentTimeMillis() - before;
    context.printStat();

    // copy the sorted array back into the list in place via ListIterator.set
    ListIterator<UnSafeTuple> it = list.listIterator();
    for (UnSafeTuple t : context.in) {
        it.next();
        it.set(t);
    }
    return list;
}
From source file:se.inera.axel.shs.broker.ds.internal.HeaderToFilterConverter.java
public MessageLogService.Filter toFilter(@Header("producttype") String producttype,
        @Header("filter") String noAckfilter, @Header("maxhits") Integer maxHits,
        @Header("status") String status, @Header("corrid") String corrId,
        @Header("contentid") String contentId, @Header("originator") String originator,
        @Header("since") String since, @Header("sortattribute") String sortattribute,
        @Header("sortorder") String sortorder, @Header("arrivalorder") String arrivalorder,
        @Header("endrecipient") String endrecipient) {
    MessageLogService.Filter filter = new MessageLogService.Filter();

    if ("noack".equals(noAckfilter))
        filter.setNoAck(true);

    if (producttype != null) {
        List<String> productIds = Arrays.asList(StringUtils.split(producttype, ','));
        ListIterator<String> productIdsIterator = productIds.listIterator();
        while (productIdsIterator.hasNext()) {
            productIdsIterator.set(UrnProduct.valueOf(productIdsIterator.next()).getProductId());
        }
        filter.setProductIds(productIds);
    }

    filter.setOriginator(originator);
    filter.setEndRecipient(endrecipient);
    filter.setMaxHits(maxHits);
    filter.setCorrId(corrId);
    filter.setContentId(contentId);

    if (since != null) {
        try {
            filter.setSince(TimestampConverter.stringToDate(since));
        } catch (Exception e) {
            throw new IllegalArgumentException("timestamp format error on 'since': " + since);
        }
    }

    filter.setSortAttribute(sortattribute);
    filter.setArrivalOrder(arrivalorder);
    filter.setSortOrder(sortorder);

    if (status != null)
        filter.setStatus(Status.valueOf(status.toUpperCase()));

    return filter;
}