List of usage examples for the `java.util.Queue#isEmpty()` method, collected from open-source projects.
boolean isEmpty();
From source file:it.scoppelletti.mobilepower.app.FragmentLayoutController.java
/**
 * Restores the instance state.
 *
 * <P>The activity restores the state of the {@code FragmentLayoutController}
 * instance from within its own {@code onRestoreInstanceState} method.</P>
 *
 * @param savedInstanceState Saved instance state.
 * @param fragmentCollector  Collector of the detail fragments.
 */
public void onRestoreInstanceState(Bundle savedInstanceState,
        FragmentLayoutController.FragmentCollector fragmentCollector) {
    int n, oldPanelCount, tnId;
    String tag;
    ActivitySupport activitySupport;
    FragmentSupport fragment;
    FragmentManager fragmentMgr;
    FragmentLayoutController.BackStackChangedListener backStackListener;
    Queue<FragmentSupport> fragmentQueue;
    Queue<FragmentLayoutController.FragmentEntry> clonedQueue;

    if (savedInstanceState == null) {
        throw new NullPointerException("Argument savedInstanceState is null.");
    }
    if (fragmentCollector == null) {
        throw new NullPointerException("Argument fragmentCollector is null.");
    }
    if (!(myActivity instanceof ActivitySupport)) {
        myLogger.warn("Activity not implement interface ActivitySupport.");
        return;
    }

    oldPanelCount = savedInstanceState.getInt(FragmentLayoutController.STATE_PANELCOUNT, 0);
    if (oldPanelCount < 1) {
        myLogger.warn("Unexpected {}={} in saved instance state.",
                FragmentLayoutController.STATE_PANELCOUNT, oldPanelCount);
        return;
    }

    myLogger.debug("{}: current={}, saved instance state={}.",
            new Object[] { FragmentLayoutController.STATE_PANELCOUNT, myFrameCount, oldPanelCount });
    if (oldPanelCount == myFrameCount) {
        // The number of panels has not changed:
        // the system has already restored the fragments correctly.
        return;
    }

    // Collect the currently attached detail fragments.
    fragmentQueue = new ArrayDeque<FragmentSupport>();
    fragmentCollector.collectFragments(fragmentQueue);

    // Associate each fragment with the tag it was inserted with;
    // fragments without a tag cannot be re-attached and are skipped.
    clonedQueue = new ArrayDeque<FragmentLayoutController.FragmentEntry>();
    while (!fragmentQueue.isEmpty()) {
        fragment = fragmentQueue.remove();
        if (fragment == null) {
            myLogger.warn("Ignoring null.");
            continue;
        }

        tag = fragment.asFragment().getTag();
        if (StringUtils.isBlank(tag)) {
            myLogger.warn("Ignoring fragment with empty tag.");
            continue;
        }

        clonedQueue.offer(new FragmentLayoutController.FragmentEntry(fragment.cloneFragment(), tag));
    }

    fragmentQueue = null; // free memory
    activitySupport = (ActivitySupport) myActivity;
    fragmentMgr = activitySupport.getSupportFragmentManager();

    // Restore the initial fragment configuration by popping the whole back stack.
    for (n = fragmentMgr.getBackStackEntryCount(); n > 0; n--) {
        fragmentMgr.popBackStack();
    }

    // Re-arrange the cloned fragments into the new panel layout.
    if (myFrameCount > 1) {
        tnId = arrangeFragments(fragmentMgr, clonedQueue);
    } else {
        tnId = arrangePanel(fragmentMgr, clonedQueue);
    }

    if (Build.VERSION.SDK_INT < BuildCompat.VERSION_CODES.HONEYCOMB) {
        return;
    }

    // - Android 4.1.2
    // The action bar is not correctly updated, perhaps because the system
    // assumes it is not needed for transactions scheduled during activity
    // restore (or perhaps scheduling transactions during restore is simply
    // not supported):
    // Since transaction execution is asynchronous, we must use a
    // back-stack-changed listener that handles the last transaction we
    // scheduled.
    backStackListener = new FragmentLayoutController.BackStackChangedListener(myActivity, fragmentMgr, tnId);
    fragmentMgr.addOnBackStackChangedListener(backStackListener);
}
From source file:edu.northwestern.jcr.adapter.fedora.persistence.FedoraConnector.java
/** * Gets a list of all descendants of a given object in Fedora * repository through resource index, applying the filter * if available./*from w w w. j a v a 2s . c o m*/ * The result is in CSV format as if it is generated directly * from resouce index. * * @param pid pid of the object * @param filter filter condition applied - null if there is no filter * @return list of pid of the descendants that satisfy the filter condition */ public String[] listDescendantsRI(String pid, String filter) throws Exception { String[] members; Map<String, String> pathMap; Queue<String> queue; List<String> resultList; String nextPID; String parentPath; pathMap = new HashMap<String, String>(); queue = new LinkedList<String>(); resultList = new ArrayList<String>(); if (pid == null) { try { members = listObjectsRI(null); } catch (Exception e) { throw e; } } else { // to be implemented members = listMembers(pid, null); } for (String member : members) { queue.add(member); pathMap.put(member, member); } if (filter != null) { if (pid == null) { try { members = listObjectsRI(filter); } catch (Exception e) { throw e; } } else { // to be implemented members = listMembers(pid, filter); } } // add only those satisfying the filter to the result list for (String member : members) { resultList.add(member); } while (!queue.isEmpty()) { nextPID = queue.remove(); parentPath = pathMap.get(nextPID); members = listMembers(nextPID, null); for (String member : members) { queue.add(member); pathMap.put(member, parentPath + "," + member); } if (filter != null) { members = listMembers(nextPID, filter); } // add only those satisfying the filter to the result list for (String member : members) { resultList.add(parentPath + "," + member); } } return (String[]) resultList.toArray(new String[0]); }
From source file:org.apache.pdfbox.pdfparser.NonSequentialPDFParser.java
/**
 * Will parse every object necessary to load a single page from the pdf document.
 * We try our best to order objects according to offset in file before reading
 * to minimize seek operations.
 *
 * @param dict the COSObject from the parent pages.
 * @param excludeObjects dictionary object reference entries with these names will not be parsed
 *
 * @throws IOException if an object stream reference in the xref table is invalid
 */
private void parseDictObjects(COSDictionary dict, COSName... excludeObjects) throws IOException {
    // ---- create queue for objects waiting for further parsing
    final Queue<COSBase> toBeParsedList = new LinkedList<COSBase>();
    // offset ordered object map
    final TreeMap<Long, List<COSObject>> objToBeParsed = new TreeMap<Long, List<COSObject>>();
    // in case of compressed objects offset points to stmObj
    final Set<Long> parsedObjects = new HashSet<Long>();
    final Set<Long> addedObjects = new HashSet<Long>();

    // ---- add objects not to be parsed to list of already parsed objects
    if (excludeObjects != null) {
        for (COSName objName : excludeObjects) {
            COSBase baseObj = dict.getItem(objName);
            if (baseObj instanceof COSObject) {
                parsedObjects.add(getObjectId((COSObject) baseObj));
            }
        }
    }

    addNewToList(toBeParsedList, dict.getValues(), addedObjects);

    // ---- go through objects to be parsed; loop until both the work queue
    // and the offset-ordered map are drained
    while (!(toBeParsedList.isEmpty() && objToBeParsed.isEmpty())) {
        // -- first get all COSObject from other kind of objects and
        // put them in objToBeParsed; afterwards toBeParsedList is empty
        COSBase baseObj;
        while ((baseObj = toBeParsedList.poll()) != null) {
            if (baseObj instanceof COSStream) {
                addNewToList(toBeParsedList, ((COSStream) baseObj).getValues(), addedObjects);
            } else if (baseObj instanceof COSDictionary) {
                addNewToList(toBeParsedList, ((COSDictionary) baseObj).getValues(), addedObjects);
            } else if (baseObj instanceof COSArray) {
                final Iterator<COSBase> arrIter = ((COSArray) baseObj).iterator();
                while (arrIter.hasNext()) {
                    addNewToList(toBeParsedList, arrIter.next(), addedObjects);
                }
            } else if (baseObj instanceof COSObject) {
                COSObject obj = (COSObject) baseObj;
                long objId = getObjectId(obj);
                COSObjectKey objKey = new COSObjectKey(obj.getObjectNumber().intValue(),
                        obj.getGenerationNumber().intValue());

                if (!(parsedObjects.contains(objId) /*|| document.hasObjectInPool( objKey ) */ )) {
                    Long fileOffset = xrefTrailerResolver.getXrefTable().get(objKey);
                    // it is allowed that object references point to null, thus we have to test
                    if (fileOffset != null) {
                        if (fileOffset > 0) {
                            objToBeParsed.put(fileOffset, Collections.singletonList(obj));
                        } else {
                            // negative offset means we have a compressed object within object stream;
                            // get offset of object stream
                            fileOffset = xrefTrailerResolver.getXrefTable()
                                    .get(new COSObjectKey(-fileOffset, 0));
                            if ((fileOffset == null) || (fileOffset <= 0)) {
                                throw new IOException(
                                        "Invalid object stream xref object reference: " + fileOffset);
                            }
                            // group all objects of the same object stream under its offset
                            List<COSObject> stmObjects = objToBeParsed.get(fileOffset);
                            if (stmObjects == null) {
                                objToBeParsed.put(fileOffset, stmObjects = new ArrayList<COSObject>());
                            }
                            stmObjects.add(obj);
                        }
                    } else {
                        // NULL object
                        COSObject pdfObject = document.getObjectFromPool(objKey);
                        pdfObject.setObject(COSNull.NULL);
                    }
                }
            }
        }

        // ---- read first COSObject with smallest offset;
        // resulting object will be added to toBeParsedList
        if (objToBeParsed.isEmpty()) {
            break;
        }

        for (COSObject obj : objToBeParsed.remove(objToBeParsed.firstKey())) {
            COSBase parsedObj = parseObjectDynamically(obj, false);
            obj.setObject(parsedObj);
            addNewToList(toBeParsedList, parsedObj, addedObjects);
            parsedObjects.add(getObjectId(obj));
        }
    }
}
From source file:ome.services.graphs.GraphPathBean.java
/**
 * Process the Hibernate domain object model to initialize this class' instance fields.
 * No other method should write to them.
 *
 * @param sessionFactory the Hibernate session factory
 */
private void initialize(SessionFactoryImplementor sessionFactory) {
    /* note all the direct superclasses */
    final Map<String, String> superclasses = new HashMap<String, String>();
    final Map<String, ClassMetadata> classesMetadata = sessionFactory.getAllClassMetadata();
    for (final String className : classesMetadata.keySet()) {
        try {
            final Class<?> actualClass = Class.forName(className);
            if (IObject.class.isAssignableFrom(actualClass)) {
                classesBySimpleName.put(actualClass.getSimpleName(), actualClass.asSubclass(IObject.class));
                final Set<String> subclassNames = sessionFactory.getEntityPersister(className)
                        .getEntityMetamodel().getSubclassEntityNames();
                for (final String subclassName : subclassNames) {
                    if (!subclassName.equals(className)) {
                        final Class<?> actualSubclass = Class.forName(subclassName);
                        // record only direct superclass relationships here
                        if (actualSubclass.getSuperclass() == actualClass) {
                            superclasses.put(subclassName, className);
                        }
                    }
                }
            } else {
                log.warn("mapped class " + className + " is not a " + IObject.class.getName());
            }
        } catch (ClassNotFoundException e) {
            log.error("could not instantiate class", e);
        }
    }
    /* note the indirect superclasses and subclasses */
    for (final Entry<String, String> superclassRelationship : superclasses.entrySet()) {
        final String startClass = superclassRelationship.getKey();
        String superclass = superclassRelationship.getValue();
        // walk up the superclass chain, recording every ancestor/descendant pair
        while (superclass != null) {
            allSuperclasses.put(startClass, superclass);
            allSubclasses.put(superclass, startClass);
            superclass = superclasses.get(superclass);
        }
    }
    /* queue for processing all the properties of all the mapped entities: name, type, nullability */
    final Queue<PropertyDetails> propertyQueue = new LinkedList<PropertyDetails>();
    final Map<String, Set<String>> allPropertyNames = new HashMap<String, Set<String>>();
    for (final Entry<String, ClassMetadata> classMetadata : classesMetadata.entrySet()) {
        final String className = classMetadata.getKey();
        final ClassMetadata metadata = classMetadata.getValue();
        /* note name of identifier property */
        classIdProperties.put(metadata.getEntityName(), metadata.getIdentifierPropertyName());
        /* queue other properties */
        final String[] propertyNames = metadata.getPropertyNames();
        final Type[] propertyTypes = metadata.getPropertyTypes();
        final boolean[] propertyNullabilities = metadata.getPropertyNullability();
        for (int i = 0; i < propertyNames.length; i++) {
            final List<String> propertyPath = Collections.singletonList(propertyNames[i]);
            propertyQueue.add(
                    new PropertyDetails(className, propertyPath, propertyTypes[i], propertyNullabilities[i]));
        }
        final Set<String> propertyNamesSet = new HashSet<String>(propertyNames.length);
        propertyNamesSet.addAll(Arrays.asList(propertyNames));
        allPropertyNames.put(className, propertyNamesSet);
    }
    /* process each property to note entity linkages */
    while (!propertyQueue.isEmpty()) {
        final PropertyDetails property = propertyQueue.remove();
        if (ignoreProperty(property.path.get(property.path.size() - 1))) {
            continue;
        }
        /* if the property has a component type, queue the parts for processing */
        if (property.type instanceof ComponentType) {
            final ComponentType componentType = (ComponentType) property.type;
            final String[] componentPropertyNames = componentType.getPropertyNames();
            final Type[] componentPropertyTypes = componentType.getSubtypes();
            final boolean[] componentPropertyNullabilities = componentType.getPropertyNullability();
            for (int i = 0; i < componentPropertyNames.length; i++) {
                final List<String> componentPropertyPath = new ArrayList<String>(property.path.size() + 1);
                componentPropertyPath.addAll(property.path);
                componentPropertyPath.add(componentPropertyNames[i]);
                propertyQueue.add(new PropertyDetails(property.holder, componentPropertyPath,
                        componentPropertyTypes[i], componentPropertyNullabilities[i]));
            }
        } else {
            /* determine if another mapped entity class is linked by this property */
            final boolean isAssociatedEntity;
            if (property.type instanceof CollectionType) {
                final CollectionType ct = (CollectionType) property.type;
                isAssociatedEntity = sessionFactory.getCollectionPersister(ct.getRole()).getElementType()
                        .isEntityType();
            } else {
                isAssociatedEntity = property.type instanceof AssociationType;
            }
            /* the property can link to entities, so process it further */
            String propertyPath = Joiner.on('.').join(property.path);
            /* find if the property is accessible (e.g., not protected) */
            boolean propertyIsAccessible = false;
            String classToInstantiateName = property.holder;
            Class<?> classToInstantiate = null;
            try {
                classToInstantiate = Class.forName(classToInstantiateName);
                // abstract holders cannot be instantiated; probe via a concrete subclass instead
                while (Modifier.isAbstract(classToInstantiate.getModifiers())) {
                    classToInstantiateName = allSubclasses.get(classToInstantiateName).iterator().next();
                    classToInstantiate = Class.forName(classToInstantiateName);
                }
                try {
                    PropertyUtils.getNestedProperty(classToInstantiate.newInstance(), propertyPath);
                    propertyIsAccessible = true;
                } catch (NoSuchMethodException e) {
                    /* expected for collection properties */
                } catch (NestedNullException e) {
                    log.warn("guessing " + propertyPath + " of " + property.holder + " to be accessible");
                    propertyIsAccessible = true;
                }
            } catch (ReflectiveOperationException e) {
                log.error("could not probe property " + propertyPath + " of " + property.holder, e);
                continue;
            }
            /* build property report line for log: '--' = nullable link, '==' = required link */
            final char arrowShaft = property.isNullable ? '-' : '=';
            final StringBuffer sb = new StringBuffer();
            sb.append(property.holder);
            sb.append(' ');
            for (final String propertyName : property.path) {
                sb.append(arrowShaft);
                sb.append(arrowShaft);
                sb.append(propertyName);
            }
            sb.append(arrowShaft);
            sb.append(arrowShaft);
            sb.append("> ");
            final String valueClassName;
            if (isAssociatedEntity) {
                valueClassName = ((AssociationType) property.type).getAssociatedEntityName(sessionFactory);
                sb.append(valueClassName);
            } else {
                valueClassName = null;
                sb.append("value");
            }
            if (property.type.isCollectionType()) {
                sb.append("[]");
            }
            if (!propertyIsAccessible) {
                sb.append(" (inaccessible)");
            }
            /* determine from which class the property is inherited, if at all */
            String superclassWithProperty = null;
            String currentClass = property.holder;
            while (true) {
                currentClass = superclasses.get(currentClass);
                if (currentClass == null) {
                    break;
                } else if (allPropertyNames.get(currentClass).contains(property.path.get(0))) {
                    superclassWithProperty = currentClass;
                }
            }
            /* check if the property actually comes from an interface */
            final String declaringClassName = superclassWithProperty == null ? property.holder
                    : superclassWithProperty;
            final Class<? extends IObject> interfaceForProperty = getInterfaceForProperty(declaringClassName,
                    property.path.get(0));
            /* report where the property is declared */
            if (superclassWithProperty != null) {
                sb.append(" from ");
                sb.append(superclassWithProperty);
            } else {
                if (interfaceForProperty != null) {
                    sb.append(" see ");
                    sb.append(interfaceForProperty.getName());
                    /* It would be nice to set PropertyDetails to have the interface as the holder,
                     * but then properties would not be unique by declarer class and instance ID. */
                }
                /* entity linkages by non-inherited properties are recorded */
                if (valueClassName == null && property.path.size() > 1) {
                    /* assume that the top-level property suffices for describing simple properties */
                    log.debug("recording " + propertyPath + " as " + property.path.get(0));
                    propertyPath = property.path.get(0);
                }
                final Entry<String, String> classPropertyName = Maps.immutableEntry(property.holder,
                        propertyPath);
                if (valueClassName == null) {
                    simpleProperties.put(property.holder, propertyPath);
                } else {
                    linkedTo.put(property.holder, Maps.immutableEntry(valueClassName, propertyPath));
                    linkedBy.put(valueClassName, classPropertyName);
                }
                final PropertyKind propertyKind;
                if (property.type.isCollectionType()) {
                    propertyKind = PropertyKind.COLLECTION;
                } else if (property.isNullable) {
                    propertyKind = PropertyKind.OPTIONAL;
                } else {
                    propertyKind = PropertyKind.REQUIRED;
                }
                propertyKinds.put(classPropertyName, propertyKind);
                if (propertyIsAccessible) {
                    accessibleProperties.add(classPropertyName);
                }
            }
            if (log.isDebugEnabled()) {
                log.debug(sb.toString());
            }
        }
    }
    log.info("initialized graph path bean with " + propertyKinds.size() + " properties");
}
From source file:com.dbay.apns4j.impl.ApnsConnectionImpl.java
/**
 * Starts a background worker thread that blocks reading the APNs socket for an
 * error response. On an error response it notifies the error handler, re-queues
 * the notifications cached after the failed one for resending, and finally
 * closes the socket.
 */
private void startErrorWorker() {
    Thread thread = new Thread(new Runnable() {
        @Override
        public void run() {
            // Capture the socket this worker is bound to; the field may be
            // replaced by a reconnect while we run.
            Socket curSocket = socket;
            try {
                if (!isSocketAlive(curSocket)) {
                    return;
                }
                InputStream socketIs = curSocket.getInputStream();
                byte[] res = new byte[ERROR_RESPONSE_BYTES_LENGTH];
                int size = 0;
                while (true) {
                    try {
                        size = socketIs.read(res);
                        if (size > 0 || size == -1) {
                            // break, when something was read or there is no
                            // data any more
                            break;
                        }
                    } catch (SocketTimeoutException e) {
                        // There is no data. Keep reading.
                        Thread.sleep(10);
                    }
                }
                int command = res[0];
                /**
                 * Error response: close the socket and resend the
                 * notifications that were queued after the failed one.
                 */
                if (size == res.length && command == Command.ERROR) {
                    int status = res[1];
                    int errorId = ApnsTools.parse4ByteInt(res[2], res[3], res[4], res[5]);
                    String token = ErrorResponse.desc(status);
                    // Notify the caller-supplied handler of the failed notification.
                    if (null != errorProcessHandler) {
                        errorProcessHandler.process(errorId, status, token);
                    }
                    if (logger.isInfoEnabled()) {
                        logger.info(String.format(
                                "%s, %s Received error response. status: %s, id: %s, error-desc: %s",
                                serviceName, connName, status, errorId, token));
                    }
                    Queue<PushNotification> resentQueue = new LinkedList<PushNotification>();
                    synchronized (lock) {
                        boolean found = false;
                        errorHappendedLastConn = true;
                        while (!notificationCachedQueue.isEmpty()) {
                            PushNotification pn = notificationCachedQueue.poll();
                            if (pn.getId() == errorId) {
                                found = true;
                            } else {
                                /**
                                 * https://developer.apple.com/library/ios/
                                 * documentation
                                 * /NetworkingInternet/Conceptual
                                 * /RemoteNotificationsPG
                                 * /Chapters/CommunicatingWIthAPS.html As
                                 * the document said, add the notifications
                                 * which need be resent to the queue. Igonre
                                 * the error one
                                 */
                                if (found) {
                                    resentQueue.add(pn);
                                }
                            }
                        }
                        if (!found) {
                            logger.warn(connName
                                    + " Didn't find error-notification in the queue. Maybe it's time to adjust cache length. id: "
                                    + errorId);
                        }
                    }
                    // resend notifications
                    if (!resentQueue.isEmpty()) {
                        ApnsResender.getInstance().resend(name, resentQueue);
                    }
                } else {
                    // ignore and continue reading
                    logger.error(
                            connName + " Unexpected command or size. commend: " + command + " , size: " + size);
                }
            } catch (Exception e) {
                // NOTE(review): stack trace is deliberately dropped here;
                // only the message is logged.
                // logger.error(connName + " " + e.getMessage(), e);
                logger.error(connName + " " + e.getMessage());
            } finally {
                /**
                 * Close the old socket, although it may have been closed
                 * once before.
                 */
                closeSocket(curSocket);
            }
        }
    });
    thread.start();
}
From source file:de.tudarmstadt.ukp.dkpro.lexsemresource.graph.EntityGraphJGraphT.java
/**
 * Creates the hyponym map, that maps from nodes to their (recursive) number of hyponyms for
 * each node. "recursive" means that the hyponyms of hyponyms are also taken into account.
 *
 * @throws UnsupportedOperationException
 * @throws LexicalSemanticResourceException
 */
private Map<String, Integer> getHyponymCountMap() throws LexicalSemanticResourceException {
    // do only create hyponymMap, if it was not already computed
    if (hyponymCountMap != null) {
        return hyponymCountMap;
    }

    // work on the lcc, otherwise this is not going to work
    // EntityGraphJGraphT lcc = this;
    EntityGraphJGraphT lcc = this.getLargestConnectedComponent();
    lcc.removeCycles();
    int nrOfNodes = lcc.getNumberOfNodes();

    // cache file name encodes graph id and case sensitivity
    File hyponymCountMapSerializedFile = new File(
            getGraphId() + "_" + hyponymCountMapFilename + (lexSemRes.getIsCaseSensitive() ? "-cs" : "-cis"));
    hyponymCountMap = new HashMap<String, Integer>();

    // Load from the serialized cache when available; fail if it is stale.
    if (hyponymCountMapSerializedFile.exists()) {
        logger.info("Loading saved hyponymyCountMap ...");
        hyponymCountMap = EntityGraphUtils.deserializeMap(hyponymCountMapSerializedFile);
        if (hyponymCountMap.size() != nrOfNodes) {
            throw new LexicalSemanticResourceException(
                    "HyponymCountMap does not contain an entry for each node in the graph."
                            + hyponymCountMap.size() + "/" + nrOfNodes);
        }
        logger.info("Done loading saved hyponymyCountMap");
        return hyponymCountMap;
    }

    hyponymCountMap = new HashMap<String, Integer>();

    // a queue holding the nodes to process
    Queue<String> queue = new LinkedList<String>();

    // In the entity graph a node may have more than one father.
    // Thus, we check whether a node was already visited.
    // Then, it is not expanded again.
    Set<String> visited = new HashSet<String>();

    // initialize the queue with all leaf nodes
    Set<String> leafNodes = new HashSet<String>();
    for (Entity leaf : lcc.getLeaves()) {
        leafNodes.add(leaf.getId());
    }
    queue.addAll(leafNodes);

    logger.info(leafNodes.size() + " leaf nodes.");

    ProgressMeter progress = new ProgressMeter(getNumberOfNodes());

    // while the queue is not empty: bottom-up aggregation from leaves to roots
    while (!queue.isEmpty()) {
        // remove first element from queue
        String currNodeId = queue.poll();
        Entity currNode = lexSemRes.getEntityById(currNodeId);

        // in some rare cases, getEntityById might fail - so better check for nulls and fail
        // gracefully
        if (currNode == null) {
            visited.add(currNodeId);
            hyponymCountMap.put(currNodeId, 0);
        }

        logger.debug(queue.size());

        if (visited.contains(currNodeId)) {
            continue;
        }

        progress.next();

        if (logger.isDebugEnabled()) {
            logger.debug(progress + " - " + queue.size() + " left in queue");
        } else if (logger.isInfoEnabled() && (progress.getCount() % 100 == 0)) {
            logger.info(progress + " - " + queue.size() + " left in queue");
        }

        Set<Entity> children = lcc.getChildren(currNode);
        Set<String> invalidChildIds = new HashSet<String>();
        int validChildren = 0;
        int sumChildHyponyms = 0;
        boolean invalid = false;
        for (Entity child : children) {
            if (lcc.containsVertex(child)) {
                if (hyponymCountMap.containsKey(child.getId())) {
                    sumChildHyponyms += hyponymCountMap.get(child.getId());
                    validChildren++;
                } else {
                    invalid = true;
                    invalidChildIds.add(child.getId());
                }
            }
        }

        // we cannot use continue directly if invalid as this would continue the inner loop not
        // the outer loop
        if (invalid) {
            // One of the childs is not in the hyponymCountMap yet
            // Re-Enter the node into the queue and continue with next node
            // Also enter all the childs that are not in the queue yet
            queue.add(currNodeId);
            for (String childId : invalidChildIds) {
                if (!visited.contains(childId) && !queue.contains(childId)) {
                    queue.add(childId);
                }
            }
            continue;
        }

        // mark as visited
        visited.add(currNodeId);

        // number of hyponomys of current node is the number of its own hyponyms and the sum of
        // the hyponyms of its children.
        int currNodeHyponomyCount = validChildren + sumChildHyponyms;
        hyponymCountMap.put(currNodeId, currNodeHyponomyCount);

        // add parents of current node to queue
        for (Entity parent : lcc.getParents(currNode)) {
            if (lcc.containsVertex(parent)) {
                queue.add(parent.getId());
            }
        }
    } // while queue not empty

    logger.info(visited.size() + " nodes visited");
    if (visited.size() != nrOfNodes) {
        List<Entity> missed = new ArrayList<Entity>();
        for (Entity e : lcc.getNodes()) {
            if (!visited.contains(e.getId())) {
                missed.add(e);
                System.out.println("Missed: [" + e + "]");
            }
        }
        throw new LexicalSemanticResourceException(
                "Visited only " + visited.size() + " out of " + nrOfNodes + " nodes.");
    }
    if (hyponymCountMap.size() != nrOfNodes) {
        throw new LexicalSemanticResourceException(
                "HyponymCountMap does not contain an entry for each node in the graph."
                        + hyponymCountMap.size() + "/" + nrOfNodes);
    }

    /*
     * As an EntityGraph is a graph rather than a tree, the hyponymCount for top nodes can be
     * greater than the number of nodes in the graph. This is due to the multiple counting of nodes
     * having more than one parent. Thus, we have to scale hyponym counts to fall in
     * [0,NumberOfNodes].
     */
    for (String key : hyponymCountMap.keySet()) {
        if (hyponymCountMap.get(key) > hyponymCountMap.size()) {
            // TODO scaling function is not optimal (to say the least :)
            hyponymCountMap.put(key, (hyponymCountMap.size() - 1));
        }
    }

    logger.info("Computed hyponymCountMap");
    EntityGraphUtils.serializeMap(hyponymCountMap, hyponymCountMapSerializedFile);
    logger.info("Serialized hyponymCountMap");

    return hyponymCountMap;
}
From source file:org.codice.ddf.spatial.ogc.csw.catalog.endpoint.CswEndpointTest.java
private List<QueryResponse> getQueryResponseBatch(int batchSize, int total) { Queue<Result> results = new ArrayDeque<>(); for (int i = 1; i <= total; i++) { MetacardImpl metacard = new MetacardImpl(); metacard.setId(i + ""); results.add(new ResultImpl(metacard)); }/*from w w w . j ava 2 s . c o m*/ List<QueryResponse> queryResponses = new ArrayList<>(); while (!results.isEmpty()) { List<Result> batchList = new ArrayList<>(); for (int i = 0; i < batchSize; i++) { Result result = results.poll(); if (result == null) { break; } batchList.add(result); } queryResponses.add(new QueryResponseImpl(null, batchList, total)); } // Add one empty response list to the end queryResponses.add(new QueryResponseImpl(null, Collections.emptyList(), 0)); return queryResponses; }
From source file:org.apache.synapse.transport.nhttp.HttpCoreNIOListener.java
/**
 * Opens listener endpoints on every configured bind address and blocks until
 * each endpoint is ready to accept requests.
 *
 * @throws AxisFault on transport configuration failure
 */
private void startEndpoints() throws AxisFault {
    Queue<ListenerEndpoint> endpoints = new LinkedList<ListenerEndpoint>();

    // Gather all addresses to bind: connection-factory addresses, plus the
    // explicitly configured bind address, falling back to the wildcard address.
    Set<InetSocketAddress> addressSet = new HashSet<InetSocketAddress>();
    addressSet.addAll(connFactory.getBindAddresses());
    if (NHttpConfiguration.getInstance().getMaxActiveConnections() != -1) {
        addMaxConnectionCountController(NHttpConfiguration.getInstance().getMaxActiveConnections());
    }
    if (listenerContext.getBindAddress() != null) {
        addressSet.add(new InetSocketAddress(listenerContext.getBindAddress(), listenerContext.getPort()));
    }
    if (addressSet.isEmpty()) {
        addressSet.add(new InetSocketAddress(listenerContext.getPort()));
    }

    // Ensure simple but stable order
    List<InetSocketAddress> addressList = new ArrayList<InetSocketAddress>(addressSet);
    Collections.sort(addressList, new Comparator<InetSocketAddress>() {
        public int compare(InetSocketAddress a1, InetSocketAddress a2) {
            String s1 = a1.toString();
            String s2 = a2.toString();
            return s1.compareTo(s2);
        }
    });
    for (InetSocketAddress address : addressList) {
        endpoints.add(ioReactor.listen(address));
    }

    // Wait for the endpoint to become ready, i.e. for the listener to start accepting
    // requests.
    while (!endpoints.isEmpty()) {
        ListenerEndpoint endpoint = endpoints.remove();
        try {
            endpoint.waitFor();
            if (log.isInfoEnabled()) {
                InetSocketAddress address = (InetSocketAddress) endpoint.getAddress();
                if (!address.isUnresolved()) {
                    log.info(name + " started on " + address.getHostName() + ":" + address.getPort());
                } else {
                    log.info(name + " started on " + address);
                }
            }
        } catch (InterruptedException e) {
            // NOTE(review): the interrupt flag is not restored here
            // (Thread.currentThread().interrupt()) — confirm this is intentional.
            log.warn("Listener startup was interrupted");
            break;
        }
    }
}
From source file:org.apache.pdfbox.pdfparser.COSParser.java
/**
 * Will parse every object necessary to load a single page from the pdf document. We try our
 * best to order objects according to offset in file before reading to minimize seek operations.
 *
 * @param dict the COSObject from the parent pages.
 * @param excludeObjects dictionary object reference entries with these names will not be parsed
 *
 * @throws IOException if something went wrong
 */
protected void parseDictObjects(COSDictionary dict, COSName... excludeObjects) throws IOException {
    // ---- create queue for objects waiting for further parsing
    final Queue<COSBase> toBeParsedList = new LinkedList<COSBase>();
    // offset ordered object map
    final TreeMap<Long, List<COSObject>> objToBeParsed = new TreeMap<Long, List<COSObject>>();
    // in case of compressed objects offset points to stmObj
    final Set<Long> parsedObjects = new HashSet<Long>();
    final Set<Long> addedObjects = new HashSet<Long>();

    addExcludedToList(excludeObjects, dict, parsedObjects);
    addNewToList(toBeParsedList, dict.getValues(), addedObjects);

    // ---- go through objects to be parsed; loop until both the work queue
    // and the offset-ordered map are drained
    while (!(toBeParsedList.isEmpty() && objToBeParsed.isEmpty())) {
        // -- first get all COSObject from other kind of objects and
        // put them in objToBeParsed; afterwards toBeParsedList is empty
        COSBase baseObj;
        while ((baseObj = toBeParsedList.poll()) != null) {
            if (baseObj instanceof COSDictionary) {
                addNewToList(toBeParsedList, ((COSDictionary) baseObj).getValues(), addedObjects);
            } else if (baseObj instanceof COSArray) {
                final Iterator<COSBase> arrIter = ((COSArray) baseObj).iterator();
                while (arrIter.hasNext()) {
                    addNewToList(toBeParsedList, arrIter.next(), addedObjects);
                }
            } else if (baseObj instanceof COSObject) {
                COSObject obj = (COSObject) baseObj;
                long objId = getObjectId(obj);
                COSObjectKey objKey = new COSObjectKey(obj.getObjectNumber(), obj.getGenerationNumber());

                if (!parsedObjects.contains(objId)) {
                    Long fileOffset = xrefTrailerResolver.getXrefTable().get(objKey);
                    // it is allowed that object references point to null,
                    // thus we have to test
                    if (fileOffset != null && fileOffset != 0) {
                        if (fileOffset > 0) {
                            objToBeParsed.put(fileOffset, Collections.singletonList(obj));
                        } else {
                            // negative offset means we have a compressed
                            // object within object stream;
                            // get offset of object stream
                            fileOffset = xrefTrailerResolver.getXrefTable()
                                    .get(new COSObjectKey((int) -fileOffset, 0));
                            if ((fileOffset == null) || (fileOffset <= 0)) {
                                throw new IOException("Invalid object stream xref object reference for key '"
                                        + objKey + "': " + fileOffset);
                            }

                            // group all objects of the same object stream under its offset
                            List<COSObject> stmObjects = objToBeParsed.get(fileOffset);
                            if (stmObjects == null) {
                                stmObjects = new ArrayList<COSObject>();
                                objToBeParsed.put(fileOffset, stmObjects);
                            }
                            stmObjects.add(obj);
                        }
                    } else {
                        // NULL object
                        COSObject pdfObject = document.getObjectFromPool(objKey);
                        pdfObject.setObject(COSNull.NULL);
                    }
                }
            }
        }

        // ---- read first COSObject with smallest offset
        // resulting object will be added to toBeParsedList
        if (objToBeParsed.isEmpty()) {
            break;
        }

        for (COSObject obj : objToBeParsed.remove(objToBeParsed.firstKey())) {
            COSBase parsedObj = parseObjectDynamically(obj, false);
            obj.setObject(parsedObj);
            addNewToList(toBeParsedList, parsedObj, addedObjects);
            parsedObjects.add(getObjectId(obj));
        }
    }
}
From source file:org.aliuge.crawler.extractor.selector.IFConditions.java
/**
 * Evaluates this condition expression against the selected content.
 *
 * <p>The condition string (e.g. {@code a=b and c=d or c=e or x=y}) is split
 * at the logical connectives ({@code cond}), each sub-expression is parsed
 * with the comparison operators ({@code operations}) into a
 * {@link SimpleExpression}, and the results are folded left-to-right via
 * {@code cacl}. (The original comments were garbled non-ASCII text; the
 * exception messages below are left byte-identical.)
 *
 * @param selectContent map of field name to extracted value
 * @return the boolean value of the condition; {@code false} if evaluation fails
 * @throws ExtractException if a referenced field is missing from {@code selectContent}
 */
public boolean test(Map<String, Object> selectContent) throws ExtractException {
    // positions of every logical connective within the condition string
    TreeMap<Integer, String> conIndex = Maps.newTreeMap();
    // parsed comparison sub-expressions, in order of appearance
    Queue<SimpleExpression> expressionQueue = Queues.newArrayDeque();
    // logical connectives joining consecutive sub-expressions
    Queue<String> logicQueue = Queues.newArrayDeque();
    // a=b and c=d or c=e or x=y
    int index = 0;
    for (String co : cond) {
        index = 0;
        // NOTE(review): search starts at index + 1, so a connective at
        // position 0 would be missed — presumably conditions never start
        // with one; confirm against callers.
        while ((index = conditions.indexOf(co, index + 1)) > -1) {
            int i = index;
            conIndex.put(i, co);
        }
    }
    index = 0;
    // parse each sub-expression preceding a logical connective
    for (Entry<Integer, String> entry : conIndex.entrySet()) {
        String subExp = conditions.substring(index, entry.getKey());
        for (String op : operations) {
            int i = subExp.indexOf(op);
            if (i > -1) {
                String[] ss = subExp.split(op);
                if (null == selectContent.get(ss[0].trim())) {
                    throw new ExtractException("?????[" + this.conditions + "] " + ss[0]);
                }
                expressionQueue
                        .add(new SimpleExpression(StringUtils.trim((String) selectContent.get(ss[0].trim())),
                                StringUtils.trim(ss[1]), op));
                logicQueue.add(StringUtils.trim(entry.getValue()));
            }
        }
        index = entry.getKey() + entry.getValue().length();
    }
    // parse the trailing sub-expression after the last connective
    String subExp = conditions.substring(index);
    for (String op : operations) {
        int i = subExp.indexOf(op);
        if (i > -1) {
            String[] ss = subExp.split(op);
            if (null == selectContent.get(ss[0].trim())) {
                throw new ExtractException("?????[" + this.conditions + "] " + ss[0]);
            }
            expressionQueue.add(new SimpleExpression(StringUtils.trim((String) selectContent.get(ss[0].trim())),
                    StringUtils.trim(ss[1]), op));
        }
    }
    boolean b;
    try {
        // fold the expressions left-to-right using the queued connectives
        b = expressionQueue.poll().test();
        while (!expressionQueue.isEmpty()) {
            b = cacl(b, logicQueue.poll(), expressionQueue.poll());
        }
        return b;
    } catch (Exception e) {
        // NOTE(review): evaluation failures are swallowed and reported as
        // false — confirm this best-effort behavior is intended.
        e.printStackTrace();
    }
    return false;
}