List of usage examples for the java.util.ArrayDeque copy constructor:
public ArrayDeque(Collection<? extends E> c)
From source file:org.carrot2.source.microsoft.v5.Bing5NewsDocumentSource.java
@Override protected void handleResponse(BingResponse response, SearchEngineResponse ser) { NewsResponse newsResponse = (NewsResponse) response; ser.metadata.put(SearchEngineResponse.RESULTS_TOTAL_KEY, newsResponse.totalEstimatedMatches); if (newsResponse.value != null) { ArrayDeque<NewsResponse.NewsArticle> articles = new ArrayDeque<>(newsResponse.value); while (!articles.isEmpty()) { NewsResponse.NewsArticle r = articles.removeFirst(); if (r.clusteredArticles != null) { articles.addAll(r.clusteredArticles); }// w w w .j a v a2s .c o m Document doc = new Document(r.name, r.description, r.url); if (r.image != null && r.image.thumbnail != null) { doc.setField(Document.THUMBNAIL_URL, r.image.thumbnail.contentUrl); } if (r.provider != null) { ArrayList<String> sources = new ArrayList<>(); for (NewsResponse.NewsArticle.Organization o : r.provider) { sources.add(o.name); } doc.setField(Document.SOURCES, sources); } ser.results.add(doc); } } }
From source file:org.llorllale.youtrack.api.mock.http.MockHttpClient.java
/**
 * Primary ctor.
 *
 * <p>Each call to
 * {@link #execute(org.apache.http.client.methods.HttpUriRequest)} returns the
 * {@code intermediateResponses} one by one, in the order given; once they are
 * exhausted, every subsequent call returns {@code finalResponse}. Useful for
 * testing the <em>streaming</em> functionalities.
 *
 * @param finalResponse the response returned after all intermediate
 *     responses have been consumed
 * @param intermediateResponses the mock {@link HttpResponse responses}
 *     returned before the {@code finalResponse}
 * @since 0.4.0
 */
public MockHttpClient(CloseableHttpResponse finalResponse,
        CloseableHttpResponse... intermediateResponses) {
    // Queue preserves the caller-supplied replay order.
    this.intermediateResponses = new ArrayDeque<>(Arrays.asList(intermediateResponses));
    this.finalResponse = finalResponse;
}
From source file:com.espertech.esper.event.EventTypeUtility.java
public static EventPropertyDescriptor getNestablePropertyDescriptor(EventType target, String propertyName) { EventPropertyDescriptor descriptor = target.getPropertyDescriptor(propertyName); if (descriptor != null) { return descriptor; }/* w ww . ja v a 2 s . co m*/ int index = ASTFilterSpecHelper.unescapedIndexOfDot(propertyName); if (index == -1) { return null; } // parse, can be an nested property Property property = PropertyParser.parse(propertyName, false); if (property instanceof PropertyBase) { return target.getPropertyDescriptor(((PropertyBase) property).getPropertyNameAtomic()); } if (!(property instanceof NestedProperty)) { return null; } NestedProperty nested = (NestedProperty) property; Deque<Property> properties = new ArrayDeque<Property>(nested.getProperties()); return getNestablePropertyDescriptor(target, properties); }
From source file:com.plotsquared.iserver.core.Worker.java
static void setup(final int n) { Assert.isPositive(n);//from www.j a va 2 s . c o m availableWorkers = new ArrayDeque<>(n); for (int i = 0; i < n; i++) { availableWorkers.add(new Worker()); } }
From source file:com.espertech.esper.filter.IndexTreeBuilder.java
/** * Add a filter callback according to the filter specification to the top node returning * information to be used to remove the filter callback. * @param filterValueSet is the filter definition * @param filterCallback is the callback to be added * @param topNode node to be added to any subnode beneath it * @return an encapsulation of information need to allow for safe removal of the filter tree. *///from ww w.java2 s .co m public final IndexTreePath add(FilterValueSet filterValueSet, FilterHandle filterCallback, FilterHandleSetNode topNode) { this.remainingParameters = new ArrayDeque<FilterValueSetParam>( Arrays.asList(filterValueSet.getParameters())); this.filterCallback = filterCallback; this.currentThreadId = Thread.currentThread().getId(); if ((ExecutionPathDebugLog.isDebugEnabled) && (log.isDebugEnabled())) { log.debug(".add (" + currentThreadId + ") Adding filter callback, " + " topNode=" + topNode + " filterCallback=" + this.filterCallback); } IndexTreePath treePathInfo = new IndexTreePath(); addToNode(topNode, treePathInfo); this.remainingParameters = null; this.filterCallback = null; return treePathInfo; }
From source file:edu.byu.nlp.al.EmpiricalAnnotationInstanceManager.java
/**
 * Builds the work queue served by this manager: up to {@code maxNumMeasurements}
 * measurements first, followed by up to {@code maxNumAnnotations} annotations.
 * Annotations are gathered per instance and globally sorted by annotation order;
 * measurements are shuffled with {@code rnd} and then optionally re-prioritized.
 *
 * NOTE(review): queue ordering (measurements before annotations) appears
 * intentional — confirm against consumers of {@code queue} before changing.
 */
@VisibleForTesting
EmpiricalAnnotationInstanceManager(Iterable<FlatInstance<D, L>> instances,
        EmpiricalAnnotations<D, L> annotations, AnnotationRecorder<D, L> annotationRecorder,
        int maxNumAnnotations, int maxNumMeasurements, boolean prioritizeLabelProportions,
        RandomGenerator rnd) {
    super(annotationRecorder);
    List<FlatInstance<D, L>> sortedAnnotations = Lists.newArrayList();
    for (FlatInstance<D, L> inst : instances) {
        // add each annotation associated with this item to the queue
        sortedAnnotations.addAll(annotations.getAnnotationsFor(inst.getSource(), inst.getData()).values());
    }
    // sort the annotation queue based on annotation order
    Datasets.sortAnnotationsInPlace(sortedAnnotations);
    // interleave measurements and annotations
    Deque<FlatInstance<D, L>> measurementDeque = Deques.randomizedDeque(annotations.getMeasurements(), rnd);
    prioritizeMeasurements(measurementDeque, prioritizeLabelProportions);
    Deque<FlatInstance<D, L>> annotationDeque = new ArrayDeque<FlatInstance<D, L>>(sortedAnnotations);
    queue = Lists.newLinkedList(); // better queueing behavior
    // add measurements (capped at maxNumMeasurements)
    int numMeasurements = 0;
    while (measurementDeque.size() > 0 && numMeasurements < maxNumMeasurements) {
        numMeasurements += 1;
        queue.add(measurementDeque.pop());
    }
    // add annotations (capped at maxNumAnnotations)
    int numAnnotations = 0;
    while (annotationDeque.size() > 0 && numAnnotations < maxNumAnnotations) {
        numAnnotations += 1;
        queue.add(annotationDeque.pop());
    }
}
From source file:com.espertech.esper.epl.expression.subquery.ExprSubselectRowEvalStrategyUnfilteredSelected.java
public Collection<EventBean> evaluateGetCollEvents(EventBean[] eventsPerStream, boolean newData, Collection<EventBean> matchingEvents, ExprEvaluatorContext context, ExprSubselectRowNode parent) { if (matchingEvents.size() == 0) { return Collections.emptyList(); }/*from w ww .j a v a 2 s . c o m*/ // when selecting a single property in the select clause that provides a fragment if (parent.subselectMultirowType == null) { Collection<EventBean> events = new ArrayDeque<EventBean>(matchingEvents.size()); ExprIdentNodeEvaluator eval = (ExprIdentNodeEvaluator) parent.selectClauseEvaluator[0]; EventPropertyGetter getter = eval.getGetter(); for (EventBean subselectEvent : matchingEvents) { Object fragment = getter.getFragment(subselectEvent); if (fragment == null) { continue; } events.add((EventBean) fragment); } return events; } // when selecting a combined output row that contains multiple fields Collection<EventBean> events = new ArrayDeque<EventBean>(matchingEvents.size()); EventBean[] eventsPerStreamEval = EventBeanUtility.allocatePerStreamShift(eventsPerStream); for (EventBean subselectEvent : matchingEvents) { eventsPerStreamEval[0] = subselectEvent; Map<String, Object> row = parent.evaluateRow(eventsPerStreamEval, true, context); EventBean event = parent.subselectMultirowType.getEventAdapterService().adapterForTypedMap(row, parent.subselectMultirowType.getEventType()); events.add(event); } return events; }
From source file:de.l3s.archivepig.enrich.Response.java
/**
 * Enriches an archive-record tuple with the record's ARC/WARC header fields,
 * its parsed HTTP response headers, and the raw HTTP payload bytes
 * (appended to {@code enrichment} in that order).
 *
 * <p>On a missing/unreadable record, placeholder values (two empty maps and
 * {@code null}) are appended instead, or the method returns early — failures
 * here are deliberately swallowed (best-effort enrichment).
 */
@Override
public void enrich(Tuple data, Tuple enrichment, Object... params) throws Exception {
    long size = get(data, "_record.size");
    long offset = get(data, "_record.offset");
    String filename = get(data, "_record.filename");
    String cdxFile = get(data, "_record.cdxFile");

    // Negative size/offset means there is no record to read.
    if (size < 0 || offset < 0)
        return;

    FileSystem fs = FileSystem.get(UDFContext.getUDFContext().getJobConf());

    // Walk the CDX path segments (deepest first) to locate the data file:
    // each failed existence check prepends one more segment to the path.
    Deque<String> cdxSegments = new ArrayDeque<String>(Lists.reverse(list(cdxFile.split("\\/"))));
    cdxSegments.pop(); // remove filename
    String pathExtension = "";
    Path path = new Path(ArchiveLoader.dataPath(), pathExtension + filename);
    while (!fs.exists(path)) {
        if (cdxSegments.isEmpty()) {
            // Data file not found anywhere: append placeholder values.
            enrichment.append(new HashMap<String, String>());
            enrichment.append(new HashMap<String, String>());
            enrichment.append(null);
            return;
        }
        String cdxSegment = cdxSegments.pop();
        // NOTE(review): ".har" suffix is stripped before use as a directory
        // name — presumably a packaging convention; confirm with the loader.
        if (cdxSegment.endsWith(".har"))
            cdxSegment = cdxSegment.substring(0, cdxSegment.length() - 4);
        pathExtension = cdxSegment + "/" + pathExtension;
        path = new Path(ArchiveLoader.dataPath(), pathExtension + filename);
    }

    // Seek directly to the record within the (possibly multi-record) file.
    FSDataInputStream fsin = fs.open(path);
    fsin.seek(offset);
    InputStream in = fsin;

    // Read the archive record: header fields go into the enrichment tuple,
    // the record body is buffered for HTTP parsing below.
    ByteArrayOutputStream recordOutput = new ByteArrayOutputStream();
    try {
        try (BoundedInputStream boundedIn = new BoundedInputStream(in, size);
                ArchiveReader reader = ArchiveReaderFactory.get(filename, boundedIn, false);) {
            ArchiveRecord record;
            record = reader.get();
            ArchiveRecordHeader header = record.getHeader();
            enrichment.append(header.getHeaderFields());
            record.dump(recordOutput);
        } catch (Exception e) {
            // Best-effort: unreadable record ends the enrichment silently.
            return;
        } finally {
            in.close();
            recordOutput.close();
        }
    } catch (Exception e) {
        return;
    }

    // Parse the buffered record body as an HTTP response; append the header
    // map and the raw payload bytes.
    try (InputStream httpResponse = new ByteArrayInputStream(recordOutput.toByteArray())) {
        // ALL COMMENTS ARE NEW VERSION VARIANTS FOR HTTP-CORE 4.3, currently in use 4.2.5
        // SessionInputBufferImpl sessionInputBuffer = new SessionInputBufferImpl(new HttpTransportMetricsImpl(), 2048);
        // sessionInputBuffer.bind(httpResponse);
        // DefaultHttpResponseParserFactory responseParserFactory = new DefaultHttpResponseParserFactory();
        // HttpMessageParser<HttpResponse> responseParser = responseParserFactory.create(sessionInputBuffer, MessageConstraints.DEFAULT);
        // HttpResponse response = responseParser.parse();
        // Header[] httpHeaders = response.getAllHeaders();
        HttpResponseParser parser = new HttpResponseParser();
        HttpResponse response = parser.parse(httpResponse);
        HttpHeaders httpHeaders = response.getHeaders();
        Map<String, String> httpHeadersMap = new HashMap<String, String>();
        for (HttpHeader httpHeader : httpHeaders) {
            httpHeadersMap.put(httpHeader.getName(), httpHeader.getValue());
        }
        enrichment.append(httpHeadersMap);
        // byte[] payload = new byte[sessionInputBuffer.length()];
        // sessionInputBuffer.read(payload);
        byte[] payload = IOUtils.toByteArray(response);
        enrichment.append(payload);
        // HttpEntity entity = new ByteArrayEntity(payload);
        // output.append(entity == null ? null : EntityUtils.toString(entity));
    } catch (Exception ignored) {
        // Best-effort: a malformed HTTP response leaves the tuple partially enriched.
    }
}
From source file:org.silverpeas.tools.file.regexpr.RegExprMatcher.java
/**
 * Walks the file tree rooted at {@code startFile} breadth-first, printing the
 * path of every file that satisfies ALL configured pattern rules, and updating
 * the matched/analysed counters.
 *
 * @param startFile file or directory to start from
 * @throws Exception if a file cannot be read
 */
private void analyse(File startFile) throws Exception {
    Queue<File> fileQueue = new ArrayDeque<>(100000);
    fileQueue.add(startFile);
    while (!fileQueue.isEmpty()) {
        File file = fileQueue.poll();
        if (file.isFile()) {
            if (config.getFileFilter().accept(file)) {
                boolean fileMatched = false;
                // NOTE(review): no-charset readFileToString uses the platform
                // default encoding; consider passing an explicit charset.
                for (PatternConfig patternConfig : config.getPatternConfigs()) {
                    boolean found = patternConfig.pattern.matcher(FileUtils.readFileToString(file)).find();
                    // A rule is satisfied when the find result matches its mustMatch flag.
                    fileMatched = (found && patternConfig.mustMatch) || (!found && !patternConfig.mustMatch);
                    if (!fileMatched) {
                        break; // all rules must hold; stop at the first failure
                    }
                }
                if (fileMatched) {
                    System.out.println(file.getPath());
                    nbMatchedFiles++;
                }
                nbAnalysedFiles++;
            }
        } else if (file.isDirectory() && config.getDirFilter().accept(file)) {
            // listFiles() returns null on I/O error or denied permission;
            // skip such directories instead of throwing NPE.
            File[] children = file.listFiles();
            if (children != null) {
                for (File subFile : children) {
                    fileQueue.add(subFile);
                }
            }
        }
        // Files rejected by the filters and filtered-out directories are
        // intentionally ignored (dead `int i = 0` else-branch removed).
    }
}
From source file:de.tudarmstadt.ukp.experiments.argumentation.comments.pipeline.DebateArgumentReader.java
protected void loadArgumentsFromNextFile() throws IOException { File file = files.poll();//from ww w . j a va 2s.c o m Debate debate = DebateSerializer.deserializeFromXML(FileUtils.readFileToString(file, "utf-8")); currentArguments = new ArrayDeque<>(debate.getArgumentList()); // and set current debate currentDebate = debate; }