Usage examples for java.util.Queue#remove() — retrieves and removes the head of the queue, throwing NoSuchElementException if the queue is empty (unlike poll(), which returns null).
E remove();
From source file:com.facebook.config.AbstractExpandedConfJSONProvider.java
private JSONObject getExpandedJSONConfig() throws JSONException { Set<String> traversedFiles = new HashSet<>(); Queue<String> toTraverse = new LinkedList<>(); // Seed the graph traversal with the root node traversedFiles.add(root);// w w w. ja va2 s . co m toTraverse.add(root); // Policy: parent configs will override children (included) configs JSONObject expanded = new JSONObject(); while (!toTraverse.isEmpty()) { String current = toTraverse.remove(); JSONObject json = load(current); JSONObject conf = json.getJSONObject(CONF_KEY); Iterator<String> iter = conf.keys(); while (iter.hasNext()) { String key = iter.next(); // Current config will get to insert keys before its include files if (!expanded.has(key)) { expanded.put(key, conf.get(key)); } } // Check if the file itself has any included files if (json.has(INCLUDES_KEY)) { JSONArray includes = json.getJSONArray(INCLUDES_KEY); for (int idx = 0; idx < includes.length(); idx++) { String include = resolve(current, includes.getString(idx)); if (traversedFiles.contains(include)) { LOG.warn("Config file was included twice: " + include); } else { toTraverse.add(include); traversedFiles.add(include); } } } } return expanded; }
From source file:org.callimachusproject.client.HttpAuthenticator.java
/**
 * Generates an authentication header for the request based on the current
 * auth state, and adds it to the request.
 * <p>
 * FAILURE: nothing is sent. SUCCESS with a connection-based scheme: nothing
 * is sent (the connection is already authenticated). SUCCESS with a
 * request-based scheme: a header is generated from the current scheme.
 * CHALLENGED: each offered {@code AuthOption} is tried in order until one
 * produces a header; with no options, the current scheme is used.
 *
 * @param request   request to decorate with an auth header
 * @param authState current authentication state for the exchange
 * @param context   execution context passed through to header generation
 * @throws HttpException if header generation fails fatally
 * @throws IOException   on I/O failure during header generation
 */
private void generateAuthResponse(final HttpRequest request, final AuthState authState, final HttpContext context)
        throws HttpException, IOException {
    AuthScheme authScheme = authState.getAuthScheme();
    Credentials creds = authState.getCredentials();
    switch (authState.getState()) {
    case FAILURE:
        // Authentication already failed for this exchange; send nothing.
        return;
    case SUCCESS:
        ensureAuthScheme(authScheme);
        if (authScheme.isConnectionBased()) {
            // Connection-based schemes authenticate the connection once;
            // no per-request header is needed.
            return;
        }
        // Request-based scheme: fall through to the shared header generation.
        break;
    case CHALLENGED:
        final Queue<AuthOption> authOptions = authState.getAuthOptions();
        if (authOptions != null) {
            // Try each offered auth option until one yields a header.
            while (!authOptions.isEmpty()) {
                final AuthOption authOption = authOptions.remove();
                authScheme = authOption.getAuthScheme();
                creds = authOption.getCredentials();
                // Record the scheme being attempted in the auth state.
                authState.update(authScheme, creds);
                if (this.log.isDebugEnabled()) {
                    this.log.debug("Generating response to an authentication challenge using "
                            + authScheme.getSchemeName() + " scheme");
                }
                try {
                    final Header header = doAuth(authScheme, creds, request, context);
                    request.addHeader(header);
                    break;
                } catch (final AuthenticationException ex) {
                    // This option failed; log and move on to the next one.
                    if (this.log.isWarnEnabled()) {
                        this.log.warn(authScheme + " authentication error: " + ex.getMessage());
                    }
                }
            }
            return;
        } else {
            // Challenged without options: reuse the scheme already in state.
            ensureAuthScheme(authScheme);
        }
    }
    // Shared tail for SUCCESS (request-based) and CHALLENGED without options.
    if (authScheme != null) {
        try {
            final Header header = doAuth(authScheme, creds, request, context);
            request.addHeader(header);
        } catch (final AuthenticationException ex) {
            if (this.log.isErrorEnabled()) {
                this.log.error(authScheme + " authentication error: " + ex.getMessage());
            }
        }
    }
}
From source file:com.shollmann.igcparser.ui.activity.IGCFilesActivity.java
private List<IGCFile> getListIGCFiles(File parentDir) { List<IGCFile> inFiles = new ArrayList<>(); Queue<File> files = new LinkedList<>(); try {//from ww w . j a va 2s.c o m files.addAll(Arrays.asList(parentDir.listFiles())); while (!files.isEmpty()) { File file = files.remove(); if (!Utilities.isUnlikelyIGCFolder(file)) { if (file != null && file.isDirectory()) { files.addAll(Arrays.asList(file.listFiles())); } else if (file != null && (file.getName().toLowerCase().endsWith(".igc"))) { inFiles.add(Parser.quickParse(Uri.parse(file.getAbsolutePath()))); } } } Collections.sort(inFiles, Comparators.compareByDate); } catch (Throwable t) { final String message = "Couldn't open files"; Crashlytics.log(message); Crashlytics.logException(t); Logger.logError(message); } return inFiles; }
From source file:org.opencb.commons.datastore.solr.FacetQueryParser.java
public String parseJson(String facetJson) throws IOException { Queue<Map<String, Object>> myQueue = new LinkedList<>(); Map jsonMap = new ObjectMapper().readValue(facetJson, Map.class); myQueue.add(jsonMap);/*w ww.jav a2 s . c o m*/ while (!myQueue.isEmpty()) { Map<String, Object> map = myQueue.remove(); for (Map.Entry<String, Object> entry : map.entrySet()) { if (entry.getValue() instanceof Map) { Map<String, Object> innerMap = (Map<String, Object>) entry.getValue(); // Analyse map to fill in content if (innerMap.containsKey("start")) { // Ranges innerMap.put("type", "range"); innerMap.put("gap", innerMap.get("step")); innerMap.remove("step"); } else if (innerMap.containsKey("q")) { // Query innerMap.put("type", "query"); } else if (innerMap.containsKey("field")) { // Categorical innerMap.put("type", "terms"); } // Check if there is a 'facet' field and insert all the items in the queue Object facet = innerMap.get("facet"); if (facet != null) { myQueue.add((Map<String, Object>) facet); } } } } return new ObjectMapper().writeValueAsString(jsonMap); }
From source file:com.marklogic.contentpump.DelimitedJSONReader.java
@SuppressWarnings("unchecked") protected String findUriInJSON(String line) throws JsonParseException, IOException { /* Breadth-First-Search */ Queue<Object> q = new LinkedList<Object>(); Object root = mapper.readValue(line.getBytes(), Object.class); if (root instanceof Map || root instanceof ArrayList) { q.add(root);/* w w w.j a v a 2s . c om*/ } else { throw new UnsupportedOperationException("invalid JSON"); } while (!q.isEmpty()) { Object current = q.remove(); if (current instanceof ArrayList) { for (Object element : (ArrayList<Object>) current) { if (element instanceof Map || element instanceof ArrayList) { q.add(element); } } } else { // instanceof Map // First Match Map<String, ?> map = (Map<String, ?>) current; if (map.containsKey(uriName)) { Object uriValue = map.get(uriName); if (uriValue instanceof Number || uriValue instanceof String) { return uriValue.toString(); } else { return null; } } // Add child elements to queue Iterator<?> it = map.entrySet().iterator(); while (it.hasNext()) { Entry<String, ?> KVpair = (Entry<String, ?>) it.next(); Object pairValue = KVpair.getValue(); if (pairValue instanceof Map || pairValue instanceof ArrayList) { q.add(pairValue); } } ; } } return null; }
From source file:com.datatorrent.stram.appdata.AppDataPushAgent.java
/**
 * Assembles the metrics push payload: app identity, per-logical-operator
 * info with windowed metrics (draining each operator's metrics queue), and
 * application-level stats.
 *
 * @return the JSON document to send over the metrics transport
 * @throws RuntimeException wrapping any JSONException during assembly
 */
private JSONObject getPushData() {
    // assemble the json that contains the app stats and logical operator stats and counters
    JSONObject json = new JSONObject();
    try {
        json.put("type", DATA);
        json.put("appId", dnmgr.getLogicalPlan().getValue(DAGContext.APPLICATION_ID));
        json.put("appName", dnmgr.getLogicalPlan().getValue(DAGContext.APPLICATION_NAME));
        json.put("appUser", appContext.getUser());
        List<LogicalOperatorInfo> logicalOperatorInfoList = dnmgr.getLogicalOperatorInfoList();
        JSONObject logicalOperators = new JSONObject();
        for (LogicalOperatorInfo logicalOperator : logicalOperatorInfoList) {
            JSONObject logicalOperatorJson = extractFields(logicalOperator);
            JSONArray metricsList = new JSONArray();
            Queue<Pair<Long, Map<String, Object>>> windowMetrics = dnmgr.getWindowMetrics(logicalOperator.name);
            if (windowMetrics != null) {
                // remove() drains the queue, so each window's metrics are
                // included in exactly one push.
                while (!windowMetrics.isEmpty()) {
                    Pair<Long, Map<String, Object>> metrics = windowMetrics.remove();
                    long windowId = metrics.first;
                    // metric name, aggregated value
                    Map<String, Object> aggregates = metrics.second;
                    long now = System.currentTimeMillis();
                    // Push the metrics schema when it has never been sent for
                    // this operator, or when the resend interval has elapsed.
                    if (!operatorsSchemaLastSentTime.containsKey(logicalOperator.name)
                            || (metricsTransport.getSchemaResendInterval() > 0
                                    && operatorsSchemaLastSentTime.get(logicalOperator.name) < now
                                            - metricsTransport.getSchemaResendInterval())) {
                        try {
                            pushMetricsSchema(dnmgr.getLogicalPlan().getOperatorMeta(logicalOperator.name),
                                    aggregates);
                            operatorsSchemaLastSentTime.put(logicalOperator.name, now);
                        } catch (IOException ex) {
                            // Schema push is best-effort; metrics still go out.
                            LOG.error("Cannot push metrics schema", ex);
                        }
                    }
                    JSONObject metricsItem = new JSONObject();
                    metricsItem.put("_windowId", windowId);
                    long windowToMillis = dnmgr.windowIdToMillis(windowId);
                    LOG.debug("metric window {} time {}", windowId, windowToMillis);
                    metricsItem.put("_time", windowToMillis);
                    for (Map.Entry<String, Object> entry : aggregates.entrySet()) {
                        String metricName = entry.getKey();
                        Object aggregateValue = entry.getValue();
                        metricsItem.put(metricName, aggregateValue);
                    }
                    metricsList.put(metricsItem);
                }
            }
            logicalOperatorJson.put("metrics", metricsList);
            logicalOperators.put(logicalOperator.name, logicalOperatorJson);
        }
        json.put("time", System.currentTimeMillis());
        json.put("logicalOperators", logicalOperators);
        json.put("stats", extractFields(appContext.getStats()));
    } catch (JSONException ex) {
        throw new RuntimeException(ex);
    }
    return json;
}
From source file:za.co.jumpingbean.alfresco.repo.EmailDocumentsAction.java
public void addAttachments(final Action action, final NodeRef nodeRef, MimeMessage mimeMessage) throws MessagingException { String text = (String) action.getParameterValue(PARAM_BODY); Boolean convertToPDF = (Boolean) action.getParameterValue(PARAM_CONVERT); MimeMultipart mail = new MimeMultipart("mixed"); MimeBodyPart bodyText = new MimeBodyPart(); bodyText.setText(text);//from w w w . j a v a 2s . c om mail.addBodyPart(bodyText); Queue<NodeRef> que = new LinkedList<>(); QName type = nodeService.getType(nodeRef); if (type.isMatch(ContentModel.TYPE_FOLDER) || type.isMatch(ContentModel.TYPE_CONTAINER)) { que.add(nodeRef); } else { addAttachement(nodeRef, mail, convertToPDF); } while (!que.isEmpty()) { NodeRef tmpNodeRef = que.remove(); List<ChildAssociationRef> list = nodeService.getChildAssocs(tmpNodeRef); for (ChildAssociationRef childRef : list) { NodeRef ref = childRef.getChildRef(); if (nodeService.getType(ref).isMatch(ContentModel.TYPE_CONTENT)) { addAttachement(ref, mail, convertToPDF); } else { que.add(ref); } } } mimeMessage.setContent(mail); }
From source file:com.microsoft.applicationinsights.internal.logger.FileLoggerOutputTest.java
/**
 * Verifies file rotation in FileLoggerOutput: with the first two proxies
 * reporting full, three log lines must each land on a different proxy, and
 * (per the verifications below) proxy1 is closed and deleted, proxy2 is
 * closed but kept, and proxy3 remains open.
 */
@Test
public void testChangeOfFiles() throws IOException {
    // First two proxies report "full" so every subsequent write rotates.
    LogFileProxy mockProxy1 = Mockito.mock(LogFileProxy.class);
    Mockito.doReturn(true).when(mockProxy1).isFull();
    LogFileProxy mockProxy2 = Mockito.mock(LogFileProxy.class);
    Mockito.doReturn(true).when(mockProxy2).isFull();
    LogFileProxy mockProxy3 = Mockito.mock(LogFileProxy.class);
    final Queue<LogFileProxy> proxies = new LinkedList<LogFileProxy>();
    proxies.add(mockProxy1);
    proxies.add(mockProxy2);
    proxies.add(mockProxy3);
    // The factory hands out the queued proxies in order, one per create().
    LogFileProxyFactory mockFactory = Mockito.mock(LogFileProxyFactory.class);
    Mockito.doAnswer(new Answer() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
            return proxies.remove();
        }
    }).when(mockFactory).create((File) anyObject(), anyString(), anyInt());
    File folder = createFolderForTest();
    // NOTE(review): 'folder' is not passed to createFileLoggerOutput();
    // presumably the output writes to a conventional location -- confirm.
    FileLoggerOutput tested = createFileLoggerOutput();
    tested.setLogProxyFactory(mockFactory);
    try {
        tested.log("line1");
        tested.log("line2");
        tested.log("line3");
    } finally {
        // Clean up the test folder even if logging throws.
        if (folder != null && folder.exists()) {
            FileUtils.deleteDirectory(folder);
        }
    }
    // Three rotations => three factory calls, one line written per proxy.
    Mockito.verify(mockFactory, Mockito.times(3)).create((File) anyObject(), anyString(), anyInt());
    Mockito.verify(mockProxy1, Mockito.times(1)).writeLine(anyString());
    Mockito.verify(mockProxy1, Mockito.times(1)).writeLine("line1");
    Mockito.verify(mockProxy1, Mockito.times(1)).close();
    Mockito.verify(mockProxy1, Mockito.times(1)).delete();
    Mockito.verify(mockProxy2, Mockito.times(1)).writeLine(anyString());
    Mockito.verify(mockProxy2, Mockito.times(1)).writeLine("line2");
    Mockito.verify(mockProxy2, Mockito.times(1)).close();
    Mockito.verify(mockProxy2, Mockito.never()).delete();
    Mockito.verify(mockProxy3, Mockito.times(1)).writeLine(anyString());
    Mockito.verify(mockProxy3, Mockito.times(1)).writeLine("line3");
    Mockito.verify(mockProxy3, Mockito.never()).close();
    Mockito.verify(mockProxy3, Mockito.never()).delete();
}
From source file:com.jaspersoft.jasperserver.war.cascade.token.FilterCore.java
/**
 * Resolves a cascading evaluation order for parameters from their master
 * (upstream) dependencies by iterative relaxation: names with no masters go
 * to the front; a name whose masters are all already placed is appended;
 * anything else is re-queued for a later pass.
 *
 * @param masterDependencies map from parameter name to the names it depends on
 * @return parameter names in a valid cascading order
 * @throws JSException if the dependencies are circular or reference
 *                     non-existing names (the iteration bound is exhausted)
 */
@Override
public LinkedHashSet<String> resolveCascadingOrder(Map<String, Set<String>> masterDependencies) {
    Deque<String> orderedNames = new LinkedList<String>();
    Queue<String> workingQueue = new LinkedList<String>(masterDependencies.keySet());
    // Upper bound for a resolvable graph: worst case places one name per
    // pass while re-queueing the rest -> n + (n-1) + ... + 1 iterations.
    int maxIterations = (masterDependencies.size() * (masterDependencies.size() + 1)) / 2 + 1;
    while (!workingQueue.isEmpty() && maxIterations-- > 0) {
        String currentName = workingQueue.remove();
        Set<String> masterDependency = masterDependencies.get(currentName);
        if (masterDependency == null || masterDependency.isEmpty()) {
            // Independent names go to the front of the order.
            orderedNames.addFirst(currentName);
        } else if (orderedNames.containsAll(masterDependency)) {
            // All masters are placed, so this name can safely follow them.
            orderedNames.addLast(currentName);
        } else {
            // Not resolvable yet; retry after more names have been placed.
            workingQueue.add(currentName);
        }
    }
    if (maxIterations > 0) {
        return new LinkedHashSet<String>(orderedNames);
    }
    // Bound exhausted with names still unplaced: the graph has a cycle or
    // references names not present in the map.
    throw new JSException("Order cannot be resolved because of circular or non-existing dependencies.");
}