Usage examples for java.util.Queue.peek()
E peek();
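Before the real-world examples below, a minimal standalone sketch of the peek() contract may help: peek() retrieves but does not remove the head of the queue, and returns null (rather than throwing, as element() does) when the queue is empty. The class name and values here are purely illustrative.

import java.util.ArrayDeque;
import java.util.Queue;

public class PeekContractDemo {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<>();
        System.out.println(queue.peek());  // null: empty queue, no exception

        queue.offer("first");
        queue.offer("second");
        System.out.println(queue.peek());  // "first": the head is returned...
        System.out.println(queue.size());  // 2: ...but not removed
        System.out.println(queue.poll());  // "first": poll() does remove the head
        System.out.println(queue.peek());  // "second"
    }
}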
From source file:org.kuali.student.enrollment.class1.krms.service.impl.FERuleEditorMaintainableImpl.java
public AgendaItemDefinition maintainAgendaItems(AgendaEditor agenda, String namePrefix, String nameSpace) {
    Queue<RuleDefinition.Builder> rules = new LinkedList<RuleDefinition.Builder>();
    FEAgendaEditor feAgenda;
    if (agenda instanceof FEAgendaEditor) {
        feAgenda = (FEAgendaEditor) agenda;
        for (RuleEditor rule : feAgenda.getRules()) {
            if (!rule.isDummy()) {
                rules.add(this.finRule(rule, namePrefix, nameSpace));
            }
        }
        AgendaItemDefinition.Builder rootItemBuilder = manageFirstItem(agenda);
        AgendaItemDefinition.Builder itemToDelete = null;
        AgendaItemDefinition.Builder itemBuilder = rootItemBuilder;
        while (rules.peek() != null) {
            itemBuilder.setRule(rules.poll());
            itemBuilder.setRuleId(itemBuilder.getRule().getId());
            if (rules.peek() != null) {
                if (itemBuilder.getWhenFalse() == null) {
                    itemBuilder.setWhenFalse(AgendaItemDefinition.Builder.create(null, agenda.getId()));
                }
                itemBuilder = itemBuilder.getWhenFalse();
            } else {
                itemToDelete = itemBuilder.getWhenFalse();
                itemBuilder.setWhenFalse(null);
            }
        }
        return manageAgendaItems(agenda, rootItemBuilder, itemToDelete);
    }
    return null;
}
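The method above drives its loop with `while (rules.peek() != null)` and calls peek() again after poll() to detect whether another rule follows. A minimal sketch of that one-element-lookahead idiom is shown below; note it assumes the queue never contains null elements (LinkedList would permit them), since peek() also returns null for an empty queue.

import java.util.LinkedList;
import java.util.List;
import java.util.Queue;

public class PeekLookaheadDemo {
    public static void main(String[] args) {
        Queue<String> rules = new LinkedList<>(List.of("rule-1", "rule-2", "rule-3"));
        while (rules.peek() != null) {           // loop while a head exists
            String current = rules.poll();       // consume the head
            if (rules.peek() != null) {          // lookahead: is there a next rule?
                System.out.println(current + " -> chain to next rule");
            } else {
                System.out.println(current + " -> last rule, close the chain");
            }
        }
    }
}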
From source file:org.languagetool.rules.AbstractCompoundRule.java
@Override
public RuleMatch[] match(AnalyzedSentence sentence) {
    List<RuleMatch> ruleMatches = new ArrayList<>();
    AnalyzedTokenReadings[] tokens = getSentenceWithImmunization(sentence).getTokensWithoutWhitespace();
    RuleMatch prevRuleMatch = null;
    Queue<AnalyzedTokenReadings> prevTokens = new ArrayBlockingQueue<>(MAX_TERMS);
    for (int i = 0; i < tokens.length + MAX_TERMS - 1; i++) {
        AnalyzedTokenReadings token;
        // we need to extend the token list so we find matches at the end of the original list:
        if (i >= tokens.length) {
            token = new AnalyzedTokenReadings(new AnalyzedToken("", "", null), prevTokens.peek().getStartPos());
        } else {
            token = tokens[i];
        }
        if (i == 0) {
            addToQueue(token, prevTokens);
            continue;
        } else if (token.isImmunized()) {
            continue;
        }
        AnalyzedTokenReadings firstMatchToken = prevTokens.peek();
        List<String> stringsToCheck = new ArrayList<>();
        List<String> origStringsToCheck = new ArrayList<>(); // original upper/lowercase spelling
        Map<String, AnalyzedTokenReadings> stringToToken =
                getStringToTokenMap(prevTokens, stringsToCheck, origStringsToCheck);
        // iterate backwards over all potentially incorrect strings to make
        // sure we match longer strings first:
        for (int k = stringsToCheck.size() - 1; k >= 0; k--) {
            String stringToCheck = stringsToCheck.get(k);
            String origStringToCheck = origStringsToCheck.get(k);
            if (getCompoundRuleData().getIncorrectCompounds().contains(stringToCheck)) {
                AnalyzedTokenReadings atr = stringToToken.get(stringToCheck);
                String msg = null;
                List<String> replacement = new ArrayList<>();
                if (!getCompoundRuleData().getNoDashSuggestion().contains(stringToCheck)) {
                    replacement.add(origStringToCheck.replace(' ', '-'));
                    msg = withHyphenMessage;
                }
                if (isNotAllUppercase(origStringToCheck)
                        && !getCompoundRuleData().getOnlyDashSuggestion().contains(stringToCheck)) {
                    replacement.add(mergeCompound(origStringToCheck,
                            getCompoundRuleData().getNoDashLowerCaseSuggestion().stream()
                                    .anyMatch(s -> origStringsToCheck.contains(s))));
                    msg = withoutHyphenMessage;
                }
                String[] parts = stringToCheck.split(" ");
                if (parts.length > 0 && parts[0].length() == 1) {
                    replacement.clear();
                    replacement.add(origStringToCheck.replace(' ', '-'));
                    msg = withHyphenMessage;
                } else if (replacement.isEmpty() || replacement.size() == 2) { // isEmpty shouldn't happen
                    msg = withOrWithoutHyphenMessage;
                }
                RuleMatch ruleMatch = new RuleMatch(this, sentence, firstMatchToken.getStartPos(),
                        atr.getEndPos(), msg, shortDesc);
                ruleMatch.setSuggestedReplacements(replacement);
                // avoid duplicate matches:
                if (prevRuleMatch != null && prevRuleMatch.getFromPos() == ruleMatch.getFromPos()) {
                    prevRuleMatch = ruleMatch;
                    break;
                }
                prevRuleMatch = ruleMatch;
                ruleMatches.add(ruleMatch);
                break;
            }
        }
        addToQueue(token, prevTokens);
    }
    return toRuleMatchArray(ruleMatches);
}
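Here prevTokens is a bounded ArrayBlockingQueue of at most MAX_TERMS recent tokens, and peek() always reads the oldest token in that window (used both as the match start position and as the padding token's offset). A minimal sketch of such a sliding window follows; the eviction in addToWindow() is an assumption about what the excerpt's addToQueue() helper does, since that method is not shown.

import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class SlidingWindowDemo {
    static final int MAX_TERMS = 3;

    // Hypothetical stand-in for the excerpt's addToQueue(): drop the
    // oldest element once the window is full, then append the new one.
    static void addToWindow(String item, Queue<String> window) {
        if (window.size() == MAX_TERMS) {
            window.poll();
        }
        window.offer(item);
    }

    public static void main(String[] args) {
        Queue<String> window = new ArrayBlockingQueue<>(MAX_TERMS);
        for (String token : new String[] {"a", "b", "c", "d", "e"}) {
            addToWindow(token, window);
            // peek() exposes the oldest token still in the window
            System.out.println("oldest: " + window.peek() + ", window: " + window);
        }
    }
}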
From source file:org.mule.util.queue.AbstractTransactionQueueManagerTestCase.java
@Test
public void testPeek() throws Exception {
    TransactionalQueueManager mgr = createQueueManager();
    try {
        mgr.start();
        QueueSession s = mgr.getQueueSession();
        Queue q = s.getQueue("queue1");
        assertEquals("Queue size", 0, q.size());
        Object o = q.peek();
        assertEquals("Queue size", 0, q.size());
        assertNull(o);
        q.put("String1");
        assertEquals("Queue size", 1, q.size());
        o = q.peek();
        assertEquals("Queue size", 1, q.size());
        assertEquals("Queue content", "String1", o);
        o = q.poll(1000);
        assertEquals("Queue size", 0, q.size());
        assertEquals("Queue content", "String1", o);
        purgeQueue(q);
    } finally {
        mgr.stop(AbstractResourceManager.SHUTDOWN_MODE_NORMAL);
    }
}
From source file:org.polymap.core.model2.engine.EntityRepositoryImpl.java
public EntityRepositoryImpl(final EntityRepositoryConfiguration config) {
    this.config = config;

    // init store
    getStore().init(new StoreRuntimeContextImpl());

    // init infos
    log.info("Initialializing Composite types:");
    Queue<Class<? extends Composite>> queue = new LinkedList();
    queue.addAll(Arrays.asList(config.getEntities()));
    while (!queue.isEmpty()) {
        log.info("    Composite type: " + queue.peek());
        CompositeInfoImpl info = new CompositeInfoImpl(queue.poll());
        infos.put(info.getType(), info);
        // mixins
        queue.addAll(info.getMixins());
        // Composite properties
        for (PropertyInfo propInfo : info.getProperties()) {
            if (Composite.class.isAssignableFrom(propInfo.getType())) {
                queue.offer(propInfo.getType());
            }
        }
    }
}
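The constructor above is a breadth-first worklist: peek() previews the next type for the log line, poll() then removes it, and newly discovered composites are appended to the same queue. A minimal standalone sketch of that traversal, with an illustrative Map standing in for the type graph and an explicit visited set (the original relies on its own bookkeeping):

import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;

public class WorklistDemo {
    public static void main(String[] args) {
        // Illustrative dependency graph: node -> nodes it references.
        Map<String, List<String>> graph = Map.of(
                "root", List.of("a", "b"),
                "a", List.of("c"),
                "b", List.of(),
                "c", List.of());

        Queue<String> queue = new ArrayDeque<>();
        Set<String> seen = new HashSet<>();
        queue.add("root");
        seen.add("root");
        while (!queue.isEmpty()) {
            System.out.println("processing next: " + queue.peek()); // inspect before removal
            String node = queue.poll();
            for (String child : graph.get(node)) {
                if (seen.add(child)) {          // enqueue each node only once
                    queue.offer(child);
                }
            }
        }
    }
}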
From source file:org.polymap.rhei.batik.layout.cp.BestFirstOptimizerTest.java
@Test
public void unboundSolutionQueue() {
    Queue<TestScoredSolution> queue = SolutionQueueBuilder.create(-1);

    queue.add(new TestScoredSolution(PercentScore.NULL));
    queue.add(new TestScoredSolution(new PercentScore(10)));
    assertEquals(2, queue.size());
    // assertEquals( PercentScore.NULL, queue.getFirst().score );
    assertEquals(new PercentScore(10), queue.peek().score);

    queue.add(new TestScoredSolution(new PercentScore(5)));
    assertEquals(3, queue.size());
    assertEquals(new PercentScore(10), queue.peek().score);

    queue.add(new TestScoredSolution(new PercentScore(5)));
    // assertEquals( 3, queue.size() );
    // assertEquals( new PercentScore( 5 ), queue.getFirst().score );
    assertEquals(new PercentScore(10), queue.peek().score);

    queue.add(new TestScoredSolution(new PercentScore(20)));
    // assertEquals( 3, queue.size() );
    // assertEquals( new PercentScore( 5 ), queue.getFirst().score );
    assertEquals(new PercentScore(20), queue.peek().score);
}
From source file:org.polymap.rhei.batik.layout.cp.BestFirstOptimizerTest.java
@Test
public void boundSolutionQueue() {
    Queue<TestScoredSolution> queue = SolutionQueueBuilder.create(3);

    queue.add(new TestScoredSolution(PercentScore.NULL));
    queue.add(new TestScoredSolution(new PercentScore(10)));
    assertEquals(2, queue.size());
    // assertEquals( PercentScore.NULL, queue.getFirst().score );
    assertEquals(new PercentScore(10), queue.peek().score);

    queue.add(new TestScoredSolution(new PercentScore(5)));
    assertEquals(3, queue.size());
    assertEquals(new PercentScore(10), queue.peek().score);

    queue.add(new TestScoredSolution(new PercentScore(5)));
    assertEquals(3, queue.size());
    // assertEquals( new PercentScore( 5 ), queue.getFirst().score );
    assertEquals(new PercentScore(10), queue.peek().score);

    queue.add(new TestScoredSolution(new PercentScore(20)));
    assertEquals(3, queue.size());
    // assertEquals( new PercentScore( 5 ), queue.getFirst().score );
    assertEquals(new PercentScore(20), queue.peek().score);
}
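Both tests assert that peek() always surfaces the best-scored solution without consuming it. SolutionQueueBuilder is project-specific, but a plain java.util.PriorityQueue with a reversed comparator reproduces the peek() behavior being asserted (though not the bounded-capacity eviction of the create(3) variant). A minimal sketch under that assumption, with an illustrative Scored record:

import java.util.Comparator;
import java.util.PriorityQueue;
import java.util.Queue;

public class BestScoreQueueDemo {
    // Illustrative stand-in for the tests' scored solutions.
    record Scored(String name, int score) {}

    public static void main(String[] args) {
        // Reversed comparator so peek() yields the HIGHEST score;
        // a PriorityQueue's head is otherwise the least element.
        Queue<Scored> queue = new PriorityQueue<>(
                Comparator.comparingInt(Scored::score).reversed());
        queue.add(new Scored("baseline", 0));
        queue.add(new Scored("candidate", 10));
        System.out.println(queue.peek().score()); // 10, and still in the queue
        queue.add(new Scored("weaker", 5));
        System.out.println(queue.peek().score()); // still 10
        queue.add(new Scored("winner", 20));
        System.out.println(queue.peek().score()); // 20
    }
}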
From source file:org.trend.hgraph.util.test.GenerateTestData.java
private void doGenerateTestData() throws IOException {
    HTable vertexTable = null;
    HTable edgeTable = null;
    Put put = null;
    long vIdx = 0;
    byte[] parentVertexKey = null;
    StopWatch timer = new StopWatch();
    timer.start();
    try {
        vertexTable = new HTable(this.getConf(), this.vertexTable);
        vertexTable.setAutoFlush(false);
        edgeTable = new HTable(this.getConf(), this.edgeTable);
        edgeTable.setAutoFlush(false);
        Queue<byte[]> parentVertexKeysQueue = new ArrayDeque<byte[]>();
        int tmpEdgeCountPerVertex = 0;
        int edgeAcctCount = 0;
        Properties.Pair<Integer, Integer> pair = null;
        for (int rowCount = 0; rowCount < this.vertexCount; rowCount++) {
            put = generateVertexPut();
            vertexTable.put(put);
            parentVertexKeysQueue.offer(put.getRow());
            if (rowCount > 0) {
                vIdx = rowCount % tmpEdgeCountPerVertex;
                if (vIdx == 0) {
                    parentVertexKey = parentVertexKeysQueue.poll();
                    edgeAcctCount++;
                    if (this.isDistributionMode && !this.isFirstVertices[pair.key]
                            && edgeAcctCount == tmpEdgeCountPerVertex) {
                        this.addFirstVertex(Bytes.toString(parentVertexKey));
                        this.isFirstVertices[pair.key] = true;
                    }
                    pair = this.determineEdgeCountPerVertex(rowCount);
                    tmpEdgeCountPerVertex = pair.value;
                    edgeAcctCount = 0;
                } else if (vIdx > 0) {
                    edgeAcctCount++;
                    parentVertexKey = parentVertexKeysQueue.peek();
                } else {
                    throw new RuntimeException("vIdex:" + vIdx + " shall always not small than 0");
                }
                put = generateEdgePut(rowCount, parentVertexKey, put.getRow());
                edgeTable.put(put);
            } else {
                pair = this.determineEdgeCountPerVertex(rowCount);
                tmpEdgeCountPerVertex = pair.value;
                if (!this.isDistributionMode)
                    this.addFirstVertex(Bytes.toString(put.getRow()));
            }
        }
        vertexTable.flushCommits();
        edgeTable.flushCommits();
    } catch (IOException e) {
        LOG.error("doGenerateTestData failed", e);
        throw e;
    } finally {
        if (null != vertexTable)
            vertexTable.close();
        if (null != edgeTable)
            edgeTable.close();
        timer.stop();
        LOG.info("Time elapsed:" + timer.toString() + ", " + timer.getTime() + " for pushing "
                + this.vertexCount + " vertices test data to HBase");
        LOG.info("first vertices id:" + this.firstVertices);
    }
}
From source file:org.unitime.timetable.backup.SessionBackup.java
@Override
public void backup(OutputStream out, Progress progress, Long sessionId) throws IOException {
    iOut = CodedOutputStream.newInstance(out);
    iProgress = progress;
    iSessionId = sessionId;
    iHibSession = new _RootDAO().createNewSession();
    iHibSession.setCacheMode(CacheMode.IGNORE);
    iHibSessionFactory = iHibSession.getSessionFactory();
    try {
        iProgress.setStatus("Exporting Session");
        iProgress.setPhase("Loading Model", 3);
        TreeSet<ClassMetadata> allMeta = new TreeSet<ClassMetadata>(new Comparator<ClassMetadata>() {
            @Override
            public int compare(ClassMetadata m1, ClassMetadata m2) {
                return m1.getEntityName().compareTo(m2.getEntityName());
            }
        });
        allMeta.addAll(iHibSessionFactory.getAllClassMetadata().values());
        iProgress.incProgress();

        Queue<QueueItem> queue = new LinkedList<QueueItem>();
        queue.add(new QueueItem(iHibSessionFactory.getClassMetadata(Session.class), null, "uniqueId",
                Relation.None));

        Set<String> avoid = new HashSet<String>();
        // avoid following relations
        avoid.add(TimetableManager.class.getName() + ".departments");
        avoid.add(TimetableManager.class.getName() + ".solverGroups");
        avoid.add(DistributionType.class.getName() + ".departments");
        avoid.add(LastLikeCourseDemand.class.getName() + ".student");
        avoid.add(Student.class.getName() + ".lastLikeCourseDemands");

        Set<String> disallowedNotNullRelations = new HashSet<String>();
        disallowedNotNullRelations.add(Assignment.class.getName() + ".datePattern");
        disallowedNotNullRelations.add(Assignment.class.getName() + ".timePattern");
        disallowedNotNullRelations.add(LastLikeCourseDemand.class.getName() + ".student");
        disallowedNotNullRelations.add(OnlineSectioningLog.class.getName() + ".session");

        Map<String, List<QueueItem>> data = new HashMap<String, List<QueueItem>>();
        List<QueueItem> sessions = new ArrayList<QueueItem>();
        sessions.add(queue.peek());
        data.put(queue.peek().name(), sessions);

        QueueItem item = null;
        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (ClassMetadata meta : allMeta) {
                if (meta.hasSubclasses())
                    continue;
                for (int i = 0; i < meta.getPropertyNames().length; i++) {
                    String property = meta.getPropertyNames()[i];
                    if (disallowedNotNullRelations.contains(meta.getEntityName() + "." + property)
                            || meta.getPropertyNullability()[i])
                        continue;
                    Type type = meta.getPropertyTypes()[i];
                    if (type instanceof EntityType && type.getReturnedClass().equals(item.clazz())) {
                        QueueItem qi = new QueueItem(meta, item, property, Relation.Parent);
                        if (!data.containsKey(qi.name())) {
                            List<QueueItem> items = new ArrayList<QueueItem>();
                            data.put(qi.name(), items);
                            queue.add(qi);
                            items.add(qi);
                            if (qi.size() > 0)
                                iProgress.info("Parent: " + qi);
                        }
                    }
                }
            }
        }
        iProgress.incProgress();

        for (List<QueueItem> list : data.values())
            queue.addAll(list);

        // The following part is needed to ensure that instructor distribution preferences
        // are saved including their distribution types
        List<QueueItem> distributions = new ArrayList<QueueItem>();
        for (QueueItem instructor : data.get(DepartmentalInstructor.class.getName())) {
            QueueItem qi = new QueueItem(iHibSessionFactory.getClassMetadata(DistributionPref.class),
                    instructor, "owner", Relation.Parent);
            distributions.add(qi);
            queue.add(qi);
            if (qi.size() > 0)
                iProgress.info("Extra: " + qi);
        }
        data.put(DistributionPref.class.getName(), distributions);

        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (int i = 0; i < item.meta().getPropertyNames().length; i++) {
                String property = item.meta().getPropertyNames()[i];
                Type type = item.meta().getPropertyTypes()[i];
                if (type instanceof EntityType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(type.getReturnedClass());
                    if (item.contains(meta.getEntityName()))
                        continue;
                    QueueItem qi = new QueueItem(meta, item, property, Relation.One);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);
                    if (qi.size() > 0)
                        iProgress.info("One: " + qi);
                }
                if (type instanceof CollectionType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(((CollectionType) type)
                            .getElementType((SessionFactoryImplementor) iHibSessionFactory)
                            .getReturnedClass());
                    if (meta == null || item.contains(meta.getEntityName()))
                        continue;
                    QueueItem qi = new QueueItem(meta, item, property, Relation.Many);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);
                    if (qi.size() > 0)
                        iProgress.info("Many: " + qi);
                }
            }
        }
        iProgress.incProgress();

        Map<String, Set<Serializable>> allExportedIds = new HashMap<String, Set<Serializable>>();
        for (String name : new TreeSet<String>(data.keySet())) {
            List<QueueItem> list = data.get(name);
            Map<String, TableData.Table.Builder> tables = new HashMap<String, TableData.Table.Builder>();
            for (QueueItem current : list) {
                if (current.size() == 0)
                    continue;
                iProgress.info("Loading " + current);
                List<Object> objects = current.list();
                if (objects == null || objects.isEmpty())
                    continue;
                iProgress.setPhase(current.abbv() + " [" + objects.size() + "]", objects.size());
                objects: for (Object object : objects) {
                    iProgress.incProgress();

                    // Get meta data (check for sub-classes)
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(object.getClass());
                    if (meta == null)
                        meta = current.meta();
                    if (meta.hasSubclasses()) {
                        for (Iterator i = iHibSessionFactory.getAllClassMetadata().entrySet().iterator(); i.hasNext();) {
                            Map.Entry entry = (Map.Entry) i.next();
                            ClassMetadata classMetadata = (ClassMetadata) entry.getValue();
                            if (classMetadata.getMappedClass().isInstance(object)
                                    && !classMetadata.hasSubclasses()) {
                                meta = classMetadata;
                                break;
                            }
                        }
                    }

                    // Get unique identifier
                    Serializable id = meta.getIdentifier(object, (SessionImplementor) iHibSession);

                    // Check if already exported
                    Set<Serializable> exportedIds = allExportedIds.get(meta.getEntityName());
                    if (exportedIds == null) {
                        exportedIds = new HashSet<Serializable>();
                        allExportedIds.put(meta.getEntityName(), exportedIds);
                    }
                    if (!exportedIds.add(id))
                        continue;

                    // Check relation to an academic session (if exists)
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        if (type instanceof EntityType && type.getReturnedClass().equals(Session.class)) {
                            Session s = (Session) meta.getPropertyValue(object, property);
                            if (s != null && !s.getUniqueId().equals(iSessionId)) {
                                iProgress.warn(meta.getEntityName()
                                        .substring(meta.getEntityName().lastIndexOf('.') + 1) + "@" + id
                                        + " belongs to a different academic session (" + s + ")");
                                continue objects; // wrong session
                            }
                        }
                    }

                    // Get appropriate table
                    TableData.Table.Builder table = tables.get(meta.getEntityName());
                    if (table == null) {
                        table = TableData.Table.newBuilder();
                        tables.put(meta.getEntityName(), table);
                        table.setName(meta.getEntityName());
                    }

                    // Export object
                    TableData.Record.Builder record = TableData.Record.newBuilder();
                    record.setId(id.toString());
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        Object value = meta.getPropertyValue(object, property);
                        if (value == null)
                            continue;
                        TableData.Element.Builder element = TableData.Element.newBuilder();
                        element.setName(property);
                        if (type instanceof PrimitiveType) {
                            element.addValue(((PrimitiveType) type).toString(value));
                        } else if (type instanceof StringType) {
                            element.addValue(((StringType) type).toString((String) value));
                        } else if (type instanceof BinaryType) {
                            element.addValueBytes(ByteString.copyFrom((byte[]) value));
                        } else if (type instanceof TimestampType) {
                            element.addValue(((TimestampType) type).toString((Date) value));
                        } else if (type instanceof DateType) {
                            element.addValue(((DateType) type).toString((Date) value));
                        } else if (type instanceof EntityType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                            iHibSession.evict(value);
                        } else if (type instanceof CustomType && value instanceof Document) {
                            if (object instanceof CurriculumClassification && property.equals("students"))
                                continue;
                            StringWriter w = new StringWriter();
                            XMLWriter x = new XMLWriter(w, OutputFormat.createCompactFormat());
                            x.write((Document) value);
                            x.flush();
                            x.close();
                            element.addValue(w.toString());
                        } else if (type instanceof CollectionType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                        } else if (type instanceof EmbeddedComponentType
                                && property.equalsIgnoreCase("uniqueCourseNbr")) {
                            continue;
                        } else {
                            iProgress.warn("Unknown data type: " + type + " (property " + meta.getEntityName()
                                    + "." + property + ", class " + value.getClass() + ")");
                            continue;
                        }
                        record.addElement(element.build());
                    }
                    table.addRecord(record.build());
                    iHibSession.evict(object);
                }
                current.clearCache();
            }
            for (TableData.Table.Builder table : tables.values()) {
                add(table.build());
            }
        }

        /*
        // Skip ConstraintInfo
        if (!iData.containsKey(ConstraintInfo.class.getName()))
            iData.put(ConstraintInfo.class.getName(),
                    new QueueItem(iHibSessionFactory.getClassMetadata(ConstraintInfo.class), null, null, Relation.Empty));

        for (String name: items)
            export(iData.get(name));

        while (true) {
            List<Object> objects = new ArrayList<Object>();
            ClassMetadata meta = null;
            for (Entity e: iObjects) {
                if (e.exported()) continue;
                if (objects.isEmpty() || meta.getEntityName().equals(e.name())) {
                    meta = e.meta();
                    objects.add(e.object());
                    e.notifyExported();
                }
            }
            if (objects.isEmpty()) break;
            export(meta, objects, null);
        }
        */

        iProgress.setStatus("All done.");
    } finally {
        iHibSession.close();
    }
}
From source file:org.wso2.carbon.dataservices.core.DBUtils.java
/**
 * This method is used to embed syntaxes associated with UDT attribute notations to
 * a queue of string tokens extracted from a UDT parameter.
 *
 * @param tokens      Queue of string tokens
 * @param syntaxQueue Syntax embedded tokens
 * @param isIndex     Flag to determine whether a particular string token is an index
 *                    or a column name
 */
public static void getSyntaxEmbeddedQueue(Queue<String> tokens, Queue<String> syntaxQueue, boolean isIndex) {
    if (!tokens.isEmpty()) {
        if ("[".equals(tokens.peek())) {
            isIndex = true;
            tokens.poll();
            syntaxQueue.add("INEDX_START");
            syntaxQueue.add(tokens.poll());
        } else if ("]".equals(tokens.peek())) {
            isIndex = false;
            tokens.poll();
            syntaxQueue.add("INDEX_END");
        } else if (".".equals(tokens.peek())) {
            tokens.poll();
            syntaxQueue.add("DOT");
            syntaxQueue.add("COLUMN");
            syntaxQueue.add(tokens.poll());
        } else {
            if (isIndex) {
                syntaxQueue.add("INDEX");
                syntaxQueue.add(tokens.poll());
            } else {
                syntaxQueue.add("COLUMN");
                syntaxQueue.add(tokens.poll());
            }
        }
        getSyntaxEmbeddedQueue(tokens, syntaxQueue, isIndex);
    }
}
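peek() here acts as the classifier of a single-token-lookahead parser: the next token decides the branch, and poll() consumes it only once the branch is chosen. A stripped-down sketch of the same shape follows; the token stream and printed labels are illustrative, not the data services grammar.

import java.util.ArrayDeque;
import java.util.List;
import java.util.Queue;

public class LookaheadParserDemo {
    public static void main(String[] args) {
        Queue<String> tokens = new ArrayDeque<>(List.of("[", "0", "]", ".", "name"));
        while (!tokens.isEmpty()) {
            String next = tokens.peek();            // classify without consuming
            if ("[".equals(next)) {
                tokens.poll();                      // consume "["
                System.out.println("index: " + tokens.poll());
            } else if ("]".equals(next)) {
                tokens.poll();
                System.out.println("index end");
            } else if (".".equals(next)) {
                tokens.poll();
                System.out.println("column: " + tokens.poll());
            } else {
                System.out.println("value: " + tokens.poll());
            }
        }
    }
}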
From source file:pt.webdetails.cda.cache.scheduler.CacheActivator.java
public boolean execute() throws Exception {
    ClassLoader contextCL = Thread.currentThread().getContextClassLoader();
    Session session = PluginHibernateUtil.getSession();
    Date rightNow = new Date();
    Transaction transaction = null;
    try {
        Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
        transaction = session.beginTransaction();

        /* If there's any work at all to be done, the first thing we do is proactively
         * reschedule this action to one hour from now, to ensure that, if for some
         * reason the queue fails to reschedule after executing all due queries, we'll
         * still recover at some point in the future. */
        Queue<CachedQuery> queue = CacheScheduleManager.getInstance().getQueue();
        if (queue.peek().getNextExecution().before(rightNow)) {
            Date anHourFromNow = new Date(rightNow.getTime() + ONE_HOUR);
            reschedule(anHourFromNow);
        } else {
            logger.info("No work to be done");
        }

        while (queue.peek().getNextExecution().before(rightNow)) {
            processQueries(session, queue);
            rightNow = new Date();
        }

        reschedule(queue);
        session.flush();
        session.getTransaction().commit();
        return true;
    } catch (Exception e) {
        if (transaction != null) {
            transaction.rollback();
        }
        logger.error(e);
        return false;
    } finally {
        session.close();
        Thread.currentThread().setContextClassLoader(contextCL);
    }
}
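The scheduler above repeatedly peeks the head of a queue ordered by next execution time and processes entries only while the head is already due. One caveat worth noting: queue.peek().getNextExecution() would throw a NullPointerException on an empty queue, so a standalone version should guard the peek(). A minimal sketch of that "run while the head is due" loop, with an illustrative Job record standing in for CachedQuery:

import java.util.Comparator;
import java.util.PriorityQueue;
import java.util.Queue;

public class DueJobsDemo {
    // Illustrative stand-in for the scheduler's CachedQuery.
    record Job(String name, long dueAtMillis) {}

    public static void main(String[] args) {
        Queue<Job> jobs = new PriorityQueue<>(Comparator.comparingLong(Job::dueAtMillis));
        long now = System.currentTimeMillis();
        jobs.add(new Job("overdue-report", now - 1_000));
        jobs.add(new Job("hourly-refresh", now + 60_000));

        // Guarding peek() against null avoids the NPE the original would
        // hit on an empty queue; stop as soon as the head is not yet due.
        while (jobs.peek() != null && jobs.peek().dueAtMillis() <= now) {
            Job due = jobs.poll();
            System.out.println("executing " + due.name());
        }
        System.out.println("next head: " + (jobs.peek() == null ? "none" : jobs.peek().name()));
    }
}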