List of usage examples for java.util LinkedList addLast
public void addLast(E e)
From source file:com.commander4j.db.JDBDespatch.java
public LinkedList<String> getAssignedSSCCs(String despatchNo) { PreparedStatement stmt = null; LinkedList<String> result = new LinkedList<String>(); ResultSet rs;//from w w w . ja v a 2s. c o m result.clear(); try { stmt = Common.hostList.getHost(getHostID()).getConnection(getSessionID()).prepareStatement( Common.hostList.getHost(getHostID()).getSqlstatements().getSQL("JDBDespatch.getAssignedSSCCs")); stmt.setFetchSize(50); stmt.setString(1, despatchNo); rs = stmt.executeQuery(); while (rs.next()) { result.addLast(rs.getString("SSCC")); } rs.close(); stmt.close(); } catch (SQLException e) { setErrorMessage(e.getMessage()); } return result; }
From source file:com.google.cloud.dns.testing.LocalDnsHelper.java
/**
 * Lists changes. Next page token is the ID of the last change listed.
 *
 * <p>Supported options (parsed from {@code query}): fields, sortOrder,
 * pageToken, maxResults. Returns a NOT_FOUND response when the zone does not
 * exist, an INTERNAL_ERROR response when serialization fails, or the list
 * response otherwise.
 */
@VisibleForTesting
Response listChanges(String projectId, String zoneName, String query) {
    Map<String, Object> options = OptionParsers.parseListChangesOptions(query);
    Response response = checkListOptions(options);
    if (response != null) {
        return response;
    }
    ZoneContainer zoneContainer = findZone(projectId, zoneName);
    if (zoneContainer == null) {
        return Error.NOT_FOUND.response(
                String.format("The 'parameters.managedZone' resource named '%s' does not exist", zoneName));
    }
    // take a sorted snapshot of the current change list
    NavigableMap<Integer, Change> changes = new TreeMap<>();
    for (Change c : zoneContainer.changes()) {
        if (c.getId() != null) {
            changes.put(Integer.valueOf(c.getId()), c);
        }
    }
    String[] fields = (String[]) options.get("fields");
    String sortOrder = (String) options.get("sortOrder");
    String pageToken = (String) options.get("pageToken");
    Integer maxResults = options.get("maxResults") == null ? null
            : Integer.valueOf((String) options.get("maxResults"));
    // as the only supported field is change sequence, we are not reading sortBy
    NavigableSet<Integer> keys;
    if ("descending".equals(sortOrder)) {
        keys = changes.descendingKeySet();
    } else {
        keys = changes.navigableKeySet();
    }
    Integer from = null;
    try {
        from = Integer.valueOf(pageToken);
    } catch (NumberFormatException ex) {
        // ignore page token (also covers a null pageToken, which throws here)
    }
    keys = from != null ? keys.tailSet(from, false) : keys;
    // NOTE(review): 'fragment' is always an ascending tailMap; with sortOrder
    // "descending" AND a pageToken, 'keys' contains ids numerically below
    // 'from', which fragment.get(key) would not find — confirm against callers
    // whether that combination is exercised.
    NavigableMap<Integer, Change> fragment = from != null && changes.containsKey(from)
            ? changes.tailMap(from, false) : changes;
    boolean sizeReached = false;
    boolean hasMorePages = false;
    LinkedList<String> serializedResults = new LinkedList<>();
    String lastChangeId = null;
    for (Integer key : keys) {
        Change change = fragment.get(key);
        if (sizeReached) {
            // we do not add this, just note that there would be more and there should be a token
            hasMorePages = true;
            break;
        } else {
            lastChangeId = change.getId();
            try {
                serializedResults.addLast(jsonFactory.toString(OptionParsers.extractFields(change, fields)));
            } catch (IOException e) {
                return Error.INTERNAL_ERROR.response(
                        String.format("Error when serializing change %s in managed zone %s in project %s",
                                lastChangeId, zoneName, projectId));
            }
        }
        // re-evaluated after each append so the page closes exactly at maxResults
        sizeReached = maxResults != null && maxResults.equals(serializedResults.size());
    }
    // page token is emitted only when there is more data and the caller did not
    // exclude nextPageToken via a fields projection
    boolean includePageToken = hasMorePages
            && (fields == null || Arrays.asList(fields).contains("nextPageToken"));
    return toListResponse(serializedResults, "changes", lastChangeId, includePageToken);
}
From source file:net.timewalker.ffmq4.storage.data.impl.journal.JournalRecovery.java
/**
 * Replays all complete transactions found in the given journal file against
 * the store, dropping any trailing incomplete transaction.
 *
 * @param journalFile journal file to replay
 * @return the block count produced by the last applied transaction, or -1 if
 *         no transaction was applied
 * @throws JournalException if the file cannot be opened or closed
 */
private int recoverFromJournalFile(File journalFile) throws JournalException {
    log.debug("[" + baseName + "] Processing " + journalFile.getAbsolutePath());
    DataInputStream in;
    try {
        // Create a buffered data input stream from file
        in = new DataInputStream(new BufferedInputStream(new FileInputStream(journalFile)));
    } catch (IOException e) {
        throw new JournalException("Cannot open journal file : " + journalFile.getAbsolutePath(), e);
    }
    int replayedOperations = 0;
    int replayedTransactions = 0;
    long currentTransactionId = -1; // -1 means "no transaction in progress"
    int newBlockCount = -1;
    // operations buffered until their commit record is seen
    LinkedList<AbstractJournalOperation> transactionQueue = new LinkedList<>();
    try {
        AbstractJournalOperation op;
        while ((op = readJournalOperation(in)) != null) {
            // Check transaction id: all buffered ops must belong to one transaction
            if (currentTransactionId == -1)
                currentTransactionId = op.getTransactionId();
            else if (currentTransactionId != op.getTransactionId())
                throw new IllegalStateException("Transaction id inconsistency : " + currentTransactionId
                        + " -> " + op.getTransactionId());
            if (op instanceof CommitOperation) {
                // Check transaction size against the count recorded at commit time
                int opCount = ((CommitOperation) op).getOperationsCount();
                if (transactionQueue.size() != opCount) {
                    throw new IllegalStateException("Transaction size mismatch (expected " + opCount + ", got "
                            + transactionQueue.size() + ")");
                } else {
                    // Everything looks fine, proceed ...
                    log.trace("[" + baseName + "] Replaying transaction #" + currentTransactionId + " ("
                            + transactionQueue.size() + " operation(s))");
                    replayedOperations += transactionQueue.size();
                    replayedTransactions++;
                    newBlockCount = applyOperations(transactionQueue);
                    currentTransactionId = -1;
                }
            } else
                transactionQueue.addLast(op);
        }
        // anything left in the queue never reached its commit record
        if (transactionQueue.size() > 0) {
            op = transactionQueue.removeFirst();
            log.warn("[" + baseName + "] Dropping incomplete transaction : #" + op.getTransactionId());
        }
        syncStore();
        log.warn("[" + baseName + "] Recovery complete. (Replayed " + replayedTransactions
                + " transaction(s) and " + replayedOperations + " operation(s))");
    } finally {
        try {
            in.close();
        } catch (IOException e) {
            // NOTE(review): throwing from finally masks any in-flight exception
            // from the replay loop — confirm this is the intended precedence
            throw new JournalException("Cannot close journal file : " + journalFile.getAbsolutePath(), e);
        }
    }
    return newBlockCount;
}
From source file:org.nuxeo.ecm.core.storage.dbs.DBSSession.java
protected String copyRecurse(String sourceId, String parentId, LinkedList<String> ancestorIds, String name) { String copyId = copy(sourceId, parentId, ancestorIds, name); ancestorIds.addLast(copyId); for (String childId : getChildrenIds(sourceId)) { copyRecurse(childId, copyId, ancestorIds, null); }//from w ww. j a v a2 s .c om ancestorIds.removeLast(); return copyId; }
From source file:com.commander4j.db.JDBDespatch.java
/**
 * Returns the equipment type totals associated with the current despatch
 * number.
 *
 * @return list of JDBEquipmentList entries in result-set order; empty on
 *         error (the SQLException message is recorded via setErrorMessage)
 */
public LinkedList<JDBEquipmentList> getEquipment() {
    LinkedList<JDBEquipmentList> result = new LinkedList<JDBEquipmentList>();
    PreparedStatement stmt = null;
    ResultSet rs = null;
    String temp = "";
    try {
        temp = Common.hostList.getHost(getHostID()).getSqlstatements().getSQL("JDBDespatch.equipment");
        stmt = Common.hostList.getHost(getHostID()).getConnection(getSessionID()).prepareStatement(temp);
        stmt.setFetchSize(25);
        stmt.setString(1, getDespatchNo());
        rs = stmt.executeQuery();
        while (rs.next()) {
            result.addLast(new JDBEquipmentList(rs.getString("equipment_type"), rs.getInt("total")));
        }
    } catch (SQLException e) {
        setErrorMessage(e.getMessage());
    } finally {
        // Close JDBC resources on every path; the original only closed them on
        // success, leaking the statement/result set whenever the query failed.
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ignored) {
                // best-effort close; primary error already reported above
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException ignored) {
                // best-effort close
            }
        }
    }
    return result;
}
From source file:org.gcaldaemon.core.Synchronizer.java
/**
 * Performs a two-way synchronization between the local ics calendar and its
 * Google Calendar counterpart.
 *
 * <p>Local-only changes are inserted/updated at Google; remote-only changes
 * (and remote deletions, when deleteEnabled) are resolved by reloading the
 * calendar from Google. Returns the resulting ics bytes (the previous body
 * when nothing changed) and persists the offline event registry.
 *
 * @param calendar cached calendar holding current body, previous body and URL
 * @return the synchronized ics file content
 * @throws Exception propagated from parsing, registry or Google API calls
 */
final byte[] syncronizeNow(CachedCalendar calendar) throws Exception {
    log.debug("Starting Google Calendar synchronizer...");
    // Create processing variables
    boolean remoteEventChanged;
    Event entry;
    String uid, remoteUID;
    long remoteDate;
    Long storedDate;
    VEvent event;
    int i;
    // Load offline history
    loadEventRegistry();
    // Get historical parameters (uid -> last-seen modification time, per URL)
    HashMap uids = (HashMap) eventRegistry.get(calendar.url);
    if (uids == null) {
        uids = new HashMap();
    }
    // Processed unique IDs (remote ids already handled in the local pass)
    HashSet processedUids = new HashSet();
    // Parse ics files
    Calendar localCalendar = ICalUtilities.parseCalendar(calendar.body);
    Calendar remoteCalendar = ICalUtilities.parseCalendar(calendar.previousBody);
    // Get local and remote changes
    VEvent[] localChanges = ICalUtilities.getNewEvents(remoteCalendar, localCalendar, true, calendar.url);
    VEvent[] remoteChanges = ICalUtilities.getNewEvents(localCalendar, remoteCalendar, false, null);
    // Updatable and removable events
    LinkedList insertableList = new LinkedList();
    LinkedList updatableList = new LinkedList();
    LinkedList removableList = new LinkedList();
    // Process local changes
    for (i = 0; i < localChanges.length; i++) {
        event = localChanges[i];
        uid = ICalUtilities.getUid(event);
        if (uid == null) {
            log.error("Invalid ical file (missing event ID)!");
            continue;
        }
        // Find remote pair
        entry = GCalUtilities.findEvent(calendar, event);
        if (entry == null) {
            if (uids.containsKey(uid)) {
                // Event removed at Google side -> download & remove
                if (log.isDebugEnabled()) {
                    log.debug("Removed event (" + ICalUtilities.getEventTitle(event)
                            + ") found in the Google Calendar.");
                }
            } else {
                // New local event -> insert
                if (log.isDebugEnabled()) {
                    log.debug("New event (" + ICalUtilities.getEventTitle(event)
                            + ") found in the local calendar.");
                }
                insertableList.addLast(event);
            }
        } else {
            // Add local and remote ID to processed UIDs
            processedUids.add(entry.getId());
            // Get remote event's modification date
            remoteDate = entry.getUpdated().getValue();
            storedDate = (Long) uids.get(uid);
            remoteEventChanged = true;
            if (storedDate == null) {
                // fall back to the remote UID when the local one is not in history
                remoteUID = GCalUtilities.getRemoteUID(calendar, uid);
                if (remoteUID != null) {
                    storedDate = (Long) uids.get(remoteUID);
                }
            }
            if (storedDate != null) {
                // FIXME If a 'reminder' changes in GCal singly,
                // Google Calendar does NOT update the LAST_MODIFIED
                // timestamp. Otherwise this comparison works.
                // there is no ms info in ics file
                remoteEventChanged = storedDate.longValue() != remoteDate / 1000 * 1000;
            }
            if (remoteEventChanged) {
                // Event modified at Google side -> download & update
                if (log.isDebugEnabled()) {
                    log.debug("Updated event (" + ICalUtilities.getEventTitle(event)
                            + ") found in the Google Calendar.");
                }
            } else {
                // Local event modified -> update
                if (log.isDebugEnabled()) {
                    log.debug("Updated event (" + ICalUtilities.getEventTitle(event)
                            + ") found in the local calendar.");
                }
                updatableList.addLast(event);
            }
        }
    }
    // Process remote changes
    for (i = 0; i < remoteChanges.length; i++) {
        event = remoteChanges[i];
        // Verify remote ID
        entry = GCalUtilities.findEvent(calendar, event);
        if (entry == null || processedUids.contains(entry.getId())) {
            continue;
        }
        // Verify local ID
        uid = ICalUtilities.getUid(event);
        if (uid == null) {
            log.error("Invalid ical file (missing event ID)!");
            continue;
        }
        // Find ID in history
        if (uids.containsKey(uid)) {
            // Local event removed -> remove event
            if (log.isDebugEnabled()) {
                log.debug("Removed event (" + ICalUtilities.getEventTitle(event)
                        + ") found in the local calendar.");
            }
            removableList.addLast(event);
        } else {
            // New remote event -> download & create
            if (log.isDebugEnabled()) {
                log.debug("New event (" + ICalUtilities.getEventTitle(event)
                        + ") found in the Google Calendar.");
            }
        }
    }
    // Check changes
    if (localChanges.length == 0 && remoteChanges.length == 0) {
        // Save offline registry
        saveEventRegistry(calendar.url, calendar.previousBody);
        // Return previous body
        return calendar.previousBody;
    }
    // Show progress monitor
    if (monitor != null) {
        monitor.setVisible(true);
    }
    try {
        // Do modifications
        if (!removableList.isEmpty() && deleteEnabled) {
            // Remove Google entries
            VEvent[] events = new VEvent[removableList.size()];
            removableList.toArray(events);
            GCalUtilities.removeEvents(calendar, events);
        }
        VTimeZone[] timeZones;
        if (!updatableList.isEmpty() || !insertableList.isEmpty()) {
            // Get timezones
            timeZones = ICalUtilities.getTimeZones(localCalendar);
        } else {
            timeZones = new VTimeZone[0];
        }
        if (!updatableList.isEmpty()) {
            // Update Google entries
            VEvent[] events = new VEvent[updatableList.size()];
            updatableList.toArray(events);
            GCalUtilities.updateEvents(calendar, timeZones, events);
        }
        if (!insertableList.isEmpty()) {
            // Insert new Google entries
            VEvent[] events = new VEvent[insertableList.size()];
            insertableList.toArray(events);
            GCalUtilities.insertEvents(calendar, timeZones, events);
        }
        // Load new calendar from Google
        byte[] newBytes = GCalUtilities.loadCalendar(calendar);
        // Save offline registry
        saveEventRegistry(calendar.url, newBytes);
        // Return new ics file
        return newBytes;
    } finally {
        // Hide progress monitor
        if (monitor != null) {
            try {
                monitor.setVisible(false);
            } catch (Throwable ignored) {
                // UI hide failure must not mask the sync result
            }
        }
    }
}
From source file:com.google.cloud.dns.testing.LocalDnsHelper.java
/**
 * Lists record sets for a zone. Next page token is the ID of the last record listed.
 *
 * <p>Supported options (parsed from {@code query}): fields, name, type,
 * pageToken, maxResults. Returns NOT_FOUND when the zone does not exist,
 * INTERNAL_ERROR when serialization fails, or the list response otherwise.
 */
@VisibleForTesting
Response listDnsRecords(String projectId, String zoneName, String query) {
    Map<String, Object> options = OptionParsers.parseListDnsRecordsOptions(query);
    Response response = checkListOptions(options);
    if (response != null) {
        return response;
    }
    ZoneContainer zoneContainer = findZone(projectId, zoneName);
    if (zoneContainer == null) {
        return Error.NOT_FOUND.response(
                String.format("The 'parameters.managedZone' resource named '%s' does not exist.", zoneName));
    }
    ImmutableSortedMap<String, ResourceRecordSet> dnsRecords = zoneContainer.dnsRecords().get();
    String[] fields = (String[]) options.get("fields");
    String name = (String) options.get("name");
    String type = (String) options.get("type");
    String pageToken = (String) options.get("pageToken");
    // resume strictly after the last record returned on the previous page
    ImmutableSortedMap<String, ResourceRecordSet> fragment = pageToken != null
            ? dnsRecords.tailMap(pageToken, false) : dnsRecords;
    Integer maxResults = options.get("maxResults") == null ? null
            : Integer.valueOf((String) options.get("maxResults"));
    boolean sizeReached = false;
    boolean hasMorePages = false;
    LinkedList<String> serializedRrsets = new LinkedList<>();
    String lastRecordId = null;
    for (String recordSetId : fragment.keySet()) {
        ResourceRecordSet recordSet = fragment.get(recordSetId);
        // only records matching the name/type filters count toward the page
        if (matchesCriteria(recordSet, name, type)) {
            if (sizeReached) {
                // we do not add this, just note that there would be more and there should be a token
                hasMorePages = true;
                break;
            } else {
                lastRecordId = recordSetId;
                try {
                    serializedRrsets
                            .addLast(jsonFactory.toString(OptionParsers.extractFields(recordSet, fields)));
                } catch (IOException e) {
                    return Error.INTERNAL_ERROR.response(String.format(
                            "Error when serializing resource record set in managed zone %s in project %s",
                            zoneName, projectId));
                }
            }
        }
        // re-evaluated each iteration; only appends can change the size
        sizeReached = maxResults != null && maxResults.equals(serializedRrsets.size());
    }
    // page token is emitted only when there is more data and the caller did not
    // exclude nextPageToken via a fields projection
    boolean includePageToken = hasMorePages
            && (fields == null || Arrays.asList(fields).contains("nextPageToken"));
    return toListResponse(serializedRrsets, "rrsets", lastRecordId, includePageToken);
}
From source file:com.joliciel.talismane.TalismaneImpl.java
/**
 * Runs the full Talismane analysis pipeline described by {@code config}:
 * reads input from this instance's reader, runs the configured span of
 * modules (sentence detection, tokenisation, pos-tagging, parsing) between
 * the configured start and end modules, and emits output through the
 * registered processors to this instance's writer.
 *
 * @param config pipeline configuration (modules, processors, readers, limits)
 * @throws TalismaneException if a required module or end-module processor is missing
 */
public void analyse(TalismaneConfig config) {
    try {
        // --- validate that every module the configured span needs is present ---
        if (config.needsSentenceDetector()) {
            if (config.getSentenceDetector() == null) {
                throw new TalismaneException("Sentence detector not provided.");
            }
        }
        if (config.needsTokeniser()) {
            if (config.getTokeniser() == null) {
                throw new TalismaneException("Tokeniser not provided.");
            }
        }
        if (config.needsPosTagger()) {
            if (config.getPosTagger() == null) {
                throw new TalismaneException("Pos-tagger not provided.");
            }
        }
        if (config.needsParser()) {
            if (config.getParser() == null) {
                throw new TalismaneException("Parser not provided.");
            }
        }
        // --- validate that the end module has a processor to emit output ---
        if (config.getEndModule().equals(Module.SentenceDetector)) {
            if (this.getSentenceProcessor() == null) {
                throw new TalismaneException(
                        "No sentence processor provided with sentence detector end module, cannot generate output.");
            }
        }
        if (config.getEndModule().equals(Module.Tokeniser)) {
            if (this.getTokenSequenceProcessor() == null) {
                throw new TalismaneException(
                        "No token sequence processor provided with tokeniser end module, cannot generate output.");
            }
        }
        if (config.getEndModule().equals(Module.PosTagger)) {
            if (this.getPosTagSequenceProcessor() == null) {
                throw new TalismaneException(
                        "No postag sequence processor provided with pos-tagger end module, cannot generate output.");
            }
        }
        if (config.getEndModule().equals(Module.Parser)) {
            if (this.getParseConfigurationProcessor() == null) {
                throw new TalismaneException(
                        "No parse configuration processor provided with parser end module, cannot generate output.");
            }
        }
        // rolling buffers of raw text blocks and detected sentences
        LinkedList<String> textSegments = new LinkedList<String>();
        LinkedList<Sentence> sentences = new LinkedList<Sentence>();
        TokenSequence tokenSequence = null;
        PosTagSequence posTagSequence = null;
        RollingSentenceProcessor rollingSentenceProcessor = this.getFilterService()
                .getRollingSentenceProcessor(config.getFileName(), config.isProcessByDefault());
        Sentence leftover = null;
        if (config.getStartModule().equals(Module.SentenceDetector)
                || config.getStartModule().equals(Module.Tokeniser)) {
            // prime the sentence detector with two text segments, to ensure everything gets processed
            textSegments.addLast("");
            textSegments.addLast("");
        }
        StringBuilder stringBuilder = new StringBuilder();
        boolean finished = false;
        int sentenceCount = 0;
        // sliding window of three processed text blocks (prev / current / next)
        String prevProcessedText = "";
        String processedText = "";
        String nextProcessedText = "";
        SentenceHolder prevSentenceHolder = null;
        int endBlockCharacterCount = 0;
        while (!finished) {
            if (config.getStartModule().equals(Module.SentenceDetector)
                    || config.getStartModule().equals(Module.Tokeniser)) {
                // Note SentenceDetector and Tokeniser start modules treated identically,
                // except that for SentenceDetector we apply a probabilistic sentence detector
                // whereas for Tokeniser we assume all sentence breaks are marked by filters
                // read characters from the reader, one at a time
                char c;
                int r = -1;
                try {
                    r = this.getReader().read();
                } catch (IOException e) {
                    LogUtils.logError(LOG, e);
                }
                if (r == -1) {
                    finished = true;
                    c = '\n';
                } else {
                    c = (char) r;
                }
                // Jump out if we have 3 consecutive end-block characters.
                if (c == config.getEndBlockCharacter()) {
                    endBlockCharacterCount++;
                    if (endBlockCharacterCount == 3) {
                        LOG.info("Three consecutive end-block characters. Exiting.");
                        finished = true;
                    }
                } else {
                    endBlockCharacterCount = 0;
                }
                // flush the accumulated block at whitespace past the block size,
                // at an end-block character, or at end of input
                if (finished || (Character.isWhitespace(c) && stringBuilder.length() > config.getBlockSize())
                        || c == config.getEndBlockCharacter()) {
                    if (c == config.getEndBlockCharacter())
                        stringBuilder.append(c);
                    if (stringBuilder.length() > 0) {
                        String textSegment = stringBuilder.toString();
                        stringBuilder = new StringBuilder();
                        textSegments.add(textSegment);
                    } // is the current block > 0 characters?
                    if (c == config.getEndBlockCharacter()) {
                        textSegments.addLast("");
                    }
                } // is there a next block available?
                if (finished) {
                    // pad with empty segments so the 3-segment window drains fully
                    if (stringBuilder.length() > 0) {
                        textSegments.addLast(stringBuilder.toString());
                        stringBuilder = new StringBuilder();
                    }
                    textSegments.addLast("");
                    textSegments.addLast("");
                    textSegments.addLast("");
                }
                if (c != config.getEndBlockCharacter())
                    stringBuilder.append(c);
                while (textSegments.size() >= 3) {
                    String prevText = textSegments.removeFirst();
                    String text = textSegments.removeFirst();
                    String nextText = textSegments.removeFirst();
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("prevText: " + prevText);
                        LOG.trace("text: " + text);
                        LOG.trace("nextText: " + nextText);
                    }
                    Set<TextMarker> textMarkers = new TreeSet<TextMarker>();
                    for (TextMarkerFilter textMarkerFilter : config.getTextMarkerFilters()) {
                        Set<TextMarker> result = textMarkerFilter.apply(prevText, text, nextText);
                        textMarkers.addAll(result);
                    }
                    // push the text segments back onto the beginning of Deque
                    textSegments.addFirst(nextText);
                    textSegments.addFirst(text);
                    SentenceHolder sentenceHolder = rollingSentenceProcessor.addNextSegment(text, textMarkers);
                    prevProcessedText = processedText;
                    processedText = nextProcessedText;
                    nextProcessedText = sentenceHolder.getText();
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("prevProcessedText: " + prevProcessedText);
                        LOG.trace("processedText: " + processedText);
                        LOG.trace("nextProcessedText: " + nextProcessedText);
                    }
                    boolean reallyFinished = finished && textSegments.size() == 3;
                    if (prevSentenceHolder != null) {
                        if (config.getStartModule().equals(Module.SentenceDetector)) {
                            List<Integer> sentenceBreaks = config.getSentenceDetector()
                                    .detectSentences(prevProcessedText, processedText, nextProcessedText);
                            for (int sentenceBreak : sentenceBreaks) {
                                prevSentenceHolder.addSentenceBoundary(sentenceBreak);
                            }
                        }
                        List<Sentence> theSentences = prevSentenceHolder.getDetectedSentences(leftover);
                        leftover = null;
                        for (Sentence sentence : theSentences) {
                            if (sentence.isComplete() || reallyFinished) {
                                sentences.add(sentence);
                                sentenceCount++;
                            } else {
                                // incomplete trailing sentence is carried into the next holder
                                LOG.debug("Setting leftover to: " + sentence.getText());
                                leftover = sentence;
                            }
                        }
                        if (config.getMaxSentenceCount() > 0 && sentenceCount >= config.getMaxSentenceCount()) {
                            finished = true;
                        }
                    }
                    prevSentenceHolder = sentenceHolder;
                } // we have at least 3 text segments (should always be the case once we get started)
            } else if (config.getStartModule().equals(Module.PosTagger)) {
                if (config.getTokenCorpusReader().hasNextTokenSequence()) {
                    tokenSequence = config.getTokenCorpusReader().nextTokenSequence();
                } else {
                    tokenSequence = null;
                    finished = true;
                }
            } else if (config.getStartModule().equals(Module.Parser)) {
                if (config.getPosTagCorpusReader().hasNextPosTagSequence()) {
                    posTagSequence = config.getPosTagCorpusReader().nextPosTagSequence();
                } else {
                    posTagSequence = null;
                    finished = true;
                }
            } // which start module?
            boolean needToProcess = false;
            if (config.getStartModule().equals(Module.SentenceDetector)
                    || config.getStartModule().equals(Module.Tokeniser))
                needToProcess = !sentences.isEmpty();
            else if (config.getStartModule().equals(Module.PosTagger))
                needToProcess = tokenSequence != null;
            else if (config.getStartModule().equals(Module.Parser))
                needToProcess = posTagSequence != null;
            while (needToProcess) {
                Sentence sentence = null;
                if (config.getStartModule().compareTo(Module.Tokeniser) <= 0
                        && config.getEndModule().compareTo(Module.SentenceDetector) >= 0) {
                    sentence = sentences.poll();
                    LOG.debug("Sentence: " + sentence);
                    if (this.getSentenceProcessor() != null)
                        this.getSentenceProcessor().onNextSentence(sentence.getText(), this.getWriter());
                } // need to read next sentence
                List<TokenSequence> tokenSequences = null;
                if (config.needsTokeniser()) {
                    tokenSequences = config.getTokeniser().tokenise(sentence);
                    tokenSequence = tokenSequences.get(0);
                    if (this.getTokenSequenceProcessor() != null) {
                        this.getTokenSequenceProcessor().onNextTokenSequence(tokenSequence, this.getWriter());
                    }
                } // need to tokenise ?
                List<PosTagSequence> posTagSequences = null;
                if (config.needsPosTagger()) {
                    posTagSequence = null;
                    if (tokenSequences == null || !config.isPropagateTokeniserBeam()) {
                        tokenSequences = new ArrayList<TokenSequence>();
                        tokenSequences.add(tokenSequence);
                    }
                    if (config.getPosTagger() instanceof NonDeterministicPosTagger) {
                        NonDeterministicPosTagger nonDeterministicPosTagger = (NonDeterministicPosTagger) config
                                .getPosTagger();
                        posTagSequences = nonDeterministicPosTagger.tagSentence(tokenSequences);
                        posTagSequence = posTagSequences.get(0);
                    } else {
                        posTagSequence = config.getPosTagger().tagSentence(tokenSequence);
                    }
                    if (posTagSequenceProcessor != null) {
                        posTagSequenceProcessor.onNextPosTagSequence(posTagSequence, this.getWriter());
                    }
                    tokenSequence = null;
                } // need to postag
                if (config.needsParser()) {
                    if (posTagSequences == null || !config.isPropagatePosTaggerBeam()) {
                        posTagSequences = new ArrayList<PosTagSequence>();
                        posTagSequences.add(posTagSequence);
                    }
                    ParseConfiguration parseConfiguration = null;
                    List<ParseConfiguration> parseConfigurations = null;
                    try {
                        if (config.getParser() instanceof NonDeterministicParser) {
                            NonDeterministicParser nonDeterministicParser = (NonDeterministicParser) config
                                    .getParser();
                            parseConfigurations = nonDeterministicParser.parseSentence(posTagSequences);
                            parseConfiguration = parseConfigurations.get(0);
                        } else {
                            parseConfiguration = config.getParser().parseSentence(posTagSequence);
                        }
                        if (this.getParseConfigurationProcessor() != null) {
                            this.getParseConfigurationProcessor().onNextParseConfiguration(parseConfiguration,
                                    this.getWriter());
                        }
                    } catch (Exception e) {
                        // parsing errors abort the run only when stopOnError is set
                        LOG.error(e);
                        if (stopOnError)
                            throw new RuntimeException(e);
                    }
                    posTagSequence = null;
                } // need to parse
                if (config.getStartModule().equals(Module.SentenceDetector)
                        || config.getStartModule().equals(Module.Tokeniser))
                    needToProcess = !sentences.isEmpty();
                else if (config.getStartModule().equals(Module.PosTagger))
                    needToProcess = tokenSequence != null;
                else if (config.getStartModule().equals(Module.Parser))
                    needToProcess = posTagSequence != null;
            } // next sentence
        } // next character
    } finally {
        // always finalize the parse output and release the I/O streams
        if (this.getParseConfigurationProcessor() != null) {
            this.getParseConfigurationProcessor().onCompleteParse();
        }
        try {
            this.getReader().close();
            this.getWriter().flush();
            this.getWriter().close();
        } catch (IOException ioe2) {
            LOG.error(ioe2);
            throw new RuntimeException(ioe2);
        }
    }
}
From source file:com.commander4j.db.JDBDespatch.java
/**
 * Returns up to {@code limit} despatch records with the given status.
 *
 * <p>If the configured SQL contains a {@code [top]} placeholder it is
 * replaced with a "top N" clause; otherwise the limit is bound as the second
 * statement parameter.
 *
 * @param status despatch status to filter on
 * @param limit  maximum number of rows to return
 * @return list of JDBDespatch objects in result-set order; empty on error
 *         (the SQLException message is recorded via setErrorMessage)
 */
public LinkedList<JDBDespatch> browseDespatchData(String status, int limit) {
    String temp = "";
    Boolean top = false;
    PreparedStatement stmt = null;
    LinkedList<JDBDespatch> result = new LinkedList<JDBDespatch>();
    ResultSet rs = null;
    try {
        temp = Common.hostList.getHost(getHostID()).getSqlstatements().getSQL("JDBDespatch.browse");
        if (temp.indexOf("[top]") >= 0) {
            top = true;
            temp = temp.replace("[top]", "top " + String.valueOf(limit));
        }
        stmt = Common.hostList.getHost(getHostID()).getConnection(getSessionID()).prepareStatement(temp);
        stmt.setFetchSize(100);
        stmt.setString(1, status);
        if (top == false) {
            stmt.setInt(2, limit);
        }
        rs = stmt.executeQuery();
        while (rs.next()) {
            result.addLast(new JDBDespatch(getHostID(), getSessionID(), rs.getString("despatch_no"),
                    rs.getTimestamp("despatch_date"), rs.getString("location_id_from"),
                    rs.getString("location_id_to"), rs.getString("status"), rs.getInt("total_pallets"),
                    rs.getString("trailer"), rs.getString("haulier"), rs.getString("load_no"),
                    rs.getString("user_id"), rs.getString("journey_ref")));
        }
    } catch (SQLException e) {
        setErrorMessage(e.getMessage());
    } finally {
        // Close JDBC resources on every path; the original only closed them on
        // success, leaking the statement/result set whenever the query failed.
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException ignored) {
                // best-effort close; primary error already reported above
            }
        }
        if (stmt != null) {
            try {
                stmt.close();
            } catch (SQLException ignored) {
                // best-effort close
            }
        }
    }
    return result;
}
From source file:com.google.cloud.dns.testing.LocalDnsHelper.java
/**
 * Lists zones. Next page token is the last listed zone name and is returned only of there is more
 * to list and if the user does not exclude nextPageToken from field options.
 *
 * <p>Supported options (parsed from {@code query}): fields, dnsName,
 * pageToken, maxResults.
 */
@VisibleForTesting
Response listZones(String projectId, String query) {
    Map<String, Object> options = OptionParsers.parseListZonesOptions(query);
    Response response = checkListOptions(options);
    if (response != null) {
        return response;
    }
    ConcurrentSkipListMap<String, ZoneContainer> containers = findProject(projectId).zones();
    String[] fields = (String[]) options.get("fields");
    String dnsName = (String) options.get("dnsName");
    String pageToken = (String) options.get("pageToken");
    Integer maxResults = options.get("maxResults") == null ? null
            : Integer.valueOf((String) options.get("maxResults"));
    boolean sizeReached = false;
    boolean hasMorePages = false;
    LinkedList<String> serializedZones = new LinkedList<>();
    String lastZoneName = null;
    // resume strictly after the last zone returned on the previous page
    ConcurrentNavigableMap<String, ZoneContainer> fragment = pageToken != null
            ? containers.tailMap(pageToken, false) : containers;
    for (ZoneContainer zoneContainer : fragment.values()) {
        ManagedZone zone = zoneContainer.zone();
        // a dnsName filter, when present, restricts which zones count toward the page
        if (dnsName == null || zone.getDnsName().equals(dnsName)) {
            if (sizeReached) {
                // we do not add this, just note that there would be more and there should be a token
                hasMorePages = true;
                break;
            } else {
                try {
                    lastZoneName = zone.getName();
                    serializedZones.addLast(jsonFactory.toString(OptionParsers.extractFields(zone, fields)));
                } catch (IOException e) {
                    return Error.INTERNAL_ERROR.response(String.format(
                            "Error when serializing managed zone %s in project %s", lastZoneName, projectId));
                }
            }
        }
        // re-evaluated each iteration; only appends can change the size
        sizeReached = maxResults != null && maxResults.equals(serializedZones.size());
    }
    boolean includePageToken = hasMorePages
            && (fields == null || Arrays.asList(fields).contains("nextPageToken"));
    return toListResponse(serializedZones, "managedZones", lastZoneName, includePageToken);
}