List of usage examples for java.lang.Iterable.iterator()
Iterator<T> iterator();
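Before the real-world examples below, here is a minimal, self-contained sketch of the iterator() contract (the class and variable names are illustrative only, not taken from any of the source files):

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class IterableIteratorDemo {
    public static void main(String[] args) {
        // Every List is an Iterable; iterator() returns a fresh Iterator
        // positioned before the first element.
        List<String> names = Arrays.asList("alpha", "beta", "gamma");
        Iterator<String> it = names.iterator();
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}

For standard collections, each call to iterator() yields an independent cursor, which is why several of the examples below obtain the iterator once and then reuse that single reference rather than calling iterator() repeatedly.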
From source file:com.cloudbees.plugins.credentials.common.AbstractIdCredentialsListBoxModel.java
/**
 * Adds supplied credentials to the model.
 *
 * @param credentials the credentials.
 * @return {@code this} for method chaining.
 * @deprecated prefer using the {@link #include(Item, Class)} or {@link #includeAs(Authentication, Item, Class)}
 *             methods to build the list box contents in order to allow credentials providers to not have to
 *             instantiate a full credential instance where those credential providers store the secrets
 *             external from Jenkins.
 */
@Deprecated
@NonNull
public AbstractIdCredentialsListBoxModel<T, C> withAll(@NonNull Iterable<? extends C> credentials) {
    return withMatching(CredentialsMatchers.always(), credentials.iterator());
}
From source file:com.handywedge.binarystore.store.azure.BinaryStoreManagerImpl.java
@SuppressWarnings("unused") @Override/*from w w w . j a va 2 s . c o m*/ public List<BinaryInfo> list(StorageInfo storage, BinaryInfo binary) throws StoreException { logger.info("ABS list method: start."); logger.debug("" + storage.toString()); logger.debug("?" + binary.toString()); long startSingle = System.currentTimeMillis(); List<BinaryInfo> objInfoList = new ArrayList<BinaryInfo>(); CloudBlobClient bClient = getABSClient(binary.getBucketName(), false); try { logger.info("Listing binaries"); Iterable<ListBlobItem> blobs = bClient.getContainerReference(binary.getBucketName()).listBlobs("", true); Iterator<ListBlobItem> blobIterator = blobs.iterator(); while (blobIterator.hasNext()) { ListBlobItem blobItem = blobIterator.next(); if (blobItem instanceof CloudBlockBlob) { CloudBlockBlob blob = (CloudBlockBlob) blobItem; objInfoList.add(createReturnBinaryInfo(blob)); } } } catch (com.microsoft.azure.storage.StorageException se) { throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.LIST_FAIL, se, binary.getFileName()); } catch (URISyntaxException ue) { throw new StoreException(HttpStatus.SC_BAD_REQUEST, ErrorClassification.LIST_FAIL, ue, binary.getFileName()); } logger.info(" " + objInfoList.size()); long endSingle = System.currentTimeMillis(); logger.info("{} Geted : {} ms\n", objInfoList.size(), (endSingle - startSingle)); logger.info("ABS list method: end."); return objInfoList; }
From source file:com.transwarp.hbase.bulkload.TextSortReducer.java
@Override
protected void reduce(ImmutableBytesWritable rowKey, java.lang.Iterable<Text> lines,
        Reducer<ImmutableBytesWritable, Text, ImmutableBytesWritable, KeyValue>.Context context)
        throws java.io.IOException, InterruptedException {
    // although reduce() is called per-row, handle pathological case
    long threshold = context.getConfiguration().getLong("reducer.row.threshold", 1L * (1 << 30));
    Iterator<Text> iter = lines.iterator();
    while (iter.hasNext()) {
        Set<KeyValue> map = new TreeSet<KeyValue>(KeyValue.COMPARATOR);
        long curSize = 0;
        // stop at the end or the RAM threshold
        while (iter.hasNext() && curSize < threshold) {
            Text line = iter.next();
            String lineStr = line.toString();
            try {
                ArrayList<String> parsedLine = ParsedLine.parse(converter.getRecordSpec(), lineStr);
                Put p = converter.convert(parsedLine, rowKey.get());
                for (List<KeyValue> kvs : p.getFamilyMap().values()) {
                    for (KeyValue kv : kvs) {
                        map.add(kv);
                        curSize += kv.getLength();
                    }
                }
            } catch (FormatException badLine) {
                if (skipBadLines) {
                    System.err.println("Bad line." + badLine.getMessage());
                    incrementBadLineCount(1);
                    return;
                }
                throw new IOException(badLine);
            } catch (IllegalArgumentException e) {
                if (skipBadLines) {
                    System.err.println("Bad line." + e.getMessage());
                    incrementBadLineCount(1);
                    return;
                }
                throw new IOException(e);
            }
        }
        context.setStatus("Read " + map.size() + " entries of " + map.getClass() + "("
                + StringUtils.humanReadableInt(curSize) + ")");
        int index = 0;
        for (KeyValue kv : map) {
            if (isDelete) {
                kv = new KeyValue(kv.getRow(), kv.getFamily(), kv.getQualifier(), 0, KeyValue.Type.Delete,
                        kv.getValue());
            }
            context.write(rowKey, kv);
            if (++index > 0 && index % 100 == 0)
                context.setStatus("Wrote " + index + " key values.");
        }
        // if we have more entries to process
        if (iter.hasNext()) {
            // force flush because we cannot guarantee intra-row sorted order
            context.write(null, null);
        }
    }
}
From source file:net.solarnetwork.node.backup.FileSystemBackupService.java
@Override
public Backup performBackup(final Iterable<BackupResource> resources) {
    if (resources == null) {
        return null;
    }
    final Iterator<BackupResource> itr = resources.iterator();
    if (!itr.hasNext()) {
        log.debug("No resources provided, nothing to backup");
        return null;
    }
    BackupStatus status = setStatusIf(RunningBackup, Configured);
    if (status != RunningBackup) {
        return null;
    }

    final Calendar now = new GregorianCalendar();
    now.set(Calendar.MILLISECOND, 0);
    final String archiveName = String.format(ARCHIVE_NAME_FORMAT, now);
    final File archiveFile = new File(backupDir, archiveName);
    final String archiveKey = getArchiveKey(archiveName);
    log.info("Starting backup to archive {}", archiveName);
    log.trace("Backup archive: {}", archiveFile.getAbsolutePath());
    Backup backup = null;
    ZipOutputStream zos = null;
    try {
        zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(archiveFile)));
        while (itr.hasNext()) {
            BackupResource r = itr.next();
            log.debug("Backing up resource {} to archive {}", r.getBackupPath(), archiveName);
            zos.putNextEntry(new ZipEntry(r.getBackupPath()));
            FileCopyUtils.copy(r.getInputStream(), new FilterOutputStream(zos) {

                @Override
                public void close() throws IOException {
                    // FileCopyUtils closes the stream, which we don't want
                }

            });
        }
        zos.flush();
        zos.finish();
        log.info("Backup complete to archive {}", archiveName);
        backup = new SimpleBackup(now.getTime(), archiveKey, archiveFile.length(), true);

        // clean out older backups
        File[] backupFiles = getAvailableBackupFiles();
        if (backupFiles != null && backupFiles.length > additionalBackupCount + 1) {
            // delete older files
            for (int i = additionalBackupCount + 1; i < backupFiles.length; i++) {
                log.info("Deleting old backup archive {}", backupFiles[i].getName());
                if (!backupFiles[i].delete()) {
                    log.warn("Unable to delete backup archive {}", backupFiles[i].getAbsolutePath());
                }
            }
        }
    } catch (IOException e) {
        log.error("IO error creating backup: {}", e.getMessage());
        setStatus(Error);
    } catch (RuntimeException e) {
        log.error("Error creating backup: {}", e.getMessage());
        setStatus(Error);
    } finally {
        if (zos != null) {
            try {
                zos.close();
            } catch (IOException e) {
                // ignore this
            }
        }
        status = setStatusIf(Configured, RunningBackup);
        if (status != Configured) {
            // clean up if we encountered an error
            if (archiveFile.exists()) {
                archiveFile.delete();
            }
        }
    }
    return backup;
}
From source file:com.atlassian.jira.rest.client.internal.json.IssueJsonParserTest.java
@Test
public void issueWithChangelog() throws JSONException {
    final Issue issue = parseIssue("/json/issue/valid-5.0-with-changelog.json");
    assertEquals("HST-1", issue.getKey());

    final Iterable<ChangelogGroup> changelog = issue.getChangelog();
    assertNotNull(changelog);
    assertEquals(4, Iterables.size(changelog));

    final Iterator<ChangelogGroup> iterator = changelog.iterator();

    final BasicUser user1 = new BasicUser(toUri("http://localhost:2990/jira/rest/api/2/user?username=user1"),
            "user1", "User One");
    final BasicUser user2 = new BasicUser(toUri("http://localhost:2990/jira/rest/api/2/user?username=user2"),
            "user2", "User Two");

    verifyChangelog(iterator.next(), "2012-04-12T14:28:28.255+0200", user1, ImmutableList.of(
            new ChangelogItem(FieldType.JIRA, "duedate", null, null, "2012-04-12", "2012-04-12 00:00:00.0"),
            new ChangelogItem(FieldType.CUSTOM, "Radio Field", null, null, "10000", "One")));

    verifyChangelog(iterator.next(), "2012-04-12T14:28:44.079+0200", user1, ImmutableList
            .of(new ChangelogItem(FieldType.JIRA, "assignee", "user1", "User One", "user2", "User Two")));

    verifyChangelog(iterator.next(), "2012-04-12T14:30:09.690+0200", user2, ImmutableList.of(
            new ChangelogItem(FieldType.JIRA, "summary", null, "Simple history test", null,
                    "Simple history test - modified"),
            new ChangelogItem(FieldType.JIRA, "issuetype", "1", "Bug", "2", "New Feature"),
            new ChangelogItem(FieldType.JIRA, "priority", "3", "Major", "4", "Minor"),
            new ChangelogItem(FieldType.JIRA, "description", null, "Initial Description", null,
                    "Modified Description"),
            new ChangelogItem(FieldType.CUSTOM, "Date Field", "2012-04-11T14:26+0200", "11/Apr/12 2:26 PM",
                    "2012-04-12T14:26+0200", "12/Apr/12 2:26 PM"),
            new ChangelogItem(FieldType.JIRA, "duedate", "2012-04-12", "2012-04-12 00:00:00.0", "2012-04-13",
                    "2012-04-13 00:00:00.0"),
            new ChangelogItem(FieldType.CUSTOM, "Radio Field", "10000", "One", "10001", "Two"),
            new ChangelogItem(FieldType.CUSTOM, "Text Field", null, "Initial text field value", null,
                    "Modified text field value")));

    verifyChangelog(iterator.next(), "2012-04-12T14:28:44.079+0200", null, ImmutableList
            .of(new ChangelogItem(FieldType.JIRA, "assignee", "user1", "User One", "user2", "User Two")));
}
From source file:guru.qas.martini.DefaultMixologist.java
private Step getGherkinStep(Background background, ScenarioDefinition definition, PickleStep step) {
    List<Step> backgroundSteps = null == background ? ImmutableList.of() : background.getSteps();
    List<Step> definitionSteps = definition.getSteps();
    Iterable<Step> steps = Iterables.concat(backgroundSteps, definitionSteps);

    List<PickleLocation> locations = step.getLocations();
    Set<Integer> lines = Sets.newHashSetWithExpectedSize(locations.size());
    for (PickleLocation location : locations) {
        int line = location.getLine();
        lines.add(line);
    }

    Step gherkinStep = null;
    for (Iterator<Step> i = steps.iterator(); gherkinStep == null && i.hasNext();) {
        Step candidate = i.next();
        Location location = candidate.getLocation();
        int line = location.getLine();
        gherkinStep = lines.contains(line) ? candidate : null;
    }

    checkState(null != gherkinStep, "unable to locate Step %s in ScenarioDefinition %s", step, definition);
    return gherkinStep;
}
From source file:org.apereo.openlrs.storage.aws.elasticsearch.XApiOnlyAwsElasticsearchTierTwoStorage.java
@Override
public List<OpenLRSEntity> findAll() {
    SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
    List<OpenLRSEntity> openLRSentities = new ArrayList<OpenLRSEntity>();
    searchSourceBuilder.query(QueryBuilders.matchAllQuery());

    Search search = new Search.Builder(searchSourceBuilder.toString()).addIndex(STATEMENT_INDEX)
            .addType(STATEMENT_TYPE).setParameter("from", 0)
            .setParameter(Parameters.SIZE, 100) // by default elasticsearch returns only 10 results.
            .build();

    try {
        SearchResult result = jestClient.execute(search);
        // TODO remove deprecated.
        Iterable<Statement> iterableStatements = result.getSourceAsObjectList(Statement.class);
        if (iterableStatements != null) {
            openLRSentities = new ArrayList<OpenLRSEntity>(IteratorUtils.toList(iterableStatements.iterator()));
        }
    } catch (IOException e) {
        log.error("Could not findAll AWS Statements");
        e.printStackTrace();
    }
    return openLRSentities;
}
From source file:com.dasein.cloud.azurepack.tests.platform.AzurePackRelationalDatabaseSupportTest.java
@Test
public void testGetSupportedVersionsReturnsCorrectValues() throws CloudException, InternalException {
    // for now getSupportedVersions should return an empty list
    final Iterable<String> supportedVersions = support.getSupportedVersions(null);
    assertNotNull(supportedVersions);
    assertEquals(0, IteratorUtils.toList(supportedVersions.iterator()).size());
}
From source file:com.link_intersystems.lang.reflect.criteria.MemberCriteriaTest.java
@Test
public void annotatedElementsPackagesOnly() throws SecurityException, NoSuchMethodException {
    ClassCriteria classCriteria = new ClassCriteria();
    classCriteria.setSelection(ClassType.CLASSES);
    memberCriteria.membersOfType(Method.class);
    memberCriteria.named("size");
    Iterable<Class<?>> classIterable = classCriteria.getIterable(ArrayList.class);
    Iterable<? extends AnnotatedElement> annotatedElementIterable = memberCriteria
            .getAnnotatedElementIterable(classIterable, IterateStrategy.PACKAGES_ONLY);
    Iterator<? extends AnnotatedElement> iterator = annotatedElementIterable.iterator();

    assertTrue(iterator.hasNext());
    AnnotatedElement next = iterator.next();
    assertEquals(ArrayList.class.getPackage(), next);

    next = iterator.next();
    assertEquals(AbstractList.class.getPackage(), next);

    next = iterator.next();
    assertEquals(AbstractCollection.class.getPackage(), next);

    next = iterator.next();
    assertEquals(Object.class.getPackage(), next);

    assertFalse(iterator.hasNext());
}
From source file:net.solarnetwork.web.support.SimpleCsvView.java
@Override
protected void renderMergedOutputModel(Map<String, Object> model, HttpServletRequest request,
        HttpServletResponse response) throws Exception {
    if (model.isEmpty()) {
        return;
    }

    final String charset = getResponseCharacterEncoding();
    response.setCharacterEncoding(charset);
    response.setContentType(getContentType());

    final Object data = (dataModelKey != null && model.containsKey(dataModelKey) ? model.get(dataModelKey)
            : model);
    if (data == null) {
        return;
    }

    @SuppressWarnings("unchecked")
    final Collection<String> fieldOrder = (fieldOrderKey != null
            && model.get(fieldOrderKey) instanceof Collection ? (Collection<String>) model.get(fieldOrderKey)
                    : null);

    Iterable<?> rows = null;
    if (data instanceof Iterable) {
        rows = (Iterable<?>) data;
    } else {
        List<Object> tmpList = new ArrayList<Object>(1);
        tmpList.add(data);
        rows = tmpList;
    }

    Object row = null;
    Iterator<?> rowIterator = rows.iterator();
    if (!rowIterator.hasNext()) {
        return;
    }

    // get first row, to use for fields
    row = rowIterator.next();
    if (row == null) {
        return;
    }

    final List<String> fieldList = getCSVFields(row, fieldOrder);
    final String[] fields = fieldList.toArray(new String[fieldList.size()]);

    final ICsvMapWriter writer = new CsvMapWriter(response.getWriter(), CsvPreference.EXCEL_PREFERENCE);
    try {
        // output header
        if (true) { // TODO make configurable property
            Map<String, String> headerMap = new HashMap<String, String>(fields.length);
            for (String field : fields) {
                headerMap.put(field, field);
            }
            writeCSV(writer, fields, headerMap);
        }

        // output first row
        writeCSV(writer, fields, row);

        // output remainder rows
        while (rowIterator.hasNext()) {
            row = rowIterator.next();
            writeCSV(writer, fields, row);
        }
    } finally {
        writer.flush();
        writer.close();
    }
}