List of usage examples for java.util.List.listIterator(int index)
ListIterator<E> listIterator(int index);
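Most of the project examples below pass list.size() as the index so they can walk the list backwards with hasPrevious()/previous(). Before those, here is a minimal self-contained sketch of both idioms; the class and variable names are illustrative only and do not come from any of the projects listed below.

import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class ListIteratorIndexDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma");

        // Starting at index list.size() places the cursor after the last element,
        // so previous() returns the elements in reverse order.
        for (ListIterator<String> it = names.listIterator(names.size()); it.hasPrevious();) {
            System.out.println(it.previous()); // prints gamma, beta, alpha
        }

        // Starting at any other valid index iterates forward from that position.
        for (ListIterator<String> it = names.listIterator(1); it.hasNext();) {
            System.out.println(it.next()); // prints beta, gamma
        }
    }
}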
From source file:ezbake.data.mongo.EzMongoHandler.java
private PurgeResult purge(long id, Set<Long> toPurge, int batchSize, EzSecurityToken token) {
    appLog.info("Purging ID {} with batchSize {} with items:\n {}", id, batchSize, toPurge);

    final PurgeResult result = new PurgeResult(false);
    final Set<Long> purged = new HashSet<>();
    final Set<Long> unpurged = new HashSet<>();

    Set<String> collectionNames = db.getCollectionNames();
    // sort the collection names
    List<String> collNamesSortedList = MongoConverter.asSortedList(collectionNames);

    // get the collection name at which to start the purge
    String purgeStartCollName = getCollNameOfPurgeId(id);
    appLog.info("Collection Name of Collection in Purge Tracker: " + purgeStartCollName + " For Purge Id: " + id);

    ListIterator<String> collNamesIterator = null;
    if (purgeStartCollName != null) {
        collNamesIterator = collNamesSortedList.listIterator(collNamesSortedList.indexOf(purgeStartCollName));
    } else {
        collNamesIterator = collNamesSortedList.listIterator();
    }

    // batch size tracker
    int batchTracker = 0;
    while ((collNamesIterator.hasNext()) && (batchTracker < batchSize)) {
        String collectionName = collNamesIterator.next();
        if (!isNotSystemCollection(collectionName)) {
            continue;
        }

        DBObject query = new BasicDBObject();
        query.put(RedactHelper.APP_ID_FIELD, appId);
        query.put(RedactHelper.ID_FIELD, new BasicDBObject("$in", toPurge));
        DBCursor cursor = db.getCollection(collectionName).find(query);

        while ((cursor.hasNext()) && (batchTracker < batchSize)) {
            DBObject dbObject = cursor.next();
            long purgeId = (Long) dbObject.get(RedactHelper.ID_FIELD);
            Object composite = dbObject.get(RedactHelper.COMPOSITE_FIELD);
            if (composite != null && (Boolean) composite) {
                appLog.info("Composite item cannot be purged: _id {} ", dbObject.get("_id"));
                unpurged.add(purgeId);
            } else {
                appLog.info("Purging item _id {} and Purge Id {}", dbObject.get("_id"), purgeId);
                purged.add(purgeId);
                db.getCollection(collectionName).remove(dbObject);
            }
            batchTracker++;
        }

        if (cursor.hasNext()) {
            result.setIsFinished(false);
        } else {
            result.setIsFinished(true);
        }

        // update purge tracker collection name with purge id and collection name where purge stopped
        updatePurgeTracker(id, collectionName, token);
    }

    result.setPurged(purged);
    result.setUnpurged(unpurged);

    appLog.info("Size of Purged Items {}", result.getPurged().size());

    return result;
}
From source file:net.sf.jasperreports.engine.export.JRHtmlExporter.java
protected void writeImageMap(String imageMapName, JRPrintImage image,
        List<JRPrintImageAreaHyperlink> imageMapAreas) throws IOException {
    writer.write("<map name=\"" + imageMapName + "\">\n");

    for (ListIterator<JRPrintImageAreaHyperlink> it = imageMapAreas.listIterator(imageMapAreas.size()); it
            .hasPrevious();) {
        JRPrintImageAreaHyperlink areaHyperlink = it.previous();
        JRPrintImageArea area = areaHyperlink.getArea();

        writer.write(" <area shape=\"" + JRPrintImageArea.getHtmlShape(area.getShape()) + "\"");
        writeImageAreaCoordinates(area.getCoordinates());
        writeImageAreaHyperlink(areaHyperlink.getHyperlink());
        writer.write("/>\n");
    }

    if (image.getHyperlinkTypeValue() != HyperlinkTypeEnum.NONE) {
        writer.write(" <area shape=\"default\"");
        writeImageAreaCoordinates(new int[] { 0, 0, image.getWidth(), image.getHeight() }); // for IE
        writeImageAreaHyperlink(image);
        writer.write("/>\n");
    }

    writer.write("</map>\n");
}
From source file:net.sf.jasperreports.engine.export.HtmlExporter.java
protected void writeImageMap(String imageMapName, JRPrintImage image,
        List<JRPrintImageAreaHyperlink> imageMapAreas) throws IOException {
    writer.write("<map name=\"" + imageMapName + "\">\n");

    for (ListIterator<JRPrintImageAreaHyperlink> it = imageMapAreas.listIterator(imageMapAreas.size()); it
            .hasPrevious();) {
        JRPrintImageAreaHyperlink areaHyperlink = it.previous();
        JRPrintHyperlink link = areaHyperlink.getHyperlink();
        JRPrintImageArea area = areaHyperlink.getArea();

        writer.write(" <area shape=\"" + JRPrintImageArea.getHtmlShape(area.getShape()) + "\"");
        writeImageAreaCoordinates(area.getCoordinates());
        writeImageAreaHyperlink(link);
        writer.write("/>\n");
    }

    if (image.getHyperlinkTypeValue() != HyperlinkTypeEnum.NONE) {
        writer.write(" <area shape=\"default\"");
        writeImageAreaCoordinates(new int[] { 0, 0, image.getWidth(), image.getHeight() }); // for IE
        writeImageAreaHyperlink(image);
        writer.write("/>\n");
    }

    writer.write("</map>\n");
}
From source file:net.sourceforge.fenixedu.domain.DegreeCurricularPlan.java
public ExecutionDegree getMostRecentExecutionDegree() {
    if (getExecutionDegreesSet().isEmpty()) {
        return null;
    }

    final ExecutionYear currentYear = ExecutionYear.readCurrentExecutionYear();

    ExecutionDegree result = getExecutionDegreeByYear(currentYear);
    if (result != null) {
        return result;
    }

    final List<ExecutionDegree> sorted = new ArrayList<ExecutionDegree>(getExecutionDegreesSet());
    Collections.sort(sorted, ExecutionDegree.EXECUTION_DEGREE_COMPARATORY_BY_YEAR);

    final ExecutionDegree first = sorted.iterator().next();
    if (sorted.size() == 1) {
        return first;
    }

    if (first.getExecutionYear().isAfter(currentYear)) {
        return first;
    } else {
        final ListIterator<ExecutionDegree> iter = sorted.listIterator(sorted.size());
        while (iter.hasPrevious()) {
            final ExecutionDegree executionDegree = iter.previous();
            if (executionDegree.getExecutionYear().isBeforeOrEquals(currentYear)) {
                return executionDegree;
            }
        }
    }

    return null;
}
From source file:org.codehaus.mojo.webminifier.WebMinifierMojo.java
/**
 * Main entry point for the MOJO.
 *
 * @throws MojoExecutionException if there's a problem in the normal course of execution.
 * @throws MojoFailureException if there's a problem with the MOJO itself.
 */
public void execute() throws MojoExecutionException, MojoFailureException {
    // Start off by copying all files over. We'll ultimately remove the js files that we don't need from there, and
    // create new ones in there (same goes for css files and anything else we minify).
    FileUtils.deleteQuietly(destinationFolder);
    try {
        FileUtils.copyDirectory(sourceFolder, destinationFolder);
    } catch (IOException e) {
        throw new MojoExecutionException("Cannot copy file to target folder", e);
    }

    // Process each HTML source file and concatenate into unminified output scripts
    int minifiedCounter = 0;

    // If a split point already exists on disk then we've been through the minification process. As
    // minification can be expensive, we would like to avoid performing it multiple times. Thus storing
    // a set of what we've previously minified enables us.
    Set<File> existingConcatenatedJsResources = new HashSet<File>();
    Set<File> consumedJsResources = new HashSet<File>();

    for (String targetHTMLFile : getArrayOfTargetHTMLFiles()) {
        File targetHTML = new File(destinationFolder, targetHTMLFile);

        // Parse HTML file and locate SCRIPT elements
        DocumentResourceReplacer replacer;
        try {
            replacer = new DocumentResourceReplacer(targetHTML);
        } catch (SAXException e) {
            throw new MojoExecutionException("Problem reading html document", e);
        } catch (IOException e) {
            throw new MojoExecutionException("Problem opening html document", e);
        }

        List<File> jsResources = replacer.findJSResources();

        if (jsSplitPoints == null) {
            jsSplitPoints = new Properties();
        }

        File concatenatedJsResource = null;

        URI destinationFolderUri = destinationFolder.toURI();

        // Split the js resources into two lists: one containing all external dependencies, the other containing
        // project sources. We do this so that project sources can be minified without the dependencies (libraries
        // generally don't need to distribute the dependencies).
        int jsDependencyProjectResourcesIndex;
        if (splitDependencies) {
            List<File> jsDependencyResources = new ArrayList<File>(jsResources.size());
            List<File> jsProjectResources = new ArrayList<File>(jsResources.size());
            for (File jsResource : jsResources) {
                String jsResourceUri = destinationFolderUri.relativize(jsResource.toURI()).toString();
                File jsResourceFile = new File(projectSourceFolder, jsResourceUri);
                if (jsResourceFile.exists()) {
                    jsProjectResources.add(jsResource);
                } else {
                    jsDependencyResources.add(jsResource);
                }
            }

            // Re-constitute the js resource list from dependency resources + project resources and note the index
            // in the list that represents the start of project sources in the list. We need this information later.
            jsDependencyProjectResourcesIndex = jsDependencyResources.size();
            jsResources = jsDependencyResources;
            jsResources.addAll(jsProjectResources);
        } else {
            jsDependencyProjectResourcesIndex = 0;
        }

        // Walk backwards through the script declarations and note what files will map to what split point.
        Map<File, File> jsResourceTargetFiles = new LinkedHashMap<File, File>(jsResources.size());
        ListIterator<File> jsResourcesIter = jsResources.listIterator(jsResources.size());
        boolean splittingDependencies = false;
        while (jsResourcesIter.hasPrevious()) {
            int jsResourceIterIndex = jsResourcesIter.previousIndex();
            File jsResource = jsResourcesIter.previous();

            String candidateSplitPointNameUri = destinationFolderUri.relativize(jsResource.toURI()).toString();
            String splitPointName = (String) jsSplitPoints.get(candidateSplitPointNameUri);

            // If we do not have a split point name and the resource is a dependency of this project i.e. it is not
            // within our src/main folder then we give it a split name of "dependencies". Factoring out dependencies
            // into their own split point is a useful thing to do and will always be required when building
            // libraries.
            if (splitDependencies && splitPointName == null && !splittingDependencies) {
                if (jsResourceIterIndex < jsDependencyProjectResourcesIndex) {
                    splitPointName = Integer.valueOf(++minifiedCounter).toString();
                    splittingDependencies = true;
                }
            }

            // If we have no name and we've not been in here before, then assign an initial name based on a number.
            if (splitPointName == null && concatenatedJsResource == null) {
                splitPointName = Integer.valueOf(++minifiedCounter).toString();
            }

            // We have a new split name so use it for this file and upwards in the script statements until we
            // either hit another split point or there are no more script statements.
            if (splitPointName != null) {
                concatenatedJsResource = new File(destinationFolder, splitPointName + ".js");

                // Note that we've previously created this.
                if (concatenatedJsResource.exists()) {
                    existingConcatenatedJsResources.add(concatenatedJsResource);
                }
            }

            jsResourceTargetFiles.put(jsResource, concatenatedJsResource);
        }

        for (File jsResource : jsResources) {
            concatenatedJsResource = jsResourceTargetFiles.get(jsResource);
            if (!existingConcatenatedJsResources.contains(concatenatedJsResource)) {
                // Concatenate input file onto output resource file
                try {
                    concatenateFile(jsResource, concatenatedJsResource);
                } catch (IOException e) {
                    throw new MojoExecutionException("Problem concatenating JS files", e);
                }

                // Finally, remove the JS resource from the target folder as it is no longer required (we've
                // concatenated it).
                consumedJsResources.add(jsResource);
            }
        }

        // Reduce the list of js resource target files to a distinct set
        LinkedHashSet<File> concatenatedJsResourcesSet = new LinkedHashSet<File>(jsResourceTargetFiles.values());
        File[] concatenatedJsResourcesArray = new File[concatenatedJsResourcesSet.size()];
        concatenatedJsResourcesSet.toArray(concatenatedJsResourcesArray);
        List<File> concatenatedJsResources = Arrays.asList(concatenatedJsResourcesArray);

        // Minify the concatenated JS resource files
        if (jsCompressorType != JsCompressorType.NONE) {
            List<File> minifiedJSResources = new ArrayList<File>(concatenatedJsResources.size());

            ListIterator<File> concatenatedJsResourcesIter = concatenatedJsResources
                    .listIterator(concatenatedJsResources.size());
            while (concatenatedJsResourcesIter.hasPrevious()) {
                concatenatedJsResource = concatenatedJsResourcesIter.previous();

                File minifiedJSResource;
                try {
                    String uri = concatenatedJsResource.toURI().toString();
                    int i = uri.lastIndexOf(".js");
                    String minUri;
                    if (i > -1) {
                        minUri = uri.substring(0, i) + "-min.js";
                    } else {
                        minUri = uri;
                    }
                    minifiedJSResource = FileUtils.toFile(new URL(minUri));
                } catch (MalformedURLException e) {
                    throw new MojoExecutionException("Problem determining file URL", e);
                }

                minifiedJSResources.add(minifiedJSResource);

                // If we've not actually performed the minification before... then do so. This is the expensive bit
                // so we like to avoid it if we can.
                if (!existingConcatenatedJsResources.contains(concatenatedJsResource)) {
                    boolean warningsFound;
                    try {
                        warningsFound = minifyJSFile(concatenatedJsResource, minifiedJSResource);
                    } catch (IOException e) {
                        throw new MojoExecutionException("Problem reading/writing JS", e);
                    }

                    logCompressionRatio(minifiedJSResource.getName(), concatenatedJsResource.length(),
                            minifiedJSResource.length());

                    // If there were warnings then the user may want to manually invoke the compressor for further
                    // investigation.
                    if (warningsFound) {
                        getLog().warn("Warnings were found. " + concatenatedJsResource
                                + " is available for your further investigations.");
                    }
                }
            }

            // Update source references
            replacer.replaceJSResources(destinationFolder, targetHTML, minifiedJSResources);
        } else {
            List<File> unminifiedJSResources = new ArrayList<File>(concatenatedJsResources.size());

            ListIterator<File> concatenatedJsResourcesIter = concatenatedJsResources
                    .listIterator(concatenatedJsResources.size());
            while (concatenatedJsResourcesIter.hasPrevious()) {
                concatenatedJsResource = concatenatedJsResourcesIter.previous();
                unminifiedJSResources.add(concatenatedJsResource);
            }

            replacer.replaceJSResources(destinationFolder, targetHTML, unminifiedJSResources);
            getLog().info("Concatenated resources with no compression");
        }

        // Write HTML file to output dir
        try {
            replacer.writeHTML(targetHTML, encoding);
        } catch (TransformerException e) {
            throw new MojoExecutionException("Problem transforming html", e);
        } catch (IOException e) {
            throw new MojoExecutionException("Problem writing html", e);
        }
    }

    // Clean up including the destination folder recursively where directories have nothing left in them.
    for (File consumedJsResource : consumedJsResources) {
        consumedJsResource.delete();
    }
    removeEmptyFolders(destinationFolder);
}
From source file:au.org.emii.portal.composer.MapComposer.java
public void loadUserSession(String sessionid) {
    Scanner scanner = null;
    try {
        String sfld = getSettingsSupplementary().getProperty(StringConstants.ANALYSIS_OUTPUT_DIR) + "session/"
                + sessionid;
        File sessfolder = new File(sfld);
        if (!sessfolder.exists()) {
            showMessage("Session information does not exist. Please provide a valid session id");
            return;
        }

        scanner = new Scanner(new File(sfld + "/details.txt"));

        // first grab the zoom level and bounding box
        String[] mapdetails = scanner.nextLine().split(",");

        BoundingBox bb = new BoundingBox();
        bb.setMinLongitude(Float.parseFloat(mapdetails[1]));
        bb.setMinLatitude(Float.parseFloat(mapdetails[2]));
        bb.setMaxLongitude(Float.parseFloat(mapdetails[3]));
        bb.setMaxLatitude(Float.parseFloat(mapdetails[4]));
        openLayersJavascript.setAdditionalScript(openLayersJavascript.zoomToBoundingBox(bb, true));

        String[] scatterplotNames = null;
        while (scanner.hasNextLine()) {
            String line = scanner.nextLine();
            if (line.startsWith("scatterplotNames")) {
                scatterplotNames = line.substring(17).split("___");
            }
        }
        ArrayUtils.reverse(scatterplotNames);

        // ignore fields not found
        XStream xstream = new XStream(new DomDriver()) {
            protected MapperWrapper wrapMapper(MapperWrapper next) {
                return new MapperWrapper(next) {
                    public boolean shouldSerializeMember(Class definedIn, String fieldName) {
                        if (definedIn == Object.class || !super.shouldSerializeMember(definedIn, fieldName))
                            System.out.println("faled to read: " + definedIn + ", " + fieldName);
                        return definedIn != Object.class ? super.shouldSerializeMember(definedIn, fieldName)
                                : false;
                    }
                };
            }

            @Override
            public Object unmarshal(HierarchicalStreamReader reader) {
                Object o = super.unmarshal(reader);
                if (o instanceof BiocacheQuery)
                    ((BiocacheQuery) o).getFullQ(false);
                return o;
            }

            @Override
            public Object unmarshal(HierarchicalStreamReader reader, Object root) {
                Object o = super.unmarshal(reader, root);
                if (o instanceof BiocacheQuery)
                    ((BiocacheQuery) o).getFullQ(false);
                return o;
            }

            @Override
            public Object unmarshal(HierarchicalStreamReader reader, Object root, DataHolder dataHolder) {
                Object o = super.unmarshal(reader, root, dataHolder);
                if (o instanceof BiocacheQuery)
                    ((BiocacheQuery) o).getFullQ(false);
                return o;
            }
        };

        PersistenceStrategy strategy = new FilePersistenceStrategy(new File(sfld), xstream);
        List list = new XmlArrayList(strategy);

        ListIterator it = list.listIterator(list.size());
        int scatterplotIndex = 0;
        while (it.hasPrevious()) {
            Object o = it.previous();
            MapLayer ml = null;
            if (o instanceof MapLayer) {
                ml = (MapLayer) o;
                LOGGER.debug("Loading " + ml.getName() + " -> " + ml.isDisplayed());
                addUserDefinedLayerToMenu(ml, false);
            } else if (o instanceof ScatterplotDataDTO) {
                ScatterplotDataDTO spdata = (ScatterplotDataDTO) o;
                loadScatterplot(spdata, "My Scatterplot " + scatterplotIndex++);
            }

            if (ml != null) {
                addUserDefinedLayerToMenu(ml, true);
            }
        }
    } catch (Exception e) {
        try {
            File f = new File("/data/sessions/" + sessionid + ".txt");
            PrintWriter pw = new PrintWriter(f);
            e.printStackTrace(pw);
            pw.close();
        } catch (Exception ex) {
        }
        LOGGER.error("Unable to load session data", e);
        showMessage("Unable to load session data");
    } finally {
        if (scanner != null) {
            scanner.close();
        }
        try {
            File f = new File("/data/sessions/ok/" + sessionid + ".txt");
            FileUtils.writeStringToFile(f, "ok");
        } catch (Exception ex) {
        }
    }
}
From source file:org.apache.nifi.web.StandardNiFiServiceFacade.java
@Override
public BulletinBoardDTO getBulletinBoard(BulletinQueryDTO query) {
    // build the query
    final BulletinQuery.Builder queryBuilder = new BulletinQuery.Builder().groupIdMatches(query.getGroupId())
            .sourceIdMatches(query.getSourceId()).nameMatches(query.getName())
            .messageMatches(query.getMessage()).after(query.getAfter()).limit(query.getLimit());

    // get the bulletin repository
    final BulletinRepository bulletinRepository;
    if (properties.isClusterManager()) {
        bulletinRepository = clusterManager.getBulletinRepository();
    } else {
        bulletinRepository = controllerFacade.getBulletinRepository();
    }

    // perform the query
    final List<Bulletin> results = bulletinRepository.findBulletins(queryBuilder.build());

    // perform the query and generate the results - iterating in reverse order since we are
    // getting the most recent results by ordering by timestamp desc above. this gets the
    // exact results we want but in reverse order
    final List<BulletinDTO> bulletins = new ArrayList<>();
    for (final ListIterator<Bulletin> bulletinIter = results.listIterator(results.size()); bulletinIter
            .hasPrevious();) {
        bulletins.add(dtoFactory.createBulletinDto(bulletinIter.previous()));
    }

    // create the bulletin board
    BulletinBoardDTO bulletinBoard = new BulletinBoardDTO();
    bulletinBoard.setBulletins(bulletins);
    bulletinBoard.setGenerated(new Date());
    return bulletinBoard;
}
From source file:net.sourceforge.fenixedu.domain.student.Registration.java
final public ICurriculum getCurriculum(final DateTime when, final ExecutionYear executionYear,
        final CycleType cycleType) {
    if (getStudentCurricularPlansSet().isEmpty()) {
        return Curriculum.createEmpty(executionYear);
    }

    if (getDegreeType().isBolonhaType()) {
        final StudentCurricularPlan studentCurricularPlan = getLastStudentCurricularPlan();
        if (studentCurricularPlan == null) {
            return Curriculum.createEmpty(executionYear);
        }

        if (cycleType == null) {
            return studentCurricularPlan.getCurriculum(when, executionYear);
        }

        final CycleCurriculumGroup cycleCurriculumGroup = studentCurricularPlan.getCycle(cycleType);
        if (cycleCurriculumGroup == null) {
            return Curriculum.createEmpty(executionYear);
        }

        return cycleCurriculumGroup.getCurriculum(when, executionYear);
    } else {
        final List<StudentCurricularPlan> sortedSCPs = getSortedStudentCurricularPlans();
        final ListIterator<StudentCurricularPlan> sortedSCPsIterator = sortedSCPs
                .listIterator(sortedSCPs.size());
        final StudentCurricularPlan lastStudentCurricularPlan = sortedSCPsIterator.previous();

        final ICurriculum curriculum;
        if (lastStudentCurricularPlan.isBoxStructure()) {
            curriculum = lastStudentCurricularPlan.getCurriculum(when, executionYear);

            for (; sortedSCPsIterator.hasPrevious();) {
                final StudentCurricularPlan studentCurricularPlan = sortedSCPsIterator.previous();
                if (executionYear == null
                        || studentCurricularPlan.getStartExecutionYear().isBeforeOrEquals(executionYear)) {
                    ((Curriculum) curriculum).add(studentCurricularPlan.getCurriculum(when, executionYear));
                }
            }

            return curriculum;
        } else {
            curriculum = new StudentCurriculum(this, executionYear);
        }

        return curriculum;
    }
}