Example usage for java.util Vector remove

List of usage examples for java.util Vector remove

Introduction

On this page you can find example usages of java.util.Vector.remove.

Prototype

public synchronized E remove(int index) 

Source Link

Document

Removes the element at the specified position in this Vector.

Usage

From source file:com.duroty.application.files.manager.FilesManager.java

/**
 * DOCUMENT ME!/*  w  w  w . ja  v  a2 s .  c  om*/
 *
 * @param hsession DOCUMENT ME!
 * @param repositoryName DOCUMENT ME!
 * @param mids DOCUMENT ME!
 * @param hash DOCUMENT ME!
 *
 * @return DOCUMENT ME!
 *
 * @throws FilesException DOCUMENT ME!
 */
public MailPartObj getAttachment(Session hsession, String repositoryName, String mid, String hash)
        throws FilesException {
    MailPartObj part = null;

    try {
        MimeMessage mime = messageable.getMimeMessage(mid, getUser(hsession, repositoryName));

        if (mime != null) {
            Vector dmailParts = new Vector();
            MessageUtilities.decodeContent(mime, new StringBuffer(), dmailParts, true, "<br/>");

            while (dmailParts.size() > 0) {
                MailPart aux = (MailPart) dmailParts.remove(0);

                if (aux.getId() == Integer.parseInt(hash)) {
                    part = new MailPartObj();
                    part.setAttachent(IOUtils.toByteArray(aux.getPart().getInputStream()));
                    part.setContentType(aux.getContentType());
                    part.setId(aux.getId());
                    part.setName(aux.getName());
                    part.setSize(aux.getSize());

                    break;
                }
            }
        } else {
            throw new FilesException("The Attachment is null");
        }

        if (part == null) {
            throw new FilesException("The Attachment is null");
        }

        return part;
    } catch (Exception ex) {
        throw new FilesException(ex);
    } catch (java.lang.OutOfMemoryError ex) {
        System.gc();
        throw new FilesException(ex);
    } catch (Throwable ex) {
        throw new FilesException(ex);
    } finally {
        GeneralOperations.closeHibernateSession(hsession);
        System.gc();
    }
}

From source file:com.evolveum.openicf.lotus.DominoConnector.java

/**
 * Removes the given user from a Domino group and, when the user actually was
 * a member, pushes the shrunken member list back with a REPLACE update.
 *
 * @param usernameCanonical canonical name of the user to remove
 * @param groupName         name of the group to update
 * @throws NotesException    on Domino API failures
 * @throws ConnectorException when no group with that name exists
 */
private void removeUserFromGroup(String usernameCanonical, String groupName) throws NotesException {
    LOG.ok("removeUserFromGroup: usernameCanonical {0}, groupName {1}", usernameCanonical, groupName);

    Document group = getGroup(groupName);
    if (group == null) {
        LOG.error("Invalid group name {0}.", groupName);
        throw new ConnectorException("Invalid group name '" + groupName + "'.");
    }

    Vector members = group.getItemValue(MEMBERS.getName());
    // Nothing to do when the member list is absent or the user was not in it.
    if (members == null || !members.remove(usernameCanonical)) {
        return;
    }

    Map<String, Attribute> updatedAttrs = new HashMap<String, Attribute>();
    updatedAttrs.put(MEMBERS.getName(), build(MEMBERS, members.toArray()));
    updateGroup(createGroupUid(group), updatedAttrs, null, Update.REPLACE);
}

From source file:edu.umn.cs.spatialHadoop.nasa.MultiHDFPlot.java

/**
 * Plots a heat map image (or pyramid) for every group of {@code combine}
 * consecutive date directories that fall inside the configured time range,
 * then optionally draws a scale bar and writes a KML wrapper.
 *
 * @param input  input directories whose date-named children are scanned
 * @param output directory that receives one image per group plus scale/KML
 * @param params operation parameters; uses "time", "combine", "recover",
 *               "overwrite", "pyramid", "background", "scalerange", "scale"
 * @return true on success, false on bad/unmatched input
 * @throws IOException            on filesystem errors
 * @throws InterruptedException   while waiting for plot jobs
 * @throws ClassNotFoundException from Hadoop job submission
 * @throws ParseException         never thrown directly; kept for callers
 */
public static boolean multiplot(Path[] input, Path output, OperationsParams params)
        throws IOException, InterruptedException, ClassNotFoundException, ParseException {
    String timeRange = params.get("time");
    final Date dateFrom, dateTo;
    final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy.MM.dd");
    try {
        String[] parts = timeRange.split("\\.\\.");
        dateFrom = dateFormat.parse(parts[0]);
        dateTo = dateFormat.parse(parts[1]);
    } catch (ArrayIndexOutOfBoundsException e) {
        System.err.println("Use the separator two periods '..' to separate from and to dates");
        return false; // To avoid an error that causes dateFrom to be uninitialized
    } catch (ParseException e) {
        System.err.println("Illegal date format in " + timeRange);
        return false;
    }
    // Number of frames to combine in each image
    int combine = params.getInt("combine", 1);
    // Retrieve all matching input directories based on date range
    Vector<Path> matchingPathsV = new Vector<Path>();
    for (Path inputFile : input) {
        FileSystem inFs = inputFile.getFileSystem(params);
        // BUG FIX: previously this listed the whole 'input' array on every
        // iteration of the loop, duplicating each matching directory once per
        // input path. Only the current inputFile must be listed here.
        FileStatus[] matchingDirs = inFs.listStatus(inputFile, new PathFilter() {
            @Override
            public boolean accept(Path p) {
                String dirName = p.getName();
                try {
                    // Directory names are dates; keep those inside the range.
                    Date date = dateFormat.parse(dirName);
                    return date.compareTo(dateFrom) >= 0 && date.compareTo(dateTo) <= 0;
                } catch (ParseException e) {
                    LOG.warn("Cannot parse directory name: " + dirName);
                    return false;
                }
            }
        });
        for (FileStatus matchingDir : matchingDirs)
            matchingPathsV.add(new Path(matchingDir.getPath(), "*.hdf"));
    }
    if (matchingPathsV.isEmpty()) {
        LOG.warn("No matching directories to given input");
        return false;
    }

    Path[] matchingPaths = matchingPathsV.toArray(new Path[matchingPathsV.size()]);
    Arrays.sort(matchingPaths);

    // Clear all paths to ensure we set our own paths for each job
    params.clearAllPaths();

    // Create a water mask if we need to recover holes on write
    if (params.get("recover", "none").equals("write")) {
        // Recover images on write requires a water mask image to be generated first
        OperationsParams wmParams = new OperationsParams(params);
        wmParams.setBoolean("background", false);
        Path wmImage = new Path(output, new Path("water_mask"));
        HDFPlot.generateWaterMask(wmImage, wmParams);
        params.set(HDFPlot.PREPROCESSED_WATERMARK, wmImage.toString());
    }
    // Start a job for each path
    int imageWidth = -1;
    int imageHeight = -1;
    boolean overwrite = params.getBoolean("overwrite", false);
    boolean pyramid = params.getBoolean("pyramid", false);
    FileSystem outFs = output.getFileSystem(params);
    Vector<Job> jobs = new Vector<Job>();
    boolean background = params.getBoolean("background", false);
    Rectangle mbr = new Rectangle(-180, -90, 180, 90);
    for (int i = 0; i < matchingPaths.length; i += combine) {
        // Each job plots up to 'combine' consecutive frames into one image.
        Path[] inputPaths = new Path[Math.min(combine, matchingPaths.length - i)];
        System.arraycopy(matchingPaths, i, inputPaths, 0, inputPaths.length);
        Path outputPath = new Path(output, inputPaths[0].getParent().getName() + (pyramid ? "" : ".png"));
        if (overwrite || !outFs.exists(outputPath)) {
            // Need to plot
            Job rj = HDFPlot.plotHeatMap(inputPaths, outputPath, params);
            // Capture image dimensions and MBR from the first job (or params
            // when the job runs locally and returns null).
            if (imageHeight == -1 || imageWidth == -1) {
                if (rj != null) {
                    imageHeight = rj.getConfiguration().getInt("height", 1000);
                    imageWidth = rj.getConfiguration().getInt("width", 1000);
                    mbr = (Rectangle) OperationsParams.getShape(rj.getConfiguration(), "mbr");
                } else {
                    imageHeight = params.getInt("height", 1000);
                    imageWidth = params.getInt("width", 1000);
                    mbr = (Rectangle) OperationsParams.getShape(params, "mbr");
                }
            }
            if (background && rj != null)
                jobs.add(rj);
        }
    }
    // Wait until all jobs are done; kill the rest when any job fails.
    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob.getJobID());
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            throw new RuntimeException("Error running job " + firstJob.getJobID());
        }
        jobs.remove(0);
    }

    // Draw the scale in the output path if needed
    String scalerange = params.get("scalerange");
    if (scalerange != null) {
        String[] parts = scalerange.split("\\.\\.");
        double min = Double.parseDouble(parts[0]);
        double max = Double.parseDouble(parts[1]);
        String scale = params.get("scale", "none").toLowerCase();
        if (scale.equals("vertical")) {
            MultiHDFPlot.drawVerticalScale(new Path(output, "scale.png"), min, max, 64, imageHeight, params);
        } else if (scale.equals("horizontal")) {
            MultiHDFPlot.drawHorizontalScale(new Path(output, "scale.png"), min, max, imageWidth, 64, params);
        }
    }
    // Add the KML file
    createKML(outFs, output, mbr, params);
    return true;
}

From source file:Admin_Thesaurus.ImportData.java

/**
 * Rolls back a failed thesaurus import: restores the database backup taken
 * before the import started, reloads the hierarchy / guide-term / thesaurus
 * listings, renders the result page to {@code out}, and finally unlocks the
 * system for administrative jobs.
 */
public void abortActions(HttpServletRequest request, SessionWrapperClass sessionInstance,
        ServletContext context, Locale targetLocale, CommonUtilsDBadmin common_utils,
        String initiallySelectedThesaurus, String mergedThesaurusName, StringObject DBbackupFileNameCreated,
        StringObject resultObj, PrintWriter out) {

    Utilities u = new Utilities();
    DBGeneral dbGen = new DBGeneral();

    Utils.StaticClass.webAppSystemOutPrintln(Parameters.LogFilePrefix + "ABORT IMPORT");
    //abort transaction and close connection
    //Q.free_all_sets();
    //Q.TEST_abort_transaction();
    //dbGen.CloseDBConnection(Q, TA, sis_session, tms_session, true);

    UsersClass wtmsUsers = new UsersClass();
    StringBuffer xml = new StringBuffer();
    Vector<String> thesauriNames = new Vector<String>();
    Vector<String> allHierarchies = new Vector<String>();
    Vector<String> allGuideTerms = new Vector<String>();
    String importMethodChoice = request.getParameter("ImportThesaurusMode");
    StringObject result = new StringObject("");

    UserInfoClass SessionUserInfo = (UserInfoClass) sessionInstance.getAttribute("SessionUser");

    // Re-point the session at the thesaurus that was selected before the
    // import began.
    wtmsUsers.SetSessionAttributeSessionUser(sessionInstance, context, SessionUserInfo.name,
            SessionUserInfo.password, initiallySelectedThesaurus, SessionUserInfo.userGroup);
    Utils.StaticClass.webAppSystemOutPrintln(Parameters.LogFilePrefix + DBbackupFileNameCreated.getValue());

    boolean restored = common_utils.RestoreDBbackup(DBbackupFileNameCreated.getValue(), result);
    // NOTE(review): thesauriNames is still empty at this point, so this
    // remove() is a no-op; GetExistingThesaurus() below repopulates the list
    // afterwards. Verify whether the removal was intended to happen after
    // that call instead.
    thesauriNames.remove(mergedThesaurusName);

    QClass Q = new QClass();
    TMSAPIClass TA = new TMSAPIClass();
    IntegerObject sis_session = new IntegerObject();
    IntegerObject tms_session = new IntegerObject();

    if (restored) {
        Utils.StaticClass.webAppSystemOutPrintln(Parameters.LogFilePrefix + "Restoration of : "
                + DBbackupFileNameCreated.getValue() + " succeeded.");
        //open connection and start Query
        if (dbGen.openConnectionAndStartQueryOrTransaction(Q, TA, sis_session, null, null,
                true) == QClass.APIFail) {
            Utils.StaticClass
                    .webAppSystemOutPrintln("OPEN CONNECTION ERROR @ servlet " + this.getServletName());
            return;
        }

        // Reload the data shown on the result page from the restored DB.
        dbGen.getDBAdminHierarchiesStatusesAndGuideTermsXML(SessionUserInfo, Q, sis_session, allHierarchies,
                allGuideTerms);
        dbGen.GetExistingThesaurus(false, thesauriNames, Q, sis_session);

        //end query and close connection
        Q.free_all_sets();
        Q.TEST_end_query();
        dbGen.CloseDBConnection(Q, null, sis_session, null, false);

    } else {
        Utils.StaticClass.webAppSystemOutPrintln(
                Parameters.LogFilePrefix + "Did not manage to restore : " + DBbackupFileNameCreated.getValue());
    }

    // Build the XML response and transform it through the page stylesheet.
    xml.append(u.getXMLStart(ConstantParameters.LMENU_THESAURI));
    xml.append(u.getDBAdminHierarchiesStatusesAndGuideTermsXML(allHierarchies, allGuideTerms, targetLocale));
    // NOTE(review): resultMessageObj_2 is never used below — dead local?
    StringObject resultMessageObj_2 = new StringObject();

    xml.append(getXMLMiddle(thesauriNames,
            u.translateFromMessagesXML("root/abortActions/InsertionFailure", null) + resultObj.getValue(),
            importMethodChoice));
    xml.append(u.getXMLUserInfo(SessionUserInfo));
    xml.append(u.getXMLEnd());

    u.XmlPrintWriterTransform(out, xml, sessionInstance.path + "/xml-xsl/page_contents.xsl");

    // ---------------------- UNLOCK SYSTEM ----------------------
    DBAdminUtilities dbAdminUtils = new DBAdminUtilities();
    dbAdminUtils.UnlockSystemForAdministrativeJobs();
}

From source file:edu.umn.cs.spatialHadoop.nasa.SpatioAggregateQueries.java

/**
 * Return all matching partitions according to a time range
 * @param inFile /*www .ja  v  a  2 s .c om*/
 * @param params
 * @return
 * @throws ParseException
 * @throws IOException
 */
private static Vector<Path> selectTemporalPartitions(Path inFile, OperationsParams params)
        throws ParseException, IOException {
    // 1- Run a temporal filter step to find all matching temporal partitions
    Vector<Path> matchingPartitions = new Vector<Path>();
    // List of time ranges to check. Initially it contains one range as
    // specified by the user. Eventually, it can be split into at most two
    // partitions if partially matched by a partition.
    Vector<TimeRange> temporalRanges = new Vector<TimeRange>();
    System.out.println(new TimeRange(params.get("time")));
    temporalRanges.add(new TimeRange(params.get("time")));
    Path[] temporalIndexes = new Path[] { new Path(inFile, "yearly"), new Path(inFile, "monthly"),
            new Path(inFile, "daily") };
    int index = 0;
    final FileSystem fs = inFile.getFileSystem(params);
    while (index < temporalIndexes.length && !temporalRanges.isEmpty()) {
        Path indexDir = temporalIndexes[index];
        LOG.info("Checking index dir " + indexDir);
        TemporalIndex temporalIndex = new TemporalIndex(fs, indexDir);
        for (int iRange = 0; iRange < temporalRanges.size(); iRange++) {
            TimeRange range = temporalRanges.get(iRange);
            TemporalPartition[] matches = temporalIndex.selectContained(range.start, range.end);
            if (matches != null) {
                LOG.info("Matched " + matches.length + " partitions in " + indexDir);
                for (TemporalPartition match : matches) {
                    matchingPartitions.add(new Path(indexDir, match.dirName));
                }
                // Update range to remove matching part
                TemporalPartition firstMatch = matches[0];
                TemporalPartition lastMatch = matches[matches.length - 1];
                if (range.start < firstMatch.start && range.end > lastMatch.end) {
                    // Need to split the range into two
                    temporalRanges.setElementAt(new TimeRange(range.start, firstMatch.start), iRange);
                    temporalRanges.insertElementAt(new TimeRange(lastMatch.end, range.end), iRange);
                } else if (range.start < firstMatch.start) {
                    // Update range in-place
                    range.end = firstMatch.start;
                } else if (range.end > lastMatch.end) {
                    // Update range in-place
                    range.start = lastMatch.end;
                } else {
                    // Current range was completely covered. Remove it
                    temporalRanges.remove(iRange);
                }
            }
        }
        index++;
    }

    numOfTemporalPartitionsInLastQuery = matchingPartitions.size();
    return matchingPartitions;
}

From source file:org.bibsonomy.importer.bookmark.file.FirefoxImporter.java

/**
 * Parses a given node of a Firefox/del.icio.us bookmark export and extracts
 * all links and folders, recursing into sub-folders. Folder names on the way
 * down become tags of the bookmarks found below them. Found bookmarks are
 * appended to the {@code posts} field.
 *
 * @param folder    the DOM node to parse
 * @param upperTags tags collected from the folders above this node, or
 *                  {@code null} when parsing the root folder
 * @param user      owner assigned to every created bookmark post
 * @param groupName group assigned to every created bookmark post
 */
private void createBookmarks(final Node folder, final Vector<String> upperTags, final User user,
        final String groupName) {
    // the post gets today's time
    final Date today = new Date();

    // every node requires his own tags
    Vector<String> tags;
    // if tags are provided by upper nodes these tags belong to this node
    // too
    if (upperTags != null) {
        tags = new Vector<String>(upperTags);
    }
    // if no tags are provided create a new vector
    else {
        tags = new Vector<String>();
    }
    // nodelist to parse all children of the given node
    NodeList children = folder.getChildNodes();
    // String to save a foldername if its name is given in a sibling of the
    // concerning DL
    String sepTag = "";

    for (int i = 0; i < children.getLength(); i++) {
        Node currentNode = children.item(i);
        // connect all upper tags with the currentNode
        Vector<String> myTags = new Vector<String>(tags);
        if (!"".equals(sepTag)) {
            myTags.add(sepTag);
        }

        // is currentNode a folder?
        if ("dd".equals(currentNode.getNodeName())) {
            NodeList secondGen = currentNode.getChildNodes();
            // only containing a name?
            // yes, keep tag
            if (secondGen.getLength() == 1 && "h3".equals(secondGen.item(0).getNodeName())) {
                sepTag = secondGen.item(0).getFirstChild().getNodeValue().replaceAll("->|<-|\\s", "_");
            } else if (secondGen.getLength() > 1) { // filters dd nodes that
                // only contain a single
                // p node
                // else find all folders and their names
                for (int j = 0; j < secondGen.getLength(); j++) {
                    Node son = secondGen.item(j);
                    if ("h3".equals(son.getNodeName())) {
                        // if sepTag != "" remove last added tag and reset
                        // sepTag
                        if (!"".equals(sepTag)) {
                            myTags.remove(sepTag);
                            sepTag = "";
                        }
                        // if upperTags != myTags, a parallel branch was
                        // parsed -> reset myTags
                        if (tags.size() != myTags.size()) {
                            // BUG FIX: previously this aliased 'tags'
                            // (myTags = tags), so the add() below mutated the
                            // shared tag list of every sibling branch. Take a
                            // defensive copy instead.
                            myTags = new Vector<String>(tags);
                        }
                        // add a found tag
                        myTags.add(son.getFirstChild().getNodeValue().replaceAll("->|<-|\\s", "_"));
                    }
                    // all dl-nodes are new folders
                    if ("dl".equals(son.getNodeName())) {
                        // create bookmarks from new found node
                        createBookmarks(son, myTags, user, groupName);
                    }
                } // for(int j=...
            } // else if
        } // if ("dd".equals....
          // if its no folder.... is it a link?

        /*
         * sometimes the tidy parser decides that <dt></dt> has childnodes
         * ... need to check if the childnode of <dt> is an <a> to avoid
         * NullPointerExceptions!!!!
         */
        else if ("dt".equals(currentNode.getNodeName())
                && "a".equals(currentNode.getFirstChild().getNodeName())) {
            // it is a link
            // create bookmark-object

            // need to check if the <a>-Tag has a name (ChildNodes) i.e. <a
            // href="http://www.foo.bar"></a> causes a failure
            if (currentNode.getFirstChild().hasChildNodes() == true) {
                Post<Bookmark> bookmarkPost = new Post<Bookmark>();
                bookmarkPost.setResource(new Bookmark());
                bookmarkPost.getResource().setTitle(currentNode.getFirstChild().getFirstChild().getNodeValue());
                bookmarkPost.getResource().setUrl(
                        currentNode.getFirstChild().getAttributes().getNamedItem("href").getNodeValue());
                // add tags/relations to bookmark
                if (upperTags != null) {
                    // only 1 tag found -> add a tag
                    if (upperTags.size() == 1) {

                        // bookmark.setTags(upperTags.elementAt(0));
                        bookmarkPost.addTag(upperTags.elementAt(0));
                    } else {
                        // more tags found -> add relations
                        for (int tagCount = 0; tagCount < upperTags.size() - 1; tagCount++) {
                            String upper = upperTags.elementAt(tagCount);
                            String lower = upperTags.elementAt(tagCount + 1);
                            // bookmark.addTagRelation(lower, upper);
                            bookmarkPost.addTag(upper);
                            bookmarkPost.addTag(lower);

                        }
                    }
                } else {
                    /*
                     * link found in "root-folder" -> no folder hierarchy
                     * found
                     * 
                     * check for "TAGS" attribute (common in del.icio.us
                     * export)
                     */
                    final Node tagNode = currentNode.getFirstChild().getAttributes().getNamedItem("tags");
                    if (tagNode != null) {
                        /*
                         * del.icio.us export tags are comma-separated
                         */
                        final StringTokenizer token = new StringTokenizer(tagNode.getNodeValue(), ",");
                        while (token.hasMoreTokens()) {
                            bookmarkPost.addTag(token.nextToken());
                        }
                    } else {
                        // really no tags found -> set imported tag
                        bookmarkPost.setTags(Collections.singleton(TagUtils.getEmptyTag()));
                    }
                }
                bookmarkPost.setDate(today);
                bookmarkPost.setUser(user);
                bookmarkPost.addGroup(groupName);

                // descriptions are saved in a sibling of of a node
                // containing a link
                if (currentNode.getNextSibling() != null
                        && "dd".equals(currentNode.getNextSibling().getNodeName())) {
                    // bookmark.setExtended(currentNode.getNextSibling().getFirstChild().getNodeValue());
                    bookmarkPost.setDescription(currentNode.getNextSibling().getFirstChild().getNodeValue());
                }
                posts.add(bookmarkPost);
            }
        }
    }
}

From source file:edu.umn.cs.spatialHadoop.nasa.SpatioTemporalAggregateQuery.java

/**
 * Performs a spatio-temporal aggregate query on an indexed directory.
 * First selects the temporal partitions (yearly/monthly/daily) contained in
 * the "time" range, then the MODIS tiles overlapping the "rect" range, and
 * finally aggregates all matching AggregateQuadTree files in parallel.
 *
 * @param inFile root of the spatio-temporal index
 * @param params query parameters; uses "time" and "rect"
 * @return the accumulated aggregate node over all matching tiles
 * @throws ParseException if the time range cannot be parsed
 * @throws IOException    on filesystem errors
 */
public static AggregateQuadTree.Node aggregateQuery(Path inFile, OperationsParams params)
        throws ParseException, IOException {
    // 1- Run a temporal filter step to find all matching temporal partitions
    Vector<Path> matchingPartitions = new Vector<Path>();
    // List of time ranges to check. Initially it contains one range as
    // specified by the user. Eventually, it can be split into at most two
    // partitions if partially matched by a partition.
    Vector<TimeRange> temporalRanges = new Vector<TimeRange>();
    temporalRanges.add(new TimeRange(params.get("time")));
    Path[] temporalIndexes = new Path[] { new Path(inFile, "yearly"), new Path(inFile, "monthly"),
            new Path(inFile, "daily") };
    int index = 0;
    final FileSystem fs = inFile.getFileSystem(params);
    while (index < temporalIndexes.length && !temporalRanges.isEmpty()) {
        Path indexDir = temporalIndexes[index];
        LOG.info("Checking index dir " + indexDir);
        TemporalIndex temporalIndex = new TemporalIndex(fs, indexDir);
        for (int iRange = 0; iRange < temporalRanges.size(); iRange++) {
            TimeRange range = temporalRanges.get(iRange);
            TemporalPartition[] matches = temporalIndex.selectContained(range.start, range.end);
            if (matches != null) {
                LOG.info("Matched " + matches.length + " partitions in " + indexDir);
                for (TemporalPartition match : matches) {
                    LOG.info("Matched temporal partition: " + match.dirName);
                    matchingPartitions.add(new Path(indexDir, match.dirName));
                }
                // Update range to remove matching part
                TemporalPartition firstMatch = matches[0];
                TemporalPartition lastMatch = matches[matches.length - 1];
                if (range.start < firstMatch.start && range.end > lastMatch.end) {
                    // Need to split the range into two
                    temporalRanges.setElementAt(new TimeRange(range.start, firstMatch.start), iRange);
                    temporalRanges.insertElementAt(new TimeRange(lastMatch.end, range.end), iRange);
                } else if (range.start < firstMatch.start) {
                    // Update range in-place
                    range.end = firstMatch.start;
                } else if (range.end > lastMatch.end) {
                    // Update range in-place
                    range.start = lastMatch.end;
                } else {
                    // Current range was completely covered. Remove it.
                    temporalRanges.remove(iRange);
                    // BUG FIX: after removal the next element shifts into slot
                    // iRange; step back so the loop's iRange++ does not skip it.
                    iRange--;
                }
            }
        }
        index++;
    }

    numOfTemporalPartitionsInLastQuery = matchingPartitions.size();

    // 2- Find all matching files (AggregateQuadTrees) in matching partitions
    final Rectangle spatialRange = params.getShape("rect", new Rectangle()).getMBR();
    // Convert spatialRange from lat/lng space to Sinusoidal space
    double cosPhiRad = Math.cos(spatialRange.y1 * Math.PI / 180);
    double southWest = spatialRange.x1 * cosPhiRad;
    double southEast = spatialRange.x2 * cosPhiRad;
    cosPhiRad = Math.cos(spatialRange.y2 * Math.PI / 180);
    double northWest = spatialRange.x1 * cosPhiRad;
    double northEast = spatialRange.x2 * cosPhiRad;
    spatialRange.x1 = Math.min(northWest, southWest);
    spatialRange.x2 = Math.max(northEast, southEast);
    // Convert to the h v space used by MODIS
    spatialRange.x1 = (spatialRange.x1 + 180.0) / 10.0;
    spatialRange.x2 = (spatialRange.x2 + 180.0) / 10.0;
    spatialRange.y2 = (90.0 - spatialRange.y2) / 10.0;
    spatialRange.y1 = (90.0 - spatialRange.y1) / 10.0;
    // Vertically flip because the Sinusoidal space increases to the south
    double tmp = spatialRange.y2;
    spatialRange.y2 = spatialRange.y1;
    spatialRange.y1 = tmp;
    // Find the range of cells in MODIS Sinusoidal grid overlapping the range
    final int h1 = (int) Math.floor(spatialRange.x1);
    final int h2 = (int) Math.ceil(spatialRange.x2);
    final int v1 = (int) Math.floor(spatialRange.y1);
    final int v2 = (int) Math.ceil(spatialRange.y2);
    // Accept only tiles whose h/v indices fall inside the overlapping cells.
    PathFilter rangeFilter = new PathFilter() {
        @Override
        public boolean accept(Path p) {
            Matcher matcher = MODISTileID.matcher(p.getName());
            if (!matcher.matches())
                return false;
            int h = Integer.parseInt(matcher.group(1));
            int v = Integer.parseInt(matcher.group(2));
            return h >= h1 && h < h2 && v >= v1 && v < v2;
        }
    };

    final Vector<Path> allMatchingFiles = new Vector<Path>();

    for (Path matchingPartition : matchingPartitions) {
        // Select all matching files
        FileStatus[] matchingFiles = fs.listStatus(matchingPartition, rangeFilter);
        for (FileStatus matchingFile : matchingFiles) {
            allMatchingFiles.add(matchingFile.getPath());
        }
    }

    // 3- Query all matching files in parallel
    Vector<Node> threadsResults = Parallel.forEach(allMatchingFiles.size(),
            new RunnableRange<AggregateQuadTree.Node>() {
                @Override
                public Node run(int i1, int i2) {
                    Node threadResult = new AggregateQuadTree.Node();
                    for (int i_file = i1; i_file < i2; i_file++) {
                        try {
                            Path matchingFile = allMatchingFiles.get(i_file);
                            Matcher matcher = MODISTileID.matcher(matchingFile.getName());
                            matcher.matches(); // It has to match
                            int h = Integer.parseInt(matcher.group(1));
                            int v = Integer.parseInt(matcher.group(2));
                            // Clip the query region and normalize in this tile
                            Rectangle translated = spatialRange.translate(-h, -v);
                            int x1 = (int) (Math.max(translated.x1, 0) * 1200);
                            int y1 = (int) (Math.max(translated.y1, 0) * 1200);
                            int x2 = (int) (Math.min(translated.x2, 1.0) * 1200);
                            int y2 = (int) (Math.min(translated.y2, 1.0) * 1200);
                            AggregateQuadTree.Node fileResult = AggregateQuadTree.aggregateQuery(fs,
                                    matchingFile, new java.awt.Rectangle(x1, y1, (x2 - x1), (y2 - y1)));
                            threadResult.accumulate(fileResult);
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                    return threadResult;
                }
            });
    // Fold every per-thread partial aggregate into the final result.
    AggregateQuadTree.Node finalResult = new AggregateQuadTree.Node();
    for (Node threadResult : threadsResults)
        finalResult.accumulate(threadResult);
    numOfTreesTouchesInLastRequest = allMatchingFiles.size();
    return finalResult;
}

From source file:edu.ku.brc.specify.tasks.subpane.wb.FormPane.java

@Override
public void setWorkbench(final Workbench workbench) {
    this.workbench = workbench;

    // Rebuild the header list from the workbench template's mapping items.
    headers.clear();
    headers.addAll(workbench.getWorkbenchTemplate().getWorkbenchTemplateMappingItems());
    Collections.sort(headers);

    // Re-bind each existing input panel to its refreshed mapping item,
    // matching panels to items by database id. Matched panels are taken out
    // of the candidate pool so each panel binds at most once.
    final Vector<InputPanel> candidates = new Vector<InputPanel>(uiComps);
    for (final WorkbenchTemplateMappingItem item : headers) {
        final int itemId = item.getId().intValue();
        InputPanel matched = null;
        for (final InputPanel candidate : candidates) {
            if (candidate.getWbtmi().getId().intValue() == itemId) {
                matched = candidate;
                break;
            }
        }

        if (matched == null) {
            log.error("Couldn't find panel by ID [" + item.getId() + "]");
        } else {
            candidates.remove(matched);
            matched.setWbtmi(item);
        }
    }
}

From source file:org.openbravo.erpCommon.utility.reporting.printing.PrintController.java

/**
 * /*from   w  w w  . j  a  va 2  s.  c  om*/
 * @param vector
 * @param documentToDelete
 */
private void seekAndDestroy(Vector<Object> vector, String documentToDelete) {
    for (int i = 0; i < vector.size(); i++) {
        final AttachContent content = (AttachContent) vector.get(i);
        if (content.id.equals(documentToDelete)) {
            vector.remove(i);
            break;
        }
    }

}