Example usage for java.util LinkedList remove

List of usage examples for java.util LinkedList remove

Introduction

On this page you can find example usage for java.util LinkedList remove().

Prototype

public E remove() 

Source Link

Document

Retrieves and removes the head (first element) of this list.

Usage

From source file:com.android.deskclock.timer.TimerFullScreenFragment.java

public void updateAllTimesUpTimers() {
    // Snapshot the expired timers first: dismissing a timer while iterating the
    // adapter's live list could race with the timer being picked again, so we
    // collect them into a temporary list and dismiss them afterwards.
    final LinkedList<TimerObj> expired = new LinkedList<>();
    final int count = mAdapter.getCount();
    for (int i = 0; i < count; i++) {
        final TimerObj timer = mAdapter.getItem(i);
        if (timer.mState == TimerObj.STATE_TIMESUP) {
            expired.addFirst(timer);
        }
    }

    // A refresh is only needed if at least one timer had expired.
    final boolean notifyChange = !expired.isEmpty();

    // Dismiss each snapshotted timer, taking from the head of the queue
    // (i.e. in reverse adapter order, matching addFirst above).
    while (!expired.isEmpty()) {
        onStopButtonPressed(expired.remove());
    }

    if (notifyChange) {
        mPrefs.edit().putBoolean(Timers.REFRESH_UI_WITH_LATEST_DATA, true).apply();
    }
}

From source file:org.hippoecm.hst.demo.components.NonWorkflowWikiImporterComponent.java

/**
 * Relates the nodes to the previous nodes (in order of UUID)
 *
 * @param request
 * @param response
 */
private void relateDocuments(HstRequest request, HstResponse response, Operation op, final int relations,
        String orderByProperty) {
    // NOTE(review): orderByProperty is currently unused; the ordering below is
    // hard-coded to @jcr:uuid — confirm whether that is intentional.
    if (relations < 1) {
        return;
    }

    try {
        Session writableSession = this.getPersistableSession(request);
        Node wikipedia = writableSession
                .getNode("/" + request.getRequestContext().getSiteContentBasePath() + "/wikipedia");
        // XPath queries are deprecated in JCR 2.0 but still supported; ordering
        // by jcr:uuid keeps the "previous documents" sequence stable across runs.
        @SuppressWarnings("deprecation")
        Query q = writableSession.getWorkspace().getQueryManager()
                .createQuery("//element(*,demosite:wikidocument)[@hippo:paths='" + wikipedia.getIdentifier()
                        + "'] order by @jcr:uuid", Query.XPATH);
        QueryResult result = q.execute();
        NodeIterator it = result.getNodes();

        // Fill first queue with elements, which can't be fully linked yet: the
        // first 'relations' nodes have too few predecessors, so they are saved
        // in 'firsts' and linked at the very end (wrap-around). 'previous' is a
        // sliding window holding the last 'relations' nodes seen.
        Node current;
        LinkedList<Node> firsts = new LinkedList<Node>();
        LinkedList<Node> previous = new LinkedList<Node>();
        while (it.hasNext() && firsts.size() != relations) {
            current = it.nextNode();
            firsts.add(current);
            previous.add(current);
        }

        // Link to previous documents, update previous documents queue, occasionally save
        int count = 1;
        while (it.hasNext()) {
            current = it.nextNode();
            Iterator<Node> qit = previous.listIterator();

            // Relate the current node to every node in the window.
            while (qit.hasNext()) {
                op.perform(current, qit.next());
            }

            // Slide the window: drop the oldest node, append the current one.
            previous.remove();
            previous.add(current);

            // Periodic save keeps the JCR transient space bounded on large imports.
            if (count++ % 200 == 0) {
                writableSession.save();
            }
        }

        // Finally, link the first queue with elements: the initially-skipped
        // nodes are related against the window left over from the main pass.
        Iterator<Node> fit = firsts.listIterator();
        while (fit.hasNext()) {
            current = fit.next();
            Iterator<Node> qit = previous.listIterator();

            while (qit.hasNext()) {
                op.perform(current, qit.next());
            }

            previous.remove();
            previous.add(current);
        }

        writableSession.save();
    } catch (RepositoryException e) {
        log.warn("Exception during relating wiki docs", e);
        response.setRenderParameter("message",
                "An exception happened. Did not relate wiki docs. " + e.toString());
    }
}

From source file:org.powertac.customer.model.LiftTruck.java

/**
 * Converts a list of Strings to a sorted list of Shifts. Entries in the
 * list represent pairs of (start, duration) values. 
 */
@ConfigurableValue(valueType = "List", dump = false, description = "shift spec [block, shift, ..., block, shift, ...]")
public void setShiftData(List<String> data) {
    // Simple two-state parser: numeric tokens are appended to either the
    // block list or the shift list, depending on the last keyword seen.
    final int BLOCK = 0;
    final int SHIFT = 1;
    int mode = SHIFT;

    LinkedList<String> pending = new LinkedList<String>(data);
    ArrayList<Integer> blockNumbers = new ArrayList<Integer>();
    ArrayList<Integer> shiftNumbers = new ArrayList<Integer>();
    while (!pending.isEmpty()) {
        String token = pending.remove();
        if ("block".equals(token)) {
            // A new block starts: flush any pending shift, then reset the
            // block data and switch to collecting block numbers.
            if (!shiftNumbers.isEmpty()) {
                finishShift(blockNumbers, shiftNumbers);
                shiftNumbers.clear();
            }
            blockNumbers.clear();
            mode = BLOCK;
        } else if ("shift".equals(token)) {
            // A new shift starts: flush the previous one (if any) and switch
            // to collecting shift numbers; block data carries over.
            if (!shiftNumbers.isEmpty()) {
                finishShift(blockNumbers, shiftNumbers);
                shiftNumbers.clear();
            }
            mode = SHIFT;
        } else {
            // Numeric token: route it to whichever list the mode selects.
            try {
                if (mode == SHIFT)
                    shiftNumbers.add(Integer.parseInt(token));
                else if (mode == BLOCK)
                    blockNumbers.add(Integer.parseInt(token));
            } catch (NumberFormatException nfe) {
                log.error("Config error for " + getName() + ": bad numeric token " + token);
            }
        }
    }
    // Flush the trailing shift, which has no following keyword to trigger it.
    if (!shiftNumbers.isEmpty()) {
        finishShift(blockNumbers, shiftNumbers);
    }
}

From source file:com.twitter.distributedlog.service.balancer.ClusterBalancer.java

/**
 * Moves streams off the host at index {@code hostIdxMoveFrom} and onto the
 * host at index {@code hostIdxMoveTo}, walking the target index downward
 * (toward the source) whenever the current target already holds
 * {@code moveToHighWaterMark} or more streams. The source host is only
 * drained down to {@code moveFromLowWaterMark} streams, streams are picked in
 * shuffled (random) order, and each move may be throttled by the optional
 * {@code rateLimiter}. No-op when either index is out of range or the source
 * index is not strictly below the target index.
 */
void moveStreams(List<Host> hosts, AtomicInteger hostIdxMoveFrom, int moveFromLowWaterMark,
        AtomicInteger hostIdxMoveTo, int moveToHighWaterMark, Optional<RateLimiter> rateLimiter) {
    // Validate indexes; 'from' must be strictly below 'to'.
    if (hostIdxMoveFrom.get() < 0 || hostIdxMoveFrom.get() >= hosts.size() || hostIdxMoveTo.get() < 0
            || hostIdxMoveTo.get() >= hosts.size() || hostIdxMoveFrom.get() >= hostIdxMoveTo.get()) {
        return;
    }

    if (logger.isDebugEnabled()) {
        logger.debug(
                "Moving streams : hosts = {}, from = {}, to = {} : from_low_water_mark = {}, to_high_water_mark = {}",
                new Object[] { hosts, hostIdxMoveFrom.get(), hostIdxMoveTo.get(), moveFromLowWaterMark,
                        moveToHighWaterMark });
    }

    Host hostMoveFrom = hosts.get(hostIdxMoveFrom.get());
    int numStreamsOnFromHost = hostMoveFrom.streams.size();
    if (numStreamsOnFromHost <= moveFromLowWaterMark) {
        // Source already at/below its low-water mark: nothing to move.
        return;
    }

    // Shuffle so repeated rebalances don't always move the same streams.
    int numStreamsToMove = numStreamsOnFromHost - moveFromLowWaterMark;
    LinkedList<String> streamsToMove = new LinkedList<String>(hostMoveFrom.streams);
    Collections.shuffle(streamsToMove);

    if (logger.isDebugEnabled()) {
        logger.debug("Try to move {} streams from host {} : streams = {}",
                new Object[] { numStreamsToMove, hostMoveFrom.address, streamsToMove });
    }

    while (numStreamsToMove-- > 0 && !streamsToMove.isEmpty()) {
        if (rateLimiter.isPresent()) {
            rateLimiter.get().acquire();
        }

        // pick a host to move to; if it is full, step the target index down
        // until a host with capacity is found or we reach the source host
        Host hostMoveTo = hosts.get(hostIdxMoveTo.get());
        while (hostMoveTo.streams.size() >= moveToHighWaterMark) {
            int hostIdx = hostIdxMoveTo.decrementAndGet();
            logger.info("move to host : {}, from {}", hostIdx, hostIdxMoveFrom.get());
            if (hostIdx <= hostIdxMoveFrom.get()) {
                // Every host between source and target is full: stop rebalancing.
                return;
            } else {
                hostMoveTo = hosts.get(hostIdx);
                if (logger.isDebugEnabled()) {
                    logger.debug("Target host to move moved to host {} @ {}", hostIdx, hostMoveTo);
                }
            }
        }

        // pick a stream (head of the shuffled queue)
        String stream = streamsToMove.remove();

        // move the stream; only update the bookkeeping if the move succeeded
        if (moveStream(stream, hostMoveFrom, hostMoveTo)) {
            hostMoveFrom.streams.remove(stream);
            hostMoveTo.streams.add(stream);
        }
    }

}

From source file:org.apache.distributedlog.service.balancer.ClusterBalancer.java

/**
 * Moves streams off the host at index {@code hostIdxMoveFrom} and onto the
 * host at index {@code hostIdxMoveTo}, walking the target index downward
 * (toward the source) whenever the current target already holds
 * {@code moveToHighWaterMark} or more streams. The source host is only
 * drained down to {@code moveFromLowWaterMark} streams, streams are picked in
 * shuffled (random) order, and each move may be throttled by the optional
 * {@code rateLimiter}. No-op when either index is out of range or the source
 * index is not strictly below the target index.
 */
void moveStreams(List<Host> hosts, AtomicInteger hostIdxMoveFrom, int moveFromLowWaterMark,
        AtomicInteger hostIdxMoveTo, int moveToHighWaterMark, Optional<RateLimiter> rateLimiter) {
    // Validate indexes; 'from' must be strictly below 'to'.
    if (hostIdxMoveFrom.get() < 0 || hostIdxMoveFrom.get() >= hosts.size() || hostIdxMoveTo.get() < 0
            || hostIdxMoveTo.get() >= hosts.size() || hostIdxMoveFrom.get() >= hostIdxMoveTo.get()) {
        return;
    }

    if (logger.isDebugEnabled()) {
        logger.debug(
                "Moving streams : hosts = {}, from = {}, to = {} :"
                        + " from_low_water_mark = {}, to_high_water_mark = {}",
                new Object[] { hosts, hostIdxMoveFrom.get(), hostIdxMoveTo.get(), moveFromLowWaterMark,
                        moveToHighWaterMark });
    }

    Host hostMoveFrom = hosts.get(hostIdxMoveFrom.get());
    int numStreamsOnFromHost = hostMoveFrom.streams.size();
    if (numStreamsOnFromHost <= moveFromLowWaterMark) {
        // Source already at/below its low-water mark: nothing to move.
        return;
    }

    // Shuffle so repeated rebalances don't always move the same streams.
    int numStreamsToMove = numStreamsOnFromHost - moveFromLowWaterMark;
    LinkedList<String> streamsToMove = new LinkedList<String>(hostMoveFrom.streams);
    Collections.shuffle(streamsToMove);

    if (logger.isDebugEnabled()) {
        logger.debug("Try to move {} streams from host {} : streams = {}",
                new Object[] { numStreamsToMove, hostMoveFrom.address, streamsToMove });
    }

    while (numStreamsToMove-- > 0 && !streamsToMove.isEmpty()) {
        if (rateLimiter.isPresent()) {
            rateLimiter.get().acquire();
        }

        // pick a host to move to; if it is full, step the target index down
        // until a host with capacity is found or we reach the source host
        Host hostMoveTo = hosts.get(hostIdxMoveTo.get());
        while (hostMoveTo.streams.size() >= moveToHighWaterMark) {
            int hostIdx = hostIdxMoveTo.decrementAndGet();
            logger.info("move to host : {}, from {}", hostIdx, hostIdxMoveFrom.get());
            if (hostIdx <= hostIdxMoveFrom.get()) {
                // Every host between source and target is full: stop rebalancing.
                return;
            } else {
                hostMoveTo = hosts.get(hostIdx);
                if (logger.isDebugEnabled()) {
                    logger.debug("Target host to move moved to host {} @ {}", hostIdx, hostMoveTo);
                }
            }
        }

        // pick a stream (head of the shuffled queue)
        String stream = streamsToMove.remove();

        // move the stream; only update the bookkeeping if the move succeeded
        if (moveStream(stream, hostMoveFrom, hostMoveTo)) {
            hostMoveFrom.streams.remove(stream);
            hostMoveTo.streams.add(stream);
        }
    }

}

From source file:view.EditorView.java

/**
 * Renders the current game and unconnected rooms in the view.
 *
 * @param autoLayout      If {@code true}, the rooms will be automatically laid out according to their topology.
 * @param onlyUpdateLines If {@code true}, only connecting lines between the rooms are rendered, rooms are left as they are. Useful if the user is currently moving the room around with the mouse.
 */
public void renderView(boolean autoLayout, boolean onlyUpdateLines) {
    // Pass 1 (caller's thread): prune the scene graph. On a full render,
    // every child that is not a ConnectionLine is removed; surviving lines
    // have their endpoints refreshed. indexCorrection only advances past
    // children that are kept, because removing a child shifts the rest down.
    int indexCorrection = 0;
    while (drawing.getChildren().size() > indexCorrection) {
        if (!onlyUpdateLines && !(drawing.getChildren().get(indexCorrection) instanceof ConnectionLine)) {
            drawing.getChildren().remove(indexCorrection);
        } else if (drawing.getChildren().get(indexCorrection) instanceof ConnectionLine) {
            // Check if line is still valid
            ((ConnectionLine) drawing.getChildren().get(indexCorrection)).updateLocation();
            indexCorrection++;
        } else {
            indexCorrection++;
        }
    }

    // Pass 2 (background thread): walk the room graph breadth-first from the
    // current room, (re)attaching rooms and connection lines. UI mutations
    // are funneled back to the FX thread via Platform.runLater.
    renderThreadPool.submit(() -> {
        // update the connection status of all rooms
        if (allRoomsAsList != null) {
            for (RoomRectangle room : allRoomsAsList) {
                updateConnectionStatusOfRoom(room);
            }
        }

        // Breadth-first queue of rooms still to be laid out/rendered.
        LinkedList<RoomRectangle> renderQueue = new LinkedList<>();

        // The distance between connected rooms
        double roomDistance = 50;

        RoomRectangle startRoom;
        if (allRoomsAsList == null) {
            // First time to render
            startRoom = new RoomRectangle(drawing, this.getCurrentGame().getCurrentRoom());
            allRoomsAsListCopy = new RoomRectangleList();
            allRoomsAsList = new RoomRectangleList();
            allRoomsAsList.add(startRoom);
        } else {
            // Keep the old list for lookups; a full render rebuilds the live list.
            startRoom = allRoomsAsList.findByRoom(this.getCurrentGame().getCurrentRoom());
            allRoomsAsListCopy = allRoomsAsList;
            if (!onlyUpdateLines) {
                allRoomsAsList = new RoomRectangleList();
            }
        }

        renderQueue.add(startRoom);

        // render unconnected rooms
        renderQueue.addAll(unconnectedRooms);

        while (!renderQueue.isEmpty()) {
            RoomRectangle currentRoom = renderQueue.remove();
            if (currentRoom == null) {
                FOKLogger.severe(EditorView.class.getName(),
                        "currentRoom == null means that the room was never added to allRoomsAsList and that means that we ran into a bug, so report it :(");
                Platform.runLater(() -> new ReportingDialog(stage.getScene()).show(AppConfig.gitHubUserName,
                        AppConfig.gitHubRepoName, new IllegalStateException(
                                "A room of the game was never added to allRoomsAsList. This is an internal bug and needs to be reported to the dev team. Please tell us at https://github.com/vatbub/zorkClone/issues what you did when this exception occurred.")));
            }

            //noinspection ConstantConditions
            if (!currentRoom.isRendered()) {
                // Attach the room to the drawing pane for the first time.
                if (!allRoomsAsList.contains(currentRoom)) {
                    allRoomsAsList.add(currentRoom);
                }
                currentRoom.setCustomParent(drawing);
                currentRoom.updateNameLabelPosition();
            }
            for (Map.Entry<WalkDirection, Room> entry : currentRoom.getRoom().getAdjacentRooms().entrySet()) {
                RoomRectangle newRoom;
                newRoom = allRoomsAsListCopy.findByRoom(entry.getValue());

                if (newRoom == null) {
                    // not rendered yet
                    newRoom = new RoomRectangle(drawing, entry.getValue());
                    allRoomsAsList.add(newRoom);
                }

                // Set room position relative to the current room, offset by
                // roomDistance in the direction of the connecting exit.
                if (autoLayout && !newRoom.isRendered()) {
                    switch (entry.getKey()) {
                    case NORTH:
                        newRoom.setY(currentRoom.getY() - newRoom.getHeight() - roomDistance);
                        newRoom.setX(currentRoom.getX() + currentRoom.getWidth() / 2 - newRoom.getWidth() / 2);
                        break;
                    case WEST:
                        newRoom.setY(currentRoom.getY());
                        newRoom.setX(currentRoom.getX() - newRoom.getWidth() - roomDistance);
                        break;
                    case EAST:
                        newRoom.setY(currentRoom.getY());
                        newRoom.setX(currentRoom.getX() + currentRoom.getWidth() + roomDistance);
                        break;
                    case SOUTH:
                        newRoom.setY(currentRoom.getY() + currentRoom.getHeight() + roomDistance);
                        newRoom.setX(currentRoom.getX() + currentRoom.getWidth() / 2 - newRoom.getWidth() / 2);
                        break;
                    case NORTH_WEST:
                        newRoom.setY(currentRoom.getY() - newRoom.getHeight() - roomDistance);
                        newRoom.setX(currentRoom.getX() - newRoom.getWidth() - roomDistance);
                        break;
                    case NORTH_EAST:
                        newRoom.setY(currentRoom.getY() - newRoom.getHeight() - roomDistance);
                        newRoom.setX(currentRoom.getX() + currentRoom.getWidth() + roomDistance);
                        break;
                    case SOUTH_WEST:
                        newRoom.setY(currentRoom.getY() + currentRoom.getHeight() + roomDistance);
                        newRoom.setX(currentRoom.getX() - newRoom.getWidth() - roomDistance);
                        break;
                    case SOUTH_EAST:
                        newRoom.setY(currentRoom.getY() + currentRoom.getHeight() + roomDistance);
                        newRoom.setX(currentRoom.getX() + currentRoom.getWidth() + roomDistance);
                        break;
                    }
                }

                // Reuse an existing line between these two rooms if one exists
                // (direction-insensitive); otherwise create and register one.
                ConnectionLine connectionLine = lineList.findByStartAndEndRoomIgnoreLineDirection(currentRoom,
                        newRoom);
                if (connectionLine == null) {
                    // create a new line
                    connectionLine = new ConnectionLine(currentRoom, newRoom);
                    connectionLine.setInvalidationRunnable(lineInvalidationRunnable);
                    lineList.add(connectionLine);

                    final Line connectionLineCopy = connectionLine;
                    Platform.runLater(() -> drawing.getChildren().add(connectionLineCopy));
                }

                ConnectionLine finalConnectionLine = connectionLine;
                Platform.runLater(finalConnectionLine::updateLocation);

                if (!newRoom.isRendered()) {
                    // render the child
                    renderQueue.add(newRoom);
                }
            }
        }

        // set the room count
        currentRoomCount = allRoomsAsList.size();
        allRoomsAsListCopy = null;
    });
}

From source file:com.opengamma.util.db.management.AbstractDbManagement.java

/**
 * Deletes all rows from every table in {@code catalog}/{@code schema} except
 * those listed in {@code ignoredTables} (matched case-insensitively), and
 * re-seeds any Hibernate sequence table with the value 1.
 * <p>
 * Foreign-key constraints are handled by retrying: a statement that fails
 * (assumed to be a constraint violation) is re-queued behind the remaining
 * statements so dependent tables get cleared first. The retry loop is capped
 * at three passes over the script so a dependency cycle cannot loop forever.
 */
@Override
public void clearTables(String catalog, String schema, Collection<String> ignoredTables) {
    // Matches Hibernate's sequence table (e.g. "hibernate_sequence");
    // compiled once instead of once per table-name iteration.
    Pattern sequenceTablePattern = Pattern.compile(".*?hibernate_sequence");
    LinkedList<String> script = new LinkedList<String>();
    Connection conn = null;
    try {
        if (!getCatalogCreationStrategy().catalogExists(catalog)) {
            return; // nothing to clear
        }

        conn = connect(catalog);
        setActiveSchema(conn, schema);
        Statement statement = conn.createStatement();
        try {
            // Build the clear-tables script, skipping ignored tables.
            List<String> tablesToClear = new ArrayList<String>();
            for (String name : getAllTables(catalog, schema, statement)) {
                if (!ignoredTables.contains(name.toLowerCase())) {
                    tablesToClear.add(name);
                }
            }
            List<String> clearTablesCommands = getClearTablesCommand(schema, tablesToClear);
            script.addAll(clearTablesCommands);
            for (String name : tablesToClear) {
                Table table = new Table(name);
                if (matches(table.getName().toLowerCase(), sequenceTablePattern)) { // if it's a sequence table, reset it
                    script.add("INSERT INTO " + table.getQualifiedName(getHibernateDialect(), null, schema)
                            + " values ( 1 )");
                }
            }

            // Now execute it all. Constraints are taken into account by retrying the
            // failed statement after all dependent tables have been cleared first.
            int i = 0;
            int maxAttempts = script.size() * 3; // make sure the loop eventually terminates. Important if there's a cycle in the table dependency graph
            SQLException latestException = null;
            while (i < maxAttempts && !script.isEmpty()) {
                String sql = script.remove();
                try {
                    statement.executeUpdate(sql);
                } catch (SQLException e) {
                    // assume it failed because of a constraint violation:
                    // re-queue it so dependent tables are cleared before the retry
                    latestException = e;
                    script.add(sql);
                }
                i++;
            }

            if (i == maxAttempts && !script.isEmpty()) {
                throw new OpenGammaRuntimeException(
                        "Failed to clear tables - is there a cycle in the table dependency graph?",
                        latestException);
            }
        } finally {
            // Close the statement on every path, not only on success; swallow
            // close failures so they cannot mask a primary exception.
            try {
                statement.close();
            } catch (SQLException ignored) {
                // best-effort close
            }
        }
    } catch (SQLException e) {
        throw new OpenGammaRuntimeException("Failed to clear tables", e);
    } finally {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException ignored) {
            // Best-effort close: the connection is being discarded and any
            // original failure has already been rethrown above.
        }
    }
}

From source file:com.zimbra.cs.service.FileUploadServlet.java

/**
 * Parses a multipart/form-data upload request and caches each uploaded file.
 * <p>
 * Form fields carry upload metadata and are stripped from the item list:
 * "requestId" correlates request and response, "_charset_" overrides the
 * default utf-8 form-value charset, and fields whose name starts with
 * "filename" let the client supply newline-separated names for the uploads.
 * Remaining file items are registered in {@code mPending} as {@code Upload}s
 * and the result is reported via {@code sendResponse}. On any CSRF, size,
 * content-type, or parse failure an error status is sent and an empty list
 * is returned.
 */
@SuppressWarnings("unchecked")
List<Upload> handleMultipartUpload(HttpServletRequest req, HttpServletResponse resp, String fmt, Account acct,
        boolean limitByFileUploadMaxSize, AuthToken at, boolean csrfCheckComplete)
        throws IOException, ServiceException {
    List<FileItem> items = null;
    String reqId = null;

    ServletFileUpload upload = getUploader2(limitByFileUploadMaxSize);
    try {
        items = upload.parseRequest(req);

        if (!csrfCheckComplete && !CsrfUtil.checkCsrfInMultipartFileUpload(items, at)) {
            drainRequestStream(req);
            mLog.info("CSRF token validation failed for account: %s, Auth token is CSRF enabled",
                    acct.getName());
            sendResponse(resp, HttpServletResponse.SC_UNAUTHORIZED, fmt, null, null, items);
            return Collections.emptyList();
        }
    } catch (FileUploadBase.SizeLimitExceededException e) {
        // at least one file was over max allowed size
        mLog.info("Exceeded maximum upload size of " + upload.getSizeMax() + " bytes: " + e);
        drainRequestStream(req);
        sendResponse(resp, HttpServletResponse.SC_REQUEST_ENTITY_TOO_LARGE, fmt, reqId, null, items);
        return Collections.emptyList();
    } catch (FileUploadBase.InvalidContentTypeException e) {
        // at least one file was of a type not allowed
        mLog.info("File upload failed", e);
        drainRequestStream(req);
        sendResponse(resp, HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE, fmt, reqId, null, items);
        return Collections.emptyList();
    } catch (FileUploadException e) {
        // parse of request failed for some other reason
        mLog.info("File upload failed", e);
        drainRequestStream(req);
        sendResponse(resp, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, fmt, reqId, null, items);
        return Collections.emptyList();
    }

    String charset = "utf-8";
    // Client-supplied filenames, consumed in order as file items are seen.
    LinkedList<String> names = new LinkedList<String>();
    HashMap<FileItem, String> filenames = new HashMap<FileItem, String>();
    if (items != null) {
        for (Iterator<FileItem> it = items.iterator(); it.hasNext();) {
            FileItem fi = it.next();
            if (fi == null)
                continue;

            if (fi.isFormField()) {
                if (fi.getFieldName().equals("requestId")) {
                    // correlate this file upload session's request and response
                    reqId = fi.getString();
                } else if (fi.getFieldName().equals("_charset_") && !fi.getString().equals("")) {
                    // get the form value charset, if specified
                    charset = fi.getString();
                } else if (fi.getFieldName().startsWith("filename")) {
                    // allow a client to explicitly provide filenames for the uploads
                    names.clear();
                    String value = fi.getString(charset);
                    if (!Strings.isNullOrEmpty(value)) {
                        for (String name : value.split("\n")) {
                            names.add(name.trim());
                        }
                    }
                }
                // strip form fields out of the list of uploads
                it.remove();
            } else {
                // Drop file items with no name; otherwise pair the item with the
                // next client-supplied name (null falls back to fi.getName() below).
                if (fi.getName() == null || fi.getName().trim().equals("")) {
                    it.remove();
                } else {
                    filenames.put(fi, names.isEmpty() ? null : names.remove());
                }
            }
        }
    }

    // restrict requestId value for safety due to later use in javascript
    if (reqId != null && reqId.length() != 0) {
        if (!ALLOWED_REQUESTID_CHARS.matcher(reqId).matches()) {
            mLog.info("Rejecting upload with invalid chars in reqId: %s", reqId);
            sendResponse(resp, HttpServletResponse.SC_BAD_REQUEST, fmt, null, null, items);
            return Collections.emptyList();
        }
    }

    // empty upload is not a "success"
    if (items == null || items.isEmpty()) {
        mLog.info("No data in upload for reqId: %s", reqId);
        sendResponse(resp, HttpServletResponse.SC_NO_CONTENT, fmt, reqId, null, items);
        return Collections.emptyList();
    }

    // cache the uploaded files in the hash and construct the list of upload IDs
    List<Upload> uploads = new ArrayList<Upload>(items.size());
    for (FileItem fi : items) {
        String name = filenames.get(fi);
        if (name == null || name.trim().equals(""))
            name = fi.getName();
        Upload up = new Upload(acct.getId(), fi, name);

        mLog.info("Received multipart: %s", up);
        synchronized (mPending) {
            mPending.put(up.uuid, up);
        }
        uploads.add(up);
    }

    sendResponse(resp, HttpServletResponse.SC_OK, fmt, reqId, uploads, items);
    return uploads;
}

From source file:com.oltpbenchmark.benchmarks.seats.SEATSWorker.java

/**
 * Execute the FindOpenSeat procedure//from  w ww  . j av  a  2 s  .co m
 * @throws SQLException
 */
private boolean executeFindOpenSeats(FindOpenSeats proc) throws SQLException {
    // Pick a random flight to search for open seats on.
    final FlightId search_flight = this.profile.getRandomFlightId();
    assert (search_flight != null);
    Long airport_depart_id = search_flight.getDepartAirportId();

    if (LOG.isTraceEnabled())
        LOG.trace("Calling " + proc);
    Object[][] results = proc.run(conn, search_flight.encode());
    conn.commit();

    // One result row per open seat on the flight.
    int rowCount = results.length;
    assert (rowCount <= SEATSConstants.FLIGHTS_NUM_SEATS) : String
            .format("Unexpected %d open seats returned for %s", rowCount, search_flight);

    // there is some tiny probability of an empty flight .. maybe 1/(20**150)
    // if you hit this assert (with valid code), play the lottery!
    if (rowCount == 0)
        return (true);

    LinkedList<Reservation> cache = CACHE_RESERVATIONS.get(CacheType.PENDING_INSERTS);
    assert (cache != null) : "Unexpected " + CacheType.PENDING_INSERTS;

    // Store pending reservations in our queue for a later transaction
    BitSet seats = getSeatsBitSet(search_flight);
    tmp_reservations.clear();

    for (Object row[] : results) {
        if (row == null)
            continue; //  || rng.nextInt(100) < 75) continue; // HACK
        Integer seatnum = (Integer) row[1];

        // We first try to get a CustomerId based at this departure airport
        if (LOG.isTraceEnabled())
            LOG.trace("Looking for a random customer to fly on " + search_flight);
        CustomerId customer_id = profile.getRandomCustomerId(airport_depart_id);

        // We will go for a random one if:
        //  (1) The Customer is already booked on this Flight
        //  (2) We already made a new Reservation just now for this Customer
        int tries = SEATSConstants.FLIGHTS_NUM_SEATS;
        while (tries-- > 0 && (customer_id == null)) { //  || isCustomerBookedOnFlight(customer_id, flight_id))) {
            customer_id = profile.getRandomCustomerId();
            if (LOG.isTraceEnabled())
                LOG.trace("RANDOM CUSTOMER: " + customer_id);
        } // WHILE
        assert (customer_id != null) : String.format(
                "Failed to find a unique Customer to reserve for seat #%d on %s", seatnum, search_flight);

        Reservation r = new Reservation(profile.getNextReservationId(getId()), search_flight, customer_id,
                seatnum.intValue());
        seats.set(seatnum);
        tmp_reservations.add(r);
        if (LOG.isTraceEnabled())
            LOG.trace(
                    "QUEUED INSERT: " + search_flight + " / " + search_flight.encode() + " -> " + customer_id);
    } // WHILE

    if (tmp_reservations.isEmpty() == false) {
        // Randomize insert order, then cap the pending-insert cache by
        // evicting the oldest entries from the head of the queue.
        Collections.shuffle(tmp_reservations);
        cache.addAll(tmp_reservations);
        while (cache.size() > SEATSConstants.CACHE_LIMIT_PENDING_INSERTS) {
            cache.remove();
        } // WHILE
        if (LOG.isDebugEnabled())
            LOG.debug(String.format("Stored %d pending inserts for %s [totalPendingInserts=%d]",
                    tmp_reservations.size(), search_flight, cache.size()));
    }
    return (true);
}

From source file:org.seasr.meandre.components.analytics.socialnetworking.AbstractLinkCreationComponent.java

/**
 * Processes one batch of named-entity tuples, linking together entities of
 * the configured types ({@code _entityTypes}) that occur within
 * {@code _offset} sentences of each other (an outward link from the earlier
 * entity and an inward link on the later one). A sliding window of sentences,
 * keyed by sentence id, lets old sentences fall out of adjacency range as new
 * ones arrive.
 */
@Override
public void executeCallBack(ComponentContext cc) throws Exception {
    Strings inMetaTuple = (Strings) cc.getDataComponentFromInput(IN_META_TUPLE);
    SimpleTuplePeer tuplePeer = new SimpleTuplePeer(inMetaTuple);
    console.fine("Input meta tuple: " + tuplePeer.toString());

    StringsArray inTuples = (StringsArray) cc.getDataComponentFromInput(IN_TUPLES);
    Strings[] tuples = BasicDataTypesTools.stringsArrayToJavaArray(inTuples);

    int SENTENCE_ID_IDX = tuplePeer.getIndexForFieldName(OpenNLPNamedEntity.SENTENCE_ID_FIELD);
    int TYPE_IDX = tuplePeer.getIndexForFieldName(OpenNLPNamedEntity.TYPE_FIELD);
    int TEXT_IDX = tuplePeer.getIndexForFieldName(OpenNLPNamedEntity.TEXT_FIELD);

    // Linked list of sentences keyed by sentence id - the HashSet is the set of entities in that sentence
    LinkedList<KeyValuePair<Integer, HashSet<Entity>>> _sentencesWindow = new LinkedList<KeyValuePair<Integer, HashSet<Entity>>>();

    // Note: The algorithm used to mark entities as adjacent if they fall within the specified sentence distance
    //       relies on a sliding-window of sentences that are within the 'adjacency' range. As new sentences are
    //       considered, the window moves to the right and old sentences that are now too far fall out of scope.

    SimpleTuple tuple = tuplePeer.createTuple();
    for (Strings t : tuples) {
        tuple.setValues(t);

        Integer sentenceId = Integer.parseInt(tuple.getValue(SENTENCE_ID_IDX));
        String tupleType = tuple.getValue(TYPE_IDX);
        String tupleValue = tuple.getValue(TEXT_IDX);

        // If the entity is of the type we're interested in
        if (_entityTypes.contains(tupleType)) {

            if (_normalizeEntities) {
                // Normalize whitespaces
                StringBuilder sb = new StringBuilder();
                Matcher nonWhitespaceMatcher = REGEXP_NONWHITESPACE.matcher(tupleValue);
                while (nonWhitespaceMatcher.find())
                    sb.append(" ").append(nonWhitespaceMatcher.group(1));

                if (sb.length() > 0)
                    tupleValue = sb.substring(1);
                else
                    continue;

                // Normalize people's names
                if (tupleType.toLowerCase().equals("person")) {
                    sb = new StringBuilder();
                    Matcher personMatcher = REGEXP_PERSON.matcher(tupleValue);
                    while (personMatcher.find())
                        sb.append(" ").append(personMatcher.group(1));

                    if (sb.length() > 0)
                        tupleValue = sb.substring(1);
                    else
                        continue;

                    // ignore names with 1 character
                    if (tupleValue.length() == 1)
                        continue;
                }

                tupleValue = WordUtils.capitalizeFully(tupleValue);
            }

            // ... create an object for it
            Entity entity = new Entity(tupleType, tupleValue);

            // Check if we already recorded this entity before
            Entity oldEntity = _entities.get(entity);
            if (oldEntity == null)
                // If not, record it
                _entities.put(entity, entity);
            else
                // Otherwise retrieve the entity we used before
                entity = oldEntity;

            HashSet<Entity> sentenceEntities;

            // Remove all sentences (together with any entities they contained) from the set
            // of sentences that are too far from the current sentence of this entity
            while (_sentencesWindow.size() > 0 && sentenceId - _sentencesWindow.peek().getKey() > _offset)
                _sentencesWindow.remove();

            if (_sentencesWindow.size() > 0) {
                // If this sentence is different from the last sentence in the window.
                // BUGFIX: compare the boxed Integer keys with equals() — the previous
                // '!=' compared object identity, which is only reliable for values in
                // the JVM's Integer cache (-128..127); larger sentence ids always
                // looked "different", creating duplicate window entries per sentence.
                if (!_sentencesWindow.getLast().getKey().equals(sentenceId)) {
                    // Create an entry for it and add it at the end of the window
                    sentenceEntities = new HashSet<Entity>();
                    _sentencesWindow
                            .addLast(new KeyValuePair<Integer, HashSet<Entity>>(sentenceId, sentenceEntities));
                } else
                    sentenceEntities = _sentencesWindow.getLast().getValue();
            } else {
                // If there are no sentences in the window, create an entry for this sentence and add it
                sentenceEntities = new HashSet<Entity>();
                _sentencesWindow
                        .addLast(new KeyValuePair<Integer, HashSet<Entity>>(sentenceId, sentenceEntities));
            }

            // Iterate through all the sentences in the window
            for (KeyValuePair<Integer, HashSet<Entity>> kvp : _sentencesWindow)
                // ... and all the entities in each sentence
                for (Entity e : kvp.getValue()) {
                    // ignore self-references
                    if (e.equals(entity))
                        continue;

                    // ... and mark the new entity as being adjacent to all the entities in the window
                    e.addOutwardLink(entity);
                    entity.addInwardLink(e);
                }

            // Add the new entity to the window
            sentenceEntities.add(entity);
        }
    }

    if (!_isStreaming)
        generateAndPushOutputInternal();
}