Example usage for java.util HashMap remove

List of usage examples for java.util HashMap remove

Introduction

On this page you can find example usage for java.util HashMap remove.

Prototype

public V remove(Object key) 

Document

Removes the mapping for the specified key from this map if present.
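
As a quick, self-contained illustration of that contract (a minimal sketch, not taken from the projects below): remove returns the value previously associated with the key, or null if there was no mapping.

import java.util.HashMap;

public class RemoveExample {
    public static void main(String[] args) {
        HashMap<String, Integer> scores = new HashMap<>();
        scores.put("alice", 10);

        Integer previous = scores.remove("alice"); // returns the removed value
        System.out.println(previous); // 10

        Integer absent = scores.remove("bob"); // no mapping for "bob"
        System.out.println(absent); // null
        // (a null return can also mean the key was explicitly mapped to null)

        System.out.println(scores.containsKey("alice")); // false
    }
}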

Usage

From source file:org.mindswap.swoop.renderer.entity.NLVisitor.java

private void reconcileThingNodes(NLNode node) {
    HashMap links = node.getLinks();
    Set keys = links.keySet();
    NLLink isaLink = new NLLink("", NLVisitor.LINK_SUBCLASS);

    // first: gather all the is-a links
    NLLink nonThingLink = null;
    NLLink thingLink = null;
    boolean thingFound = false;
    Set isaLinks = new HashSet();
    for (Iterator iter = new HashSet(keys).iterator(); iter.hasNext();) {
        // cycle through each link
        NLLink link = (NLLink) iter.next();
        NLNode target = (NLNode) links.get(link);

        System.out.println("LINK: " + link.keyword + " (" + link.linkType + "), TARGET: " + target.keyword);

        if (link.equals(isaLink)) {
            isaLinks.add(link);

            if (target.getKeyword().equals("Thing") || target.getKeyword().equals("thing")) {
                System.out.println("thing found");
                thingFound = true;
                thingLink = link;
            } else {
                nonThingLink = link;
            }
        }
    }

    System.out.println("ISA's: " + isaLinks.size() + " " + thingFound);

    if (thingFound) {
        if (isaLinks.size() == 0) {
            // this should never happen 
        } else if (isaLinks.size() == 1) {
            // there is only one node and it is a thing node, so we can safely pull its links up
            NLNode thingNode = (NLNode) links.get(thingLink);

            HashMap tLinks = thingNode.getLinks();
            node.links.putAll(tLinks);
            links.remove(thingLink);
        } else {
            NLNode thingNode = (NLNode) links.get(thingLink);
            NLNode nonThingNode = (NLNode) links.get(nonThingLink);

            HashMap tLinks = thingNode.getLinks();
            HashMap ntLinks = nonThingNode.getLinks();
            ntLinks.putAll(tLinks);
            links.remove(thingLink);
        }
    }

    // Recurse
    keys = links.keySet();
    for (Iterator iter = keys.iterator(); iter.hasNext();) {
        NLLink link = (NLLink) iter.next();
        NLNode target = (NLNode) links.get(link);

        System.out.println("NEW LINK: " + link.keyword + " (" + link.linkType + "), TARGET: " + target.keyword);

        reconcileThingNodes(target);
    }
}
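
Note how the loop above iterates over new HashSet(keys) rather than the key set itself: calling links.remove(thingLink) while iterating the live keySet() view would risk a ConcurrentModificationException. A minimal sketch of the two usual safe-removal idioms (illustrative names and values):

import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;

public class SafeRemoval {
    public static void main(String[] args) {
        HashMap<String, Integer> map = new HashMap<>();
        map.put("a", 1);
        map.put("b", 2);
        map.put("c", 3);

        // Idiom 1: remove through the iterator itself.
        for (Iterator<Map.Entry<String, Integer>> it = map.entrySet().iterator(); it.hasNext();) {
            if (it.next().getValue() == 2) {
                it.remove(); // map.remove(key) here could throw ConcurrentModificationException
            }
        }

        // Idiom 2 (the one used above): iterate over a snapshot of the keys,
        // then mutate the map freely.
        for (String key : new HashSet<>(map.keySet())) {
            if ("c".equals(key)) {
                map.remove(key);
            }
        }

        System.out.println(map); // {a=1}
    }
}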

From source file:net.daboross.bukkitdev.skywars.config.RandomChestConfiguration.java

private void load() throws IOException, InvalidConfigurationException, SkyConfigurationException {
    SkyStatic.debug("[RandomChests] Loading chests.yml");
    Path chestsFile = plugin.getDataFolder().toPath().resolve("chests.yml");
    if (!Files.exists(chestsFile)) {
        plugin.saveResource("chests.yml", true);
    }
    YamlConfiguration config = new YamlConfiguration();

    config.load(chestsFile.toFile());

    int version = config.getInt("version", 1);
    if (version > 1) {
        throw new InvalidConfigurationException("Future version in chests.yml!");
    }
    config.set("version", 1);

    levels.clear();

    HashMap<String, ChestLevel> incompleteLevels = new HashMap<>();

    ConfigurationSection levelsSection = config.getConfigurationSection("levels");
    ConfigurationSection itemsSection = config.getConfigurationSection("items");
    if (levelsSection == null || itemsSection == null || levelsSection.getKeys(false).isEmpty()
            || itemsSection.getKeys(false).isEmpty()) {
        YamlConfiguration defaultConfig = new YamlConfiguration();
        try (InputStream stream = plugin.getResourceAsStream("chests.yml");
                Reader reader = new InputStreamReader(stream);
                BufferedReader bufferedReader = new BufferedReader(reader)) {
            defaultConfig.load(bufferedReader);
        }
        if (levelsSection == null || levelsSection.getKeys(false).isEmpty()) {
            levelsSection = defaultConfig.getConfigurationSection("levels");
            config.set("levels", levelsSection);
        }
        if (itemsSection == null || itemsSection.getKeys(false).isEmpty()) {
            itemsSection = defaultConfig.getConfigurationSection("items");
            config.set("items", itemsSection);
        }
        config.options().header(defaultConfig.options().header());
        config.save(chestsFile.toFile());
    }
    if (levelsSection == null) {
        plugin.getLogger().log(Level.WARNING, "Not loading chests.yml: no levels section found");
        return;
    }
    if (itemsSection == null) {
        plugin.getLogger().log(Level.WARNING, "Not loading chests.yml: no items section found");
        return;
    }

    for (String key : levelsSection.getKeys(false)) {
        if (levelsSection.isConfigurationSection(key)) {
            // is this bad? I'm not sure. it's a hack, for sure - but it does let us use getSetInt(),
            // and it allows us to display accurate paths in case of error.
            ConfigurationSection levelSection = levelsSection.getConfigurationSection(key);
            if (!levelSection.isInt("item-value")) {
                throw new SkyConfigurationException(
                        "Invalid chests.yml: level `" + key + "` is missing item-value!");
            }
            if (!levelSection.isInt("chance")) {
                throw new SkyConfigurationException(
                        "Invalid chests.yml: level `" + key + "` is missing chance!");
            }
            int itemValue = levelSection.getInt("item-value");
            int chance = levelSection.getInt("chance");
            incompleteLevels.put(key, new ChestLevel(key, itemValue, chance, null));
        } else {
            throw new SkyConfigurationException(
                    "Invalid chests.yml: non-map thing in levels: " + levelsSection.get(key));
        }
    }

    for (String key : itemsSection.getKeys(false)) {
        if (itemsSection.isList(key)) {
            ChestLevel incompleteLevel = incompleteLevels.remove(key);
            if (incompleteLevel == null) {
                throw new SkyConfigurationException("Invalid chests.yml: level `" + key
                        + "` has a section under items, but no section under levels!");
            }
            List<?> objectList = itemsSection.getList(key);
            List<SkyKitItem> itemList = new ArrayList<>(objectList.size());
            for (Object o : objectList) {
                if (o instanceof Map) {
                    @SuppressWarnings("unchecked")
                    SkyKitItem item = SkyKitDecoder.decodeItem((Map<String, Object>) o);
                    itemList.add(item);
                } else if (o instanceof String) {
                    String string = o.toString();
                    String materialString;
                    int amount;
                    if (!string.contains(",")) {
                        materialString = string;
                        amount = 1;
                    } else {
                        String[] split = string.split(",", 2);
                        materialString = split[0];
                        try {
                            amount = Integer.parseInt(split[1]);
                        } catch (NumberFormatException ex) {
                            throw new SkyConfigurationException(
                                    "Invalid amount number for item in chests.yml: not an integer: "
                                            + split[1]);
                        }
                    }
                    Material material = Material.matchMaterial(materialString);
                    if (material == null) {
                        throw new SkyConfigurationException("Error in chests.yml: the type string '"
                                + materialString
                                + "' is invalid. Check https://dabo.guru/projects/skywars/configuring-kits for a list of valid material names (at the bottom of the page).");
                    }
                    itemList.add(new SkyKitItemConfig(material, amount, null, null));
                } else {
                    throw new SkyConfigurationException(
                            "Invalid thing in items list for level in chests.yml: " + o);
                }
            }
            if (itemList.isEmpty()) {
                throw new SkyConfigurationException(
                        "Invalid chests.yml: level `" + key + "` items list is empty!");
            }
            levels.add(new ChestLevel(key, incompleteLevel.itemValue, incompleteLevel.chance, itemList));
        } else {
            throw new SkyConfigurationException(
                    "Invalid chests.yml: non-list thing in items: " + itemsSection.get(key));
        }
    }

    if (!incompleteLevels.isEmpty()) {
        if (incompleteLevels.size() == 1) {
            throw new SkyConfigurationException(
                    "Invalid chests.yml: level " + incompleteLevels.keySet().iterator().next()
                            + " has a section under levels, but no section under items!");
        } else {
            throw new SkyConfigurationException(
                    "Invalid chests.yml: multiple levels (" + new ArrayList<>(incompleteLevels.keySet())
                            + ") have sections under levels but no sections under items!");
        }
    }
}
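
The items loop above leans on the fact that remove doubles as a lookup: incompleteLevels.remove(key) fetches the pending level and deletes it in a single call, so whatever remains in the map afterwards is exactly the set of levels that never received an items section. A stripped-down sketch of that check-and-consume pattern (hypothetical names):

import java.util.HashMap;

public class CheckAndConsume {
    public static void main(String[] args) {
        HashMap<String, Integer> pending = new HashMap<>();
        pending.put("level1", 10);
        pending.put("level2", 20);

        // Consume an entry: remove() returns the old value, or null if absent.
        Integer itemValue = pending.remove("level1");
        if (itemValue == null) {
            throw new IllegalStateException("level1 has items but was never declared");
        }

        // Whatever is left was declared but never consumed.
        if (!pending.isEmpty()) {
            System.out.println("Levels without items: " + pending.keySet()); // [level2]
        }
    }
}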

From source file:com.MainFiles.Functions.java

public String generateReversal(HashMap<String, String> ISOdetails) throws IOException, InterruptedException {
    String response = "";
    String strResponse = "";
    try {
        // int attempts = Integer.parseInt(ISOdetails.get("trials"));
        String CORR_ID = this.generateCorelationID();
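        // Note: this remove is redundant; the put of "CorrelationID" below replaces any existing mapping.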
        ISOdetails.remove("CorrelationID");

        ISOdetails.put("0", "0420");
        ISOdetails.put("7", this.anyDate("MMddHHmmss"));
        ISOdetails.put("11", this.anyDate("MMddHHmmss"));
        ISOdetails.put("32", SOURCE_ID);
        ISOdetails.put("CorrelationID", CORR_ID);

        this.log("Reversal" + ISOdetails.get("CorrelationID"), "REVERSE");

        boolean sentToWebLogicAgain = false;
        HashMap ParamsFromAdapterAgain;
        ParamsFromAdapterAgain = new HashMap();

        QueueWriter queueWriter = new QueueWriter(QUEUE_REQUEST, PROVIDER_URL);

        int trials = 0;
        do {
            sentToWebLogicAgain = queueWriter.sendObject((HashMap) ISOdetails, CORR_ID);
            trials++;
        } while (!sentToWebLogicAgain && trials < 3);

        if (sentToWebLogicAgain) {
            System.out
                    .println("[SENT: Reversal Transaction Sent to ESB   -:]" + ISOdetails.get("CorrelationID"));
            long Start = System.currentTimeMillis();
            long Stop = Start + (20 * 1000);
            do {
                Thread.sleep(100);
                ParamsFromAdapterAgain = this.getWeblogicMessageFromQueue(ISOdetails.get("CorrelationID"));
            } while (ParamsFromAdapterAgain.isEmpty() && System.currentTimeMillis() < Stop);

            if (ParamsFromAdapterAgain.isEmpty()) {

                System.out.println("[SENT: Transaction Responses Failed from ESB-:] " + ISOdetails.get("37"));

                //Send Failed Response to POS
                String TransactionType = getTransactionType(ISOdetails.get("3").toString());

                strResponse += this.strResponseHeader(ISOdetails.get("68").toString()) + "#";
                strResponse += "AGENT ID:  " + ISOdetails.get("104").toString() + "#";
                strResponse += "TRAN NUM:  " + ISOdetails.get("37").toString() + "#";
                strResponse += "--------------------------------" + "#";
                strResponse += "                                " + "#";
                strResponse += padEqual(TransactionType.toUpperCase()) + "#";
                strResponse += "                                " + "#";
                strResponse += "   NO RESPONSE FROM ESB GATEWAY " + "#";
                strResponse += " " + "#";
                strResponse += this.strResponseFooter(ISOdetails.get("104").toString()) + "#";
                SendPOSResponse(strResponse, ISOdetails.get("37").toString());

            } else {
                response = this.genHashDelimiterString(ParamsFromAdapterAgain,
                        ISOdetails.get("CorrelationID").toString());
            }
        }
    } catch (Exception ex) {
        this.log("\nINFO fn_Updateagentpassword() :: " + ex.getMessage() + "\n" + this.StackTraceWriter(ex),
                "ERROR");
    }
    return response;
}

From source file:edu.amc.sakai.user.JLDAPDirectoryProvider.java

/**
 * Similar to iterating over <code>users</code>, passing
 * each element to {@link #getUser(UserEdit)} and removing the
 * {@link org.sakaiproject.user.api.UserEdit} if that method
 * returns <code>false</code>.
 *
 * <p>Adds search retry capability if any one lookup fails
 * with a directory error. Empties <code>users</code> and
 * returns if a retry exits exceptionally.</p>
 */
public void getUsers(Collection<UserEdit> users) {
    if (M_log.isDebugEnabled()) {
        M_log.debug("getUsers(): [Collection size = " + users.size() + "]");
    }

    LDAPConnection conn = null;
    boolean abortiveSearch = false;
    int maxQuerySize = getMaxObjectsToQueryFor();
    UserEdit userEdit = null;

    HashMap<String, UserEdit> usersToSearchInLDAP = new HashMap<String, UserEdit>();
    List<UserEdit> usersToRemove = new ArrayList<UserEdit>();
    try {
        int cnt = 0;
        for (Iterator<UserEdit> userEdits = users.iterator(); userEdits.hasNext();) {
            userEdit = (UserEdit) userEdits.next();
            String eid = userEdit.getEid();

            if (!(isSearchableEid(eid))) {
                userEdits.remove();
                //proceed ahead with this (perhaps the final) iteration
                //usersToSearchInLDAP needs to be processed unless empty
            } else {
                usersToSearchInLDAP.put(eid, userEdit);
                cnt++;
            }

            // We need to make sure this query isn't larger than maxQuerySize
            if ((!userEdits.hasNext() || cnt == maxQuerySize) && !usersToSearchInLDAP.isEmpty()) {
                if (conn == null) {
                    conn = ldapConnectionManager.getConnection();
                }

                String filter = ldapAttributeMapper.getManyUsersInOneSearch(usersToSearchInLDAP.keySet());
                List<LdapUserData> ldapUsers = searchDirectory(filter, null, null, null, null, maxQuerySize);

                for (LdapUserData ldapUserData : ldapUsers) {
                    String ldapEid = ldapUserData.getEid();

                    if (StringUtils.isEmpty(ldapEid)) {
                        continue;
                    }
                    ldapEid = ldapEid.toLowerCase();

                    UserEdit ue = usersToSearchInLDAP.get(ldapEid);
                    mapUserDataOntoUserEdit(ldapUserData, ue);
                    usersToSearchInLDAP.remove(ldapEid);
                }

                // see if there are any users that we could not find in the LDAP query
                for (Map.Entry<String, UserEdit> entry : usersToSearchInLDAP.entrySet()) {
                    usersToRemove.add(entry.getValue());
                }

                // clear the HashMap and reset the counter
                usersToSearchInLDAP.clear();
                cnt = 0;
            }
        }

        // Finally clean up the original collection and remove any users we could not find
        for (UserEdit userRemove : usersToRemove) {
            if (M_log.isDebugEnabled()) {
                M_log.debug("JLDAP getUsers could not find user: " + userRemove.getEid());
            }
            users.remove(userRemove);
        }

    } catch (LDAPException e) {
        abortiveSearch = true;
        throw new RuntimeException("getUsers(): LDAPException during search [eid = "
                + (userEdit == null ? null : userEdit.getEid()) + "][result code = " + e.resultCodeToString()
                + "][error message = " + e.getLDAPErrorMessage() + "]", e);
    } catch (Exception e) {
        abortiveSearch = true;
        throw new RuntimeException("getUsers(): RuntimeException during search eid = "
                + (userEdit == null ? null : userEdit.getEid()) + "]", e);
    } finally {

        if (conn != null) {
            if (M_log.isDebugEnabled()) {
                M_log.debug("getUsers(): returning connection to connection manager");
            }
            ldapConnectionManager.returnConnection(conn);
        }

        // no sense in returning a partially complete search result
        if (abortiveSearch) {
            if (M_log.isDebugEnabled()) {
                M_log.debug("getUsers(): abortive search, clearing received users collection");
            }
            users.clear();
        }
    }

}

From source file:com.evolveum.midpoint.model.common.stringpolicy.ValuePolicyProcessor.java

/**
 * Count cardinality.
 */
private Map<Integer, List<String>> cardinalityCounter(Map<StringLimitType, List<String>> lims,
        List<String> password, Boolean skipMatchedLims, boolean uniquenessReached, OperationResult op) {
    HashMap<String, Integer> counter = new HashMap<>();

    Map<StringLimitType, List<String>> mustBeFirst = new HashMap<>();
    for (Map.Entry<StringLimitType, List<String>> entry : lims.entrySet()) {
        final StringLimitType key = entry.getKey();
        int counterKey = 1;
        List<String> chars = entry.getValue();
        int i = 0;
        if (null != password) {
            i = charIntersectionCounter(entry.getValue(), password);
        }
        // If the max is exceeded, record an error; we are unable to continue
        if (key.getMaxOccurs() != null && i > key.getMaxOccurs()) {
            OperationResult o = new OperationResult("Limitation check :" + key.getDescription());
            o.recordFatalError("Exceeded maximal value for this limitation. " + i + ">" + key.getMaxOccurs());
            op.addSubresult(o);
            return null;
            // if the max is already reached, or skipping of matched limits is
            // enabled, skip the minimum counting
        } else if (key.getMaxOccurs() != null && i == key.getMaxOccurs()) {
            continue;
            // in the remaining cases the minimum is already reached
        } else if ((key.getMinOccurs() == null || i >= key.getMinOccurs()) && !skipMatchedLims) {
            continue;
        }
        for (String s : chars) {
            if (null == password || !password.contains(s) || uniquenessReached) {
                // if (null == counter.get(s)) {
                counter.put(s, counterKey);
                // } else {
                // counter.put(s, counter.get(s) + 1);
                // }
            }
        }
        counterKey++;
    }

    // If needed, remove disabled chars (limitations that are already fully reached)
    if (null != password) {
        for (StringLimitType l : lims.keySet()) {
            int i = charIntersectionCounter(lims.get(l), password);
            if (l.getMaxOccurs() != null && i > l.getMaxOccurs()) {
                OperationResult o = new OperationResult("Limitation check :" + l.getDescription());
                o.recordFatalError("Exceeded maximal value for this limitation. " + i + ">" + l.getMaxOccurs());
                op.addSubresult(o);
                return null;
            } else if (l.getMaxOccurs() != null && i == l.getMaxOccurs()) {
                // limitation matched remove all used chars
                LOGGER.trace("Skip " + l.getDescription());
                for (String charToRemove : lims.get(l)) {
                    counter.remove(charToRemove);
                }
            }
        }
    }

    // Transpose into a better format
    Map<Integer, List<String>> ret = new HashMap<>();
    for (String s : counter.keySet()) {
        // if not there initialize
        ret.computeIfAbsent(counter.get(s), k -> new ArrayList<>());
        ret.get(counter.get(s)).add(s);
    }
    return ret;
}
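
The cleanup loop above calls counter.remove(charToRemove) once per character. Since keySet() returns a live view backed by the map, the same effect is available in one call; a small illustrative sketch of that, plus the computeIfAbsent grouping used in the final loop (values invented for the example):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CounterDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counter = new HashMap<>();
        counter.put("a", 1);
        counter.put("b", 1);
        counter.put("c", 2);

        // Removing keys from the keySet() view removes the entries from the map.
        counter.keySet().removeAll(Arrays.asList("a", "b"));
        System.out.println(counter); // {c=2}

        // Group keys by their counter value, as in the transpose step above.
        counter.put("d", 2);
        Map<Integer, List<String>> grouped = new HashMap<>();
        for (Map.Entry<String, Integer> e : counter.entrySet()) {
            grouped.computeIfAbsent(e.getValue(), k -> new ArrayList<>()).add(e.getKey());
        }
        System.out.println(grouped); // {2=[c, d]}
    }
}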

From source file:net.sf.taverna.t2.security.credentialmanager.impl.CredentialManagerImpl.java

protected Map<URI, URI> getFragmentMappedURIsForAllUsernameAndPasswordPairs() throws CMException {
    synchronized (Security.class) {// FIXME synchronization on strange thing!
        if (cachedServiceURIsMap == null) {
            HashMap<URI, URI> map = new HashMap<>();
            // Get all service URIs that have username and password in the
            // Keystore
            for (URI serviceURI : getServiceURIsForAllUsernameAndPasswordPairs()) {
                // Always store 1-1, with or without fragment
                map.put(serviceURI, serviceURI);
                if (serviceURI.getFragment() == null)
                    continue;

                // Look up the no-fragment uri as an additional mapping
                URI noFragment;
                try {
                    noFragment = dnParser.setFragmentForURI(serviceURI, null);
                } catch (URISyntaxException e) {
                    logger.warn("Could not reset fragment for service URI " + serviceURI);
                    continue;
                }
                if (map.containsKey(noFragment)) {
                    if (map.get(noFragment).getFragment() != null) {
                        // No mapping for duplicates
                        map.remove(noFragment);
                        continue;
                    } // else it's noFragment -> noFragment, which is OK
                } else {
                    // Brand new, put it in
                    map.put(noFragment, serviceURI);
                }
            }
            cachedServiceURIsMap = map;
        }
        return cachedServiceURIsMap;
    }
}

From source file:com.conferenceengineer.android.iosched.io.SessionsHandler.java

private ArrayList<ContentProviderOperation> buildContentProviderOperations(SessionsResponse sessions,
        SessionsResponse starredSessions, TracksResponse tracks) {

    // If there was no starred sessions response (e.g. there was an auth issue,
    // or this is a local sync), keep all the locally starred sessions.
    boolean retainLocallyStarredSessions = (starredSessions == null);

    final ArrayList<ContentProviderOperation> batch = Lists.newArrayList();

    // Build lookup table for starredSessions mappings
    HashSet<String> starredSessionsMap = new HashSet<String>();
    if (starredSessions != null) {
        List<SessionResponse> starredSessionList = starredSessions.getSessions();
        if (starredSessionList != null) {
            for (SessionResponse session : starredSessionList) {
                String sessionId = session.getId();
                starredSessionsMap.add(sessionId);
            }
        }
    }

    // Build lookup table for track mappings
    // Assumes that sessions can only have one track. Not guaranteed by the Conference API,
    // but is being enforced by conference organizer policy.
    HashMap<String, TrackResponse> trackMap = new HashMap<String, TrackResponse>();
    if (tracks != null) {
        for (TrackResponse track : tracks.getTracks()) {
            List<String> sessionIds = track.getSessions();
            if (sessionIds != null) {
                for (String sessionId : sessionIds) {
                    trackMap.put(sessionId, track);
                }
            }
        }
    }

    if (sessions != null) {
        List<SessionResponse> sessionList = sessions.getSessions();
        int numSessions = sessionList.size();

        if (numSessions > 0) {
            LOGI(TAG, "Updating sessions data");

            Set<String> starredSessionIds = new HashSet<String>();
            if (retainLocallyStarredSessions) {
                Cursor starredSessionsCursor = mContext.getContentResolver().query(Sessions.CONTENT_STARRED_URI,
                        new String[] { ScheduleContract.Sessions.SESSION_ID }, null, null, null);
                while (starredSessionsCursor.moveToNext()) {
                    starredSessionIds.add(starredSessionsCursor.getString(0));
                }
                starredSessionsCursor.close();
            }

            // Clear out existing sessions
            batch.add(ContentProviderOperation
                    .newDelete(ScheduleContract.addCallerIsSyncAdapterParameter(Sessions.CONTENT_URI)).build());

            // Maintain a list of created session block IDs
            Set<String> blockIds = new HashSet<String>();

            // Maintain a map of insert operations for sandbox-only blocks
            HashMap<String, ContentProviderOperation> sandboxBlocks = new HashMap<String, ContentProviderOperation>();

            for (SessionResponse session : sessionList) {
                int flags = 0;
                String sessionId = session.getId();
                if (retainLocallyStarredSessions) {
                    flags = (starredSessionIds.contains(sessionId) ? PARSE_FLAG_FORCE_SCHEDULE_ADD
                            : PARSE_FLAG_FORCE_SCHEDULE_REMOVE);
                }
                if (session.getFlags() != 0) {
                    // Allow data set flags to override locally
                    // set ones (e.g. single talk slot additions).
                    flags = session.getFlags();
                }

                if (TextUtils.isEmpty(sessionId)) {
                    LOGW(TAG, "Found session with empty ID in API response.");
                    continue;
                }

                // Session title
                String sessionTitle = session.getTitle();
                String sessionSubtype = session.getSubtype();
                if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) {
                    sessionTitle = mContext.getString(R.string.codelab_title_template, sessionTitle);
                }

                // Whether or not it's in the schedule
                boolean inSchedule = starredSessionsMap.contains(sessionId);
                if ((flags & PARSE_FLAG_FORCE_SCHEDULE_ADD) != 0
                        || (flags & PARSE_FLAG_FORCE_SCHEDULE_REMOVE) != 0) {
                    inSchedule = (flags & PARSE_FLAG_FORCE_SCHEDULE_ADD) != 0;
                }

                if (EVENT_TYPE_KEYNOTE.equals(sessionSubtype)) {
                    // Keynotes are always in your schedule.
                    inSchedule = true;
                }

                // Clean up session abstract
                String sessionAbstract = session.getDescription();
                if (sessionAbstract != null) {
                    sessionAbstract = sessionAbstract.replace('\r', '\n');
                }

                // Hashtags
                TrackResponse track = trackMap.get(sessionId);
                String hashtag = null;
                if (track != null) {
                    hashtag = ParserUtils.sanitizeId(track.getTitle());
                }

                // Get block id
                long sessionStartTime = session.getStartTimestamp().longValue() * 1000;
                long sessionEndTime = session.getEndTimestamp().longValue() * 1000;
                String blockId = ScheduleContract.Blocks.generateBlockId(sessionStartTime, sessionEndTime);

                if (!blockIds.contains(blockId) && !EVENT_TYPE_SANDBOX.equals(sessionSubtype)) {
                    // New non-sandbox block
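                    // (the containsKey guard is optional: remove() is a no-op, returning null, when the key is absent)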
                    if (sandboxBlocks.containsKey(blockId)) {
                        sandboxBlocks.remove(blockId);
                    }
                    String blockType;
                    String blockTitle;
                    if (EVENT_TYPE_KEYNOTE.equals(sessionSubtype)) {
                        blockType = ScheduleContract.Blocks.BLOCK_TYPE_KEYNOTE;
                        blockTitle = mContext.getString(R.string.schedule_block_title_keynote);
                    } else if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) {
                        blockType = ScheduleContract.Blocks.BLOCK_TYPE_CODELAB;
                        blockTitle = mContext.getString(R.string.schedule_block_title_code_labs);
                    } else if (EVENT_TYPE_OFFICE_HOURS.equals(sessionSubtype)) {
                        blockType = ScheduleContract.Blocks.BLOCK_TYPE_OFFICE_HOURS;
                        blockTitle = mContext.getString(R.string.schedule_block_title_office_hours);
                    } else {
                        blockType = ScheduleContract.Blocks.BLOCK_TYPE_SESSION;
                        blockTitle = mContext.getString(R.string.schedule_block_title_sessions);
                    }

                    batch.add(ContentProviderOperation.newInsert(ScheduleContract.Blocks.CONTENT_URI)
                            .withValue(ScheduleContract.Blocks.BLOCK_ID, blockId)
                            .withValue(ScheduleContract.Blocks.BLOCK_TYPE, blockType)
                            .withValue(ScheduleContract.Blocks.BLOCK_TITLE, blockTitle)
                            .withValue(ScheduleContract.Blocks.BLOCK_START, sessionStartTime)
                            .withValue(ScheduleContract.Blocks.BLOCK_END, sessionEndTime).build());
                    blockIds.add(blockId);

                } else if (!sandboxBlocks.containsKey(blockId) && !blockIds.contains(blockId)
                        && EVENT_TYPE_SANDBOX.equals(sessionSubtype)) {
                    // New sandbox-only block, add insert operation to map
                    String blockType = ScheduleContract.Blocks.BLOCK_TYPE_SANDBOX;
                    String blockTitle = mContext.getString(R.string.schedule_block_title_sandbox);
                    sandboxBlocks.put(blockId,
                            ContentProviderOperation.newInsert(ScheduleContract.Blocks.CONTENT_URI)
                                    .withValue(ScheduleContract.Blocks.BLOCK_ID, blockId)
                                    .withValue(ScheduleContract.Blocks.BLOCK_TYPE, blockType)
                                    .withValue(ScheduleContract.Blocks.BLOCK_TITLE, blockTitle)
                                    .withValue(ScheduleContract.Blocks.BLOCK_START, sessionStartTime)
                                    .withValue(ScheduleContract.Blocks.BLOCK_END, sessionEndTime).build());

                }

                // Insert session info
                final ContentProviderOperation.Builder builder;
                if (EVENT_TYPE_SANDBOX.equals(sessionSubtype)) {
                    // Sandbox companies go in the special sandbox table

                    builder = ContentProviderOperation
                            .newInsert(ScheduleContract
                                    .addCallerIsSyncAdapterParameter(ScheduleContract.Sandbox.CONTENT_URI))
                            .withValue(SyncColumns.UPDATED, System.currentTimeMillis())
                            .withValue(ScheduleContract.Sandbox.COMPANY_ID, sessionId)
                            .withValue(ScheduleContract.Sandbox.COMPANY_NAME, sessionTitle)
                            .withValue(ScheduleContract.Sandbox.COMPANY_DESC, sessionAbstract)
                            .withValue(ScheduleContract.Sandbox.COMPANY_URL, session.getWebLink())
                            .withValue(ScheduleContract.Sandbox.COMPANY_LOGO_URL, session.getIconUrl())
                            .withValue(ScheduleContract.Sandbox.ROOM_ID, sanitizeId(session.getLocation()))
                            .withValue(ScheduleContract.Sandbox.TRACK_ID,
                                    (track != null ? track.getId() : null))
                            .withValue(ScheduleContract.Sandbox.BLOCK_ID, blockId);
                    batch.add(builder.build());

                } else {
                    // All other fields go in the normal sessions table
                    builder = ContentProviderOperation
                            .newInsert(ScheduleContract.addCallerIsSyncAdapterParameter(Sessions.CONTENT_URI))
                            .withValue(SyncColumns.UPDATED, System.currentTimeMillis())
                            .withValue(Sessions.SESSION_ID, sessionId)
                            .withValue(Sessions.SESSION_TYPE, sessionSubtype)
                            .withValue(Sessions.SESSION_LEVEL, null) // Not available
                            .withValue(Sessions.SESSION_TITLE, sessionTitle)
                            .withValue(Sessions.SESSION_ABSTRACT, sessionAbstract)
                            .withValue(Sessions.SESSION_HASHTAGS, hashtag)
                            .withValue(Sessions.SESSION_TAGS, null) // Not available
                            .withValue(Sessions.SESSION_URL, session.getWebLink())
                            .withValue(Sessions.SESSION_MODERATOR_URL, null) // Not available
                            .withValue(Sessions.SESSION_REQUIREMENTS, null) // Not available
                            .withValue(Sessions.SESSION_STARRED, inSchedule)
                            .withValue(Sessions.SESSION_YOUTUBE_URL, null) // Not available
                            .withValue(Sessions.SESSION_PDF_URL, null) // Not available
                            .withValue(Sessions.SESSION_NOTES_URL, null) // Not available
                            .withValue(Sessions.ROOM_ID, sanitizeId(session.getLocation()))
                            .withValue(Sessions.BLOCK_ID, blockId);

                    batch.add(builder.build());
                }

                // Replace all session speakers
                final Uri sessionSpeakersUri = Sessions.buildSpeakersDirUri(sessionId);
                batch.add(ContentProviderOperation
                        .newDelete(ScheduleContract.addCallerIsSyncAdapterParameter(sessionSpeakersUri))
                        .build());

                List<String> presenterIds = session.getPresenterIds();
                if (presenterIds != null) {
                    for (String presenterId : presenterIds) {
                        batch.add(ContentProviderOperation.newInsert(sessionSpeakersUri)
                                .withValue(SessionsSpeakers.SESSION_ID, sessionId)
                                .withValue(SessionsSpeakers.SPEAKER_ID, presenterId).build());
                    }
                }

                // Add track mapping
                if (track != null) {
                    String trackId = track.getId();
                    if (trackId != null) {
                        final Uri sessionTracksUri = ScheduleContract.addCallerIsSyncAdapterParameter(
                                ScheduleContract.Sessions.buildTracksDirUri(sessionId));
                        batch.add(ContentProviderOperation.newInsert(sessionTracksUri)
                                .withValue(ScheduleDatabase.SessionsTracks.SESSION_ID, sessionId)
                                .withValue(ScheduleDatabase.SessionsTracks.TRACK_ID, trackId).build());
                    }
                }

                // Codelabs: Add mapping to codelab table
                if (EVENT_TYPE_CODELAB.equals(sessionSubtype)) {
                    final Uri sessionTracksUri = ScheduleContract.addCallerIsSyncAdapterParameter(
                            ScheduleContract.Sessions.buildTracksDirUri(sessionId));
                    batch.add(ContentProviderOperation.newInsert(sessionTracksUri)
                            .withValue(ScheduleDatabase.SessionsTracks.SESSION_ID, sessionId)
                            .withValue(ScheduleDatabase.SessionsTracks.TRACK_ID, "CODE_LABS").build());
                }
            }

            // Insert sandbox-only blocks
            batch.addAll(sandboxBlocks.values());
        }
    }

    return batch;
}

From source file:com.splicemachine.derby.impl.sql.execute.actions.DDLConstantOperation.java

/**
 * Adjust dependencies of a table on ANSI UDTs. We only add one dependency
 * between a table and a UDT. If the table already depends on the UDT, we don't add
 * a redundant dependency.
 */
protected void adjustUDTDependencies(Activation activation, ColumnInfo[] columnInfos, boolean dropWholeTable)
        throws StandardException {
    if ((!dropWholeTable) && (columnInfos == null)) {
        return;
    }

    LanguageConnectionContext lcc = activation.getLanguageConnectionContext();
    TransactionController tc = lcc.getTransactionExecute();
    DataDictionary dd = lcc.getDataDictionary();
    TableDescriptor td = activation.getDDLTableDescriptor();

    int changedColumnCount = columnInfos == null ? 0 : columnInfos.length;
    HashMap addUdtMap = new HashMap();
    HashMap dropUdtMap = new HashMap();
    HashSet addColumnNames = new HashSet();
    HashSet dropColumnNames = new HashSet();

    // first find all of the new ansi udts which the table must depend on
    // and the old ones which are candidates for removal
    for (int i = 0; i < changedColumnCount; i++) {
        ColumnInfo ci = columnInfos[i];

        // skip this column if it is not a UDT
        AliasDescriptor ad = dd.getAliasDescriptorForUDT(tc, columnInfos[i].dataType);
        if (ad == null) {
            continue;
        }

        String key = ad.getObjectID().toString();

        if (ci.action == ColumnInfo.CREATE) {
            addColumnNames.add(ci.name);

            // no need to add the descriptor if it is already on the list
            if (addUdtMap.get(key) != null) {
                continue;
            }

            addUdtMap.put(key, ad);
        } else if (ci.action == ColumnInfo.DROP) {
            dropColumnNames.add(ci.name);
            dropUdtMap.put(key, ad);
        }
    }

    // nothing to do if there are no changed columns of udt type
    // and this is not a DROP TABLE command
    if ((!dropWholeTable) && (addUdtMap.size() == 0) && (dropUdtMap.size() == 0)) {
        return;
    }

    //
    // Now prune from the add list all udt descriptors for which we already have dependencies.
    // These are the udts for old columns. This supports the ALTER TABLE ADD COLUMN
    // case.
    //
    // Also prune from the drop list all udt descriptors which will still be
    // referenced by the remaining columns.
    //
    ColumnDescriptorList cdl = td.getColumnDescriptorList();
    int totalColumnCount = cdl.size();

    for (int i = 0; i < totalColumnCount; i++) {
        ColumnDescriptor cd = cdl.elementAt(i);

        // skip columns that are being added and dropped. we only want the untouched columns
        if (addColumnNames.contains(cd.getColumnName()) || dropColumnNames.contains(cd.getColumnName())) {
            continue;
        }

        // nothing to do if the old column isn't a UDT
        AliasDescriptor ad = dd.getAliasDescriptorForUDT(tc, cd.getType());
        if (ad == null) {
            continue;
        }

        String key = ad.getObjectID().toString();

        // ha, it is a UDT.
        if (dropWholeTable) {
            dropUdtMap.put(key, ad);
        } else {
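            // (the get() != null checks are defensive only; remove() simply returns null when the key is absent)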
            if (addUdtMap.get(key) != null) {
                addUdtMap.remove(key);
            }
            if (dropUdtMap.get(key) != null) {
                dropUdtMap.remove(key);
            }
        }
    }

    adjustUDTDependencies(lcc, dd, td, addUdtMap, dropUdtMap);
}

From source file:org.mindswap.swoop.renderer.entity.NLVisitor.java

private void reconcileNonIsaThingNodes(NLNode node) {
    HashMap links = node.getLinks();
    Set keys = links.keySet();
    NLLink isaLink = new NLLink("", NLVisitor.LINK_SUBCLASS);

    boolean thingFound = false;
    Set isaLinks = new HashSet();
    //Set nonIsaLinks = new HashSet();
    Set nonIsaThingLinks = new HashSet(); // may be more than one, unlike is-a
    for (Iterator iter = new HashSet(keys).iterator(); iter.hasNext();) {
        // cycle through each link
        NLLink link = (NLLink) iter.next();
        NLNode target = (NLNode) links.get(link);

        System.out.println("LINK: " + link.keyword + " (" + link.linkType + "), TARGET: " + target.keyword);

        if (!link.equals(isaLink)) {
            //nonIsaLinks.add( link );

            if (target.getKeyword().equals("Thing") || target.getKeyword().equals("thing")) {
                System.out.println("NON-ISA thing found");
                nonIsaThingLinks.add(link);
            }
        }
    }

    // pulling-up named classes for non-isa thing links
    for (Iterator nt = nonIsaThingLinks.iterator(); nt.hasNext();) {
        NLLink curr = (NLLink) nt.next();

        NLNode thingNode = (NLNode) links.get(curr);
        HashMap tLinks = thingNode.getLinks();

        Set tNonThingTargets = new HashSet();
        Set tKeys = tLinks.keySet();
        for (Iterator i = tKeys.iterator(); i.hasNext();) {
            NLLink tLink = (NLLink) i.next();
            NLNode tTarget = (NLNode) tLinks.get(tLink);

            if (tLink.equals(isaLink)) {
                if (!tTarget.getKeyword().equals("Thing") && !tTarget.getKeyword().equals("thing")) {
                    System.out.println("pull-up");

                    System.out.println(links);

                    links.remove(curr);
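                    // (this remove is redundant: the put below replaces the mapping for curr anyway)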

                    links.put(curr, tTarget);

                    tTarget.links.putAll(thingNode.links);
                    tTarget.links.remove(tLink);
                }
            }
        }
    }

    // Recurse
    keys = links.keySet();
    for (Iterator iter = keys.iterator(); iter.hasNext();) {
        NLLink link = (NLLink) iter.next();
        NLNode target = (NLNode) links.get(link);

        System.out.println("NEW LINK: " + link.keyword + " (" + link.linkType + "), TARGET: " + target.keyword);

        reconcileNonIsaThingNodes(target);
    }
}

From source file:io.pravega.segmentstore.server.writer.SegmentAggregatorTests.java

/**
 * Tests a scenario where data that is about to be added already exists in Storage. This would most likely happen
 * in a recovery situation, where we committed the data but did not properly ack/truncate it from the DataSource.
 */
@Test
@SuppressWarnings("checkstyle:CyclomaticComplexity")
public void testRecovery() throws Exception {
    final int appendCount = 100;

    @Cleanup
    TestContext context = new TestContext(DEFAULT_CONFIG);
    context.storage.create(context.segmentAggregator.getMetadata().getName(), TIMEOUT).join();
    for (SegmentAggregator a : context.transactionAggregators) {
        context.storage.create(a.getMetadata().getName(), TIMEOUT).join();
    }

    // Store written data by segment - so we can check it later.
    HashMap<Long, ByteArrayOutputStream> dataBySegment = new HashMap<>();
    ArrayList<StorageOperation> operations = new ArrayList<>();
    val expectedMergeOpAck = new ArrayList<Map.Entry<Long, Long>>();

    // Create a segment and all its Transactions (do not initialize yet).
    ByteArrayOutputStream parentData = new ByteArrayOutputStream();
    dataBySegment.put(context.segmentAggregator.getMetadata().getId(), parentData);

    // All Transactions have appends (the first 1/3 of Transactions just have appends, which exist in Storage as well)
    for (int i = 0; i < context.transactionAggregators.length; i++) {
        SegmentAggregator transactionAggregator = context.transactionAggregators[i];
        long transactionId = transactionAggregator.getMetadata().getId();
        ByteArrayOutputStream writtenData = new ByteArrayOutputStream();
        dataBySegment.put(transactionId, writtenData);

        for (int appendId = 0; appendId < appendCount; appendId++) {
            StorageOperation appendOp = generateAppendAndUpdateMetadata(appendId, transactionId, context);
            operations.add(appendOp);
            getAppendData(appendOp, writtenData, context);
        }

        // Second and third 1/3s of Transactions are sealed, with the seals in storage, but we'll still add them.
        boolean isSealed = i >= context.transactionAggregators.length / 3;
        if (isSealed) {
            operations.add(generateSealAndUpdateMetadata(transactionId, context));
        }

        // Last 1/3 of Transactions are also merged.
        boolean isMerged = isSealed && (i >= context.transactionAggregators.length * 2 / 3);
        if (isMerged) {
            operations.add(generateMergeTransactionAndUpdateMetadata(transactionId, context));
            ByteArrayOutputStream transactionData = dataBySegment.get(transactionId);
            parentData.write(transactionData.toByteArray());
            transactionData.close();
            dataBySegment.remove(transactionId);
            expectedMergeOpAck.add(new AbstractMap.SimpleImmutableEntry<>(
                    context.segmentAggregator.getMetadata().getId(), transactionId));
        }
    }

    // Populate the storage.
    for (Map.Entry<Long, ByteArrayOutputStream> e : dataBySegment.entrySet()) {
        context.storage
                .write(writeHandle(context.containerMetadata.getStreamSegmentMetadata(e.getKey()).getName()), 0,
                        new ByteArrayInputStream(e.getValue().toByteArray()), e.getValue().size(), TIMEOUT)
                .join();
    }

    for (SegmentAggregator a : context.transactionAggregators) {
        if (a.getMetadata().isSealed()) {
            context.storage.seal(writeHandle(a.getMetadata().getName()), TIMEOUT).join();
        }

        if (a.getMetadata().isMerged() || a.getMetadata().isDeleted()) {
            context.storage.delete(writeHandle(a.getMetadata().getName()), TIMEOUT).join();
        }
    }

    // Now initialize the SegmentAggregators
    context.segmentAggregator.initialize(TIMEOUT, executorService()).join();
    for (SegmentAggregator a : context.transactionAggregators) {
        a.initialize(TIMEOUT, executorService()).join();
    }

    // Add all operations we had so far.
    val actualMergeOpAck = new ArrayList<Map.Entry<Long, Long>>();
    context.dataSource.setCompleteMergeCallback((target, source) -> actualMergeOpAck
            .add(new AbstractMap.SimpleImmutableEntry<Long, Long>(target, source)));
    for (StorageOperation o : operations) {
        int transactionIndex = (int) (o.getStreamSegmentId() - TRANSACTION_ID_START);
        SegmentAggregator a = transactionIndex < 0 ? context.segmentAggregator
                : context.transactionAggregators[transactionIndex];
        a.add(o);
    }
    context.dataSource.setCompleteMergeCallback(null);

    // And now finish up the operations (merge all Transactions).
    for (SegmentAggregator a : context.transactionAggregators) {
        if (!a.getMetadata().isSealed()) {
            a.add(generateSealAndUpdateMetadata(a.getMetadata().getId(), context));
        }

        if (!a.getMetadata().isMerged()) {
            context.segmentAggregator
                    .add(generateMergeTransactionAndUpdateMetadata(a.getMetadata().getId(), context));
            ByteArrayOutputStream transactionData = dataBySegment.get(a.getMetadata().getId());
            parentData.write(transactionData.toByteArray());
            transactionData.close();
            dataBySegment.remove(a.getMetadata().getId());
        }
    }

    flushAllSegments(context);

    // Verify that in the end, the contents of the parent segment are as expected.
    verifyParentSegmentData(parentData, context);
    AssertExtensions.assertListEquals(
            "Unexpected callback calls to completeMerge for already processed operations.", expectedMergeOpAck,
            actualMergeOpAck, Map.Entry::equals);
}