Example usage for java.util HashMap containsValue

List of usage examples for java.util HashMap containsValue

Introduction

On this page you can find example usage of java.util HashMap containsValue.

Prototype

public boolean containsValue(Object value) 

Source Link

Document

Returns true if this map maps one or more keys to the specified value.

Usage

From source file:it.fub.jardin.server.DbUtils.java

/**
 * Imports a CSV file into the given resultset. The first line of the file
 * must contain the column names; every following line becomes one record.
 * Depending on {@code type} the records are either inserted or used to
 * update existing rows, and interested users are notified by mail.
 *
 * @param mailUtility used to send change notifications
 * @param credentials user performing the import (used for logging/auditing)
 * @param resultsetId target resultset
 * @param importFile CSV file to read
 * @param ts text qualifier (quote character, first char is used)
 * @param fs field separator (first char is used)
 * @param tipologia file layout; "fix" (fixed-width) is not implemented yet
 * @param type {@code UploadDialog.TYPE_INSERT} for insert, anything else updates
 * @param condition update condition passed through to {@code updateObjects}
 * @param operazione operation label passed to the change notification
 * @return the operation code returned by setObjects/updateObjects (&gt;0 on success)
 * @throws HiddenException if the file cannot be found or read
 * @throws VisibleException if a column is unknown or a line has too many fields
 * @throws SQLException propagated from the underlying database operations
 */
public int importFile(MailUtility mailUtility, final Credentials credentials, final int resultsetId,
        final File importFile, final String ts, final String fs, final String tipologia, final String type,
        final String condition, String operazione) throws HiddenException, VisibleException, SQLException {

    int opCode = 0;
    BufferedReader in = null;

    try {
        in = new BufferedReader(new FileReader(importFile));
    } catch (FileNotFoundException e) {
        e.printStackTrace();
        throw new HiddenException("file " + importFile.getName() + " non trovato");
    }

    String[] columns = null;
    List<BaseModelData> recordList = new ArrayList<BaseModelData>();

    try {
        if (tipologia.equalsIgnoreCase("fix")) {
            // TODO gestione campo a lunghezza fissa da db!
            // (fixed-width layout support is not implemented yet)
        }

        CSVParser csvp = new CSVParser(in);
        csvp.changeDelimiter(fs.charAt(0));
        csvp.changeQuote(ts.charAt(0));
        csvp.setCommentStart("#");

        // The first row must contain the column names.
        columns = csvp.getLine();

        // Validate the header against the resultset fields. The field list is
        // fetched once, not once per column as before.
        HashMap<Integer, String> rsFieldList = getResultsetFields(resultsetId);
        List<String> unknownColumns = new ArrayList<String>();
        for (String col : columns) {
            // containsValue() is a linear scan, acceptable for header-sized input
            if (!rsFieldList.containsValue(col)) {
                unknownColumns.add(col);
            }
        }
        if (!unknownColumns.isEmpty()) {
            throw new VisibleException("Attenzione!!! Colonna '" + unknownColumns.get(0) + "' non riconosciuta");
        }

        String[] t = null;
        int lineFailed = 0;
        try {
            while ((t = csvp.getLine()) != null) {
                lineFailed++;
                BaseModelData bm = new BaseModelData();
                // Map each field of the line to its header column; a line with
                // more fields than columns triggers the AIOOBE handler below.
                for (int i = 0; i < t.length; i++) {
                    bm.set(columns[i], t[i]);
                }
                recordList.add(bm);
            }
        } catch (ArrayIndexOutOfBoundsException ex) {
            throw new VisibleException("Troppi campi nel file: " + t.length + " alla riga " + (lineFailed + 1));
        } catch (IOException e) {
            // Best-effort: keep the records parsed so far (preserved behavior).
            e.printStackTrace();
        }

    } catch (IOException e) {
        e.printStackTrace();
        throw new HiddenException("impossibile leggere il file: " + importFile.getName());
    } finally {
        // The reader was previously leaked; always close it.
        try {
            in.close();
        } catch (IOException ignored) {
            // best-effort close, nothing useful to do here
        }
    }

    if (type.equalsIgnoreCase(UploadDialog.TYPE_INSERT)) {
        opCode = this.setObjects(resultsetId, recordList, credentials.getUsername());
    } else {
        opCode = this.updateObjects(resultsetId, recordList, condition, credentials.getUsername());
    }
    if (opCode > 0) {
        JardinLogger.info(credentials.getUsername(), "Objects successfull setted for resultset " + resultsetId);
        try {
            this.notifyChanges(mailUtility, resultsetId, recordList, operazione, credentials.getUsername());
        } catch (SQLException e) {
            // Notification failure must not fail the import itself.
            e.printStackTrace();
        }
    } else {
        JardinLogger.error(credentials.getUsername(),
                "Error in setting objects for resultset " + resultsetId + "!");
    }
    return opCode;
}

From source file:edu.umich.ctools.sectionsUtilityTool.SectionsUtilityToolServlet.java

/**
 * Validates the LTI launch for this request, caches the launch data in the
 * HTTP session and exposes the values required by the velocity template
 * through the given context.
 *
 * @param context velocity tool context (must be a {@code ViewToolContext})
 * @param request the incoming LTI launch request
 */
public void storeContext(Context context, HttpServletRequest request) {
    Map<String, String> ltiValues = new HashMap<String, String>();

    HttpServletResponse resp = ((ViewToolContext) context).getResponse();
    HttpSession httpSession = request.getSession(true);
    M_log.debug("session id: " + httpSession.getId());

    // Collect the launch parameters this tool requires from the request.
    HashMap<String, Object> launchValues = new HashMap<String, Object>();
    launchValues.put(CUSTOM_CANVAS_COURSE_ID, request.getParameter(CUSTOM_CANVAS_COURSE_ID));
    launchValues.put(CUSTOM_CANVAS_ENROLLMENT_STATE, request.getParameter(CUSTOM_CANVAS_ENROLLMENT_STATE));
    launchValues.put(CUSTOM_CANVAS_USER_LOGIN_ID, request.getParameter(CUSTOM_CANVAS_USER_LOGIN_ID));
    launchValues.put(LIS_PERSON_CONTACT_EMAIL_PRIMARY,
            request.getParameter(LIS_PERSON_CONTACT_EMAIL_PRIMARY));
    launchValues.put(LIS_PERSON_NAME_FAMILY, request.getParameter(LIS_PERSON_NAME_FAMILY));
    launchValues.put(LIS_PERSON_NAME_GIVEN, request.getParameter(LIS_PERSON_NAME_GIVEN));
    launchValues.put(CUSTOM_CANVAS_USER_ID, request.getParameter(CUSTOM_CANVAS_USER_ID));
    launchValues.put(SESSION_ROLES_FOR_ADDING_TEACHER,
            appExtPropertiesFile.getProperty(ROLE_CAN_ADD_TEACHER));

    // Reuse session data from a previous launch when available.
    TcSessionData sessionData = (TcSessionData) httpSession.getAttribute(TC_SESSION_DATA);
    OauthCredentials launchCredentials = oacf.getOauthCredentials(ltiKey);
    if (sessionData == null) {
        sessionData = new TcSessionData(request, launchCredentials, launchValues);
    }
    httpSession.setAttribute(TC_SESSION_DATA, sessionData);
    M_log.debug("TC Session Data: " + sessionData.getUserId());

    // sanity check the result
    if (sessionData.getUserId() == null || sessionData.getUserId().length() == 0) {
        reportLaunchError(request, resp,
                "Canvas Course Manager: tc session data is bad - userId is empty.",
                "Canvas Course Manager LTI: tc session data is bad: userId is empty.");
        return;
    }

    // Any missing launch parameter shows up as a null value in the map.
    if (launchValues.containsValue(null)) {
        reportLaunchError(request, resp,
                "Canvas Course Manager: Found launch parameters null.",
                "Canvas Course Manager LTI: null Launch parameter found.");
        return;
    }

    // Verify this is an LTI launch request and some of the required parameters (if not stub testing)
    if (!isStubTesting) {
        boolean badLaunch = !SectionUtilityToolFilter.BASIC_LTI_LAUNCH_REQUEST
                .equals(request.getParameter(SectionUtilityToolFilter.LTI_MESSAGE_TYPE))
                || !LTI_1P0_CONST.equals(request.getParameter(LTI_VERSION)) || ltiKey == null;
        if (badLaunch) {
            try {
                M_log.debug("LTI request Message: "
                        + request.getParameter(SectionUtilityToolFilter.LTI_MESSAGE_TYPE));
                M_log.debug("LTI request Version: " + request.getParameter(LTI_VERSION));
                M_log.debug("LTI Key: " + ltiKey);
                doError(request, resp,
                        "Missing required parameter:   LTI Message Type, LTI Version, or Consumer Key is incorrect.");
            } catch (IOException e) {
                M_log.error("fillContext: IOException: ", e);
            }
            return;
        }

        // Verify the OAuth signature of the launch message.
        if (!checkForValidMessage(request, sessionData.getOauthCredentials())) {
            M_log.error("Launch data does not validate");
            return;
        }
    }

    // Fill context with the required lti values. The VelocityViewServlet will
    // take care of sending the processing on to the proper velocity template.
    fillCcmValuesForContext(ltiValues, request);

    context.put("ltiValues", ltiValues);
}

/**
 * Answers the request with HTTP 500, logs {@code logMsg} and renders the
 * error page with {@code userMsg}. Extracted from the two identical inline
 * error paths of {@link #storeContext}.
 */
private void reportLaunchError(HttpServletRequest request, HttpServletResponse response, String logMsg,
        String userMsg) {
    response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    M_log.error(logMsg);
    try {
        doError(request, response, userMsg);
    } catch (IOException e) {
        M_log.error("fillContext: IOException: ", e);
    }
}

From source file:de.decoit.visa.rdf.RDFManager.java

/**
 * Process the contents of an RDF model. All objects of the property
 * 'device' will be converted to NetworkComponent objects. Their type is
 * defined by the 'type' property: 'switch' will cause an NCSwitch to be
 * created, 'vm' will create a NCVM object. All objects of the property
 * 'interface' will be converted to interfaces on the component that is
 * defined by the subject of the 'interface' property. The interfaces will
 * be IP configured if an 'address' property of type 'ipv4' or 'ipv6' is
 * present. All 'connected' statements will be converted to NetworkCable
 * objects that connect the interfaces which are object and subject to the
 * statement.
 *
 * @param pMod The named model URI which will be processed. Can be set to
 *            null to use the default model.
 * @return A set of the local names of all non-switch devices.
 * @throws RDFSourceException if the model contains semantic errors
 */
private HashSet<String> processModel(String pMod) throws RDFSourceException {
    addGroupStatements(pMod);

    // Local names of all non-switch devices; the caller uses these to update
    // interface orientations after processing (see the non-switch branch below).
    HashSet<String> rv = new HashSet<>();

    // Get all networks in the topology
    QueryExecution qexec = QueryExecutionFactory.create(getNetworkListSPARQL(pMod), ds);
    ArrayList<QuerySolution> globalNetworkList = resultSetToList(qexec.execSelect());
    qexec.close();

    for (QuerySolution qs : globalNetworkList) {
        String addr = qs.getLiteral("nwaddr").getString();
        int netmask;
        IPVersion version = IPVersion.getVersion(qs.getLiteral("nwtype").getString());

        if (qs.get("nwmask") != null) {
            netmask = qs.getLiteral("nwmask").getInt();
        } else {
            // No mask in the model: assume a /24 network
            netmask = 24;
        }

        TEBackend.TOPOLOGY_STORAGE.createNetwork(addr, netmask, version);
    }

    // Get all VLANs in the topology
    qexec = QueryExecutionFactory.create(getVLANListSPARQL(pMod), ds);
    ArrayList<QuerySolution> globalVLANList = resultSetToList(qexec.execSelect());
    qexec.close();

    for (QuerySolution qs : globalVLANList) {
        try {
            Resource vlanRes = qs.getResource("vlan");
            String vlan = vlanRes.getLocalName();
            Literal idLit = qs.getLiteral("id");
            int id = idLit.getInt();

            VLAN newVLAN = TEBackend.TOPOLOGY_STORAGE.createVLAN(vlan, id);

            // The storage may assign a different ID than requested; if so,
            // write the actual ID back into the named model so model and
            // topology stay consistent.
            if (newVLAN.getID() != id) {
                Model nMod = ds.getNamedModel(pMod);
                nMod.removeAll(vlanRes, VISA.ID, null);
                nMod.add(vlanRes, VISA.ID, nMod.createLiteral(String.valueOf(newVLAN.getID())));
            }
        } catch (IllegalArgumentException ex) {
            // VLAN creation failed: log and continue with the remaining VLANs.
            TEBackend.logException(ex, log);

            StringBuilder sb = new StringBuilder("VLAN ");
            sb.append(qs.getResource("vlan").getLocalName());
            sb.append(" will be missing in topology!");
            log.error(sb.toString());
        }
    }

    // All connections between ports will be stored here
    HashMap<String, String> connections = new HashMap<>();

    // Get all connections in the topology
    qexec = QueryExecutionFactory.create(getConnectionListSPARQL(pMod), ds);
    ResultSet connectionList = qexec.execSelect();

    while (connectionList.hasNext()) {
        QuerySolution qs = connectionList.next();

        String src = qs.getResource("sif").getLocalName();
        String target = qs.getResource("oif").getLocalName();

        // Only add the connection to the map if the source is not present
        // in the map. The containsValue() scan is linear but topologies are
        // small; it deduplicates the A->B / B->A pair of statements.
        if (!connections.containsKey(src) && !connections.containsValue(src)) {
            connections.put(src, target);
        }
    }

    qexec.close();

    // Switches are collected so group switches can be created after all
    // cables exist (see the end of this method).
    HashSet<NCSwitch> switchSet = new HashSet<>();

    // Get all devices in the topology
    qexec = QueryExecutionFactory.create(getDevListSPARQL(rootNode, pMod), ds);
    ArrayList<QuerySolution> deviceList = resultSetToList(qexec.execSelect());
    qexec.close();

    // Iterate over all devices and process their information
    for (QuerySolution qs : deviceList) {
        ArrayList<HashMap<String, String>> ports = new ArrayList<>();

        Resource dev = qs.getResource("dev");
        String devLocalName = dev.getLocalName();

        // Get literal properties of the node
        String devName = qs.getLiteral("name").getString();
        String devTypeLit = qs.getLiteral("type").getString();
        String devGroup = qs.getLiteral("cgname").getString();

        // Map the model's type literal to the internal type: switches stay
        // "switch", everything else is "host" (physical) or "vm" (virtual).
        String devType;
        if (devTypeLit.equals("switch")) {
            devType = devTypeLit;
        } else {
            // NOTE(review): this compares the Literal object itself against
            // VISA.BOOL_TRUE — confirm Literal.equals() matches by value here,
            // otherwise the "host" branch may never be taken.
            if (qs.getLiteral("phys").equals(VISA.BOOL_TRUE)) {
                devType = "host";
            } else {
                devType = "vm";
            }
        }

        // Get all interfaces of this device
        QueryExecution ifListQexec = QueryExecutionFactory.create(getInterfaceListSPARQL(dev, pMod), ds);
        ArrayList<QuerySolution> interfaceList = resultSetToList(ifListQexec.execSelect());
        ifListQexec.close();

        // Iterate over all interfaces
        for (QuerySolution ifQS : interfaceList) {
            Resource ifRes = ifQS.getResource("if");

            // Property map describing one port; consumed by the createVM/
            // createHost/createSwitch factory calls below.
            HashMap<String, String> port = new HashMap<>();

            // Get local name of the interface
            String ifLocalName = ifRes.getLocalName();
            port.put("localName", ifLocalName);

            if (devType.equals("switch")) {
                // Switch interfaces always point upwards
                port.put("orientation", PortOrientation.TOP.toString());

                // VLAN processing is only useful on switch interfaces
                QueryExecution vlanQexec = QueryExecutionFactory
                        .create(getInterfaceDetailVLANSPARQL(ifRes, pMod), ds);
                ArrayList<QuerySolution> vlanList = resultSetToList(vlanQexec.execSelect());
                vlanQexec.close();

                // Process VLANs if there are any
                if (vlanList.size() > 0) {
                    StringBuilder sbVLAN = new StringBuilder();

                    // Store the VLAN local names as a single string
                    // separated by ";"
                    boolean first = true;
                    for (QuerySolution vlanQs : vlanList) {
                        Resource vlanRes = vlanQs.getResource("vlan");
                        if (!first) {
                            sbVLAN.append(";");
                        } else {
                            first = false;
                        }
                        sbVLAN.append(vlanRes.getLocalName());
                    }

                    port.put("vlan", sbVLAN.toString());
                }
            } else {
                // Add a random interface orientation to the map, this will
                // be replaced later
                int rnd = (int) Math.ceil(Math.random() * 4);
                switch (rnd) {
                case 1:
                    port.put("orientation", PortOrientation.TOP.toString());
                    break;
                case 2:
                    port.put("orientation", PortOrientation.BOTTOM.toString());
                    break;
                case 3:
                    port.put("orientation", PortOrientation.LEFT.toString());
                    break;
                case 4:
                    port.put("orientation", PortOrientation.RIGHT.toString());
                    break;
                default:
                    // rnd should be 1..4; defensive fallback
                    port.put("orientation", PortOrientation.TOP.toString());
                }

                // Only non-switch components will have their interface
                // orientations updated after model processing
                rv.add(devLocalName);
            }

            // Get all address nodes connected to this interface
            QueryExecution addrQexec = QueryExecutionFactory
                    .create(getInterfaceDetailAddressSPARQL(ifRes, pMod), ds);
            ResultSet addrIt = addrQexec.execSelect();

            // Iterate over all address nodes
            while (addrIt.hasNext()) {
                QuerySolution addrQs = addrIt.next();

                // IP address and version are always present
                port.put("IPAddress", addrQs.getLiteral("addrip").getString());
                // port.put("IPVersion",
                // addrQs.getLiteral("addrtype").getString());

                // Check if a network resource is connected to this address
                // node
                if (addrQs.get("nettype") != null && addrQs.get("netip") != null
                        && addrQs.get("netmask") != null) {
                    // Network resource is present, read the values
                    // port.put("IPSubnetMask",
                    // addrQs.getLiteral("netmask").getString());
                    port.put("IPNetwork", addrQs.getLiteral("netip").getString());
                } else {
                    if (addrQs.getLiteral("addrtype").getString().equals(IPVersion.V4.toString())) {
                        // No network resource found, use default values:
                        // Assume a subnet mask length of 24bit as default
                        // if no
                        // subnet mask was defined
                        // port.put("IPSubnetMask", "24");

                        // Using a 24bit subnet mask the network address is
                        // "xxx.yyy.zzz.0"
                        // Extract the subnet from the IP address
                        Pattern ipPattern = Pattern.compile("^(.{1,3}\\..{1,3}\\..{1,3}\\.).{1,3}$");
                        Matcher ipMatcher = ipPattern.matcher(port.get("IPAddress"));
                        // NOTE(review): matches() return value is ignored; a
                        // malformed IPv4 string would make group(1) throw —
                        // confirm addresses are validated upstream.
                        ipMatcher.matches();

                        // Append the 0 as network address
                        StringBuilder sbNetwork = new StringBuilder(ipMatcher.group(1));
                        sbNetwork.append("0");

                        if (TEBackend.TOPOLOGY_STORAGE.getNetwork(sbNetwork.toString()) == null) {
                            IPNetwork ipNet = TEBackend.TOPOLOGY_STORAGE.createNetwork(sbNetwork.toString(), 24,
                                    IPVersion.getVersion(addrQs.getLiteral("addrtype").getString()));

                            port.put("IPNetwork", ipNet.getNetworkAddressString());
                        } else {
                            port.put("IPNetwork", sbNetwork.toString());
                        }
                    } else if (addrQs.getLiteral("addrtype").getString().equals(IPVersion.V6.toString())) {
                        // TODO handle IPv6 networks!
                        throw new UnsupportedOperationException(
                                "IPv6 addresses without network node are not supported yet");
                    } else {
                        throw new IllegalStateException("Unknown IP version detected");
                    }
                }

                // TODO consider multiple address nodes per interface
                // Break loop since multiple address nodes are not supported
                break;
            }

            ports.add(port);
        }

        try {
            NetworkComponent newNC;

            switch (devType) {
            case "vm":
                newNC = TEBackend.TOPOLOGY_STORAGE.createVM(ports, devName, null, null, devLocalName);
                break;
            case "host":
                newNC = TEBackend.TOPOLOGY_STORAGE.createHost(ports, devName, null, null, devLocalName);
                break;
            case "switch":
                newNC = TEBackend.TOPOLOGY_STORAGE.createSwitch(ports, devName,
                        new Dimension2D(ports.size(), 3), null, devLocalName);
                switchSet.add((NCSwitch) newNC);
                break;
            default:
                throw new RDFSourceException("Unknown component type provided");
            }

            // Add the new component to the group it is assigned to
            newNC.getConfig().setComponentGroup(devGroup);
        } catch (IllegalArgumentException | RDFSourceException ex) {
            // Component creation failed: log and continue with the next device.
            TEBackend.logException(ex, log);

            StringBuilder sb = new StringBuilder("Component ");
            sb.append(devLocalName);
            sb.append(" will be missing in topology!");
            log.error(sb.toString());
        }
    }

    // Create cables for the connections
    for (Map.Entry<String, String> conn : connections.entrySet()) {
        // Get source and target interfaces
        NetworkComponent.Interface src = TEBackend.TOPOLOGY_STORAGE.getInterface(conn.getKey());
        NetworkComponent.Interface target = TEBackend.TOPOLOGY_STORAGE.getInterface(conn.getValue());

        GroupInterface gIf = null;

        // If the connected devices belong to different groups, create a
        // connection using a GroupInterface
        if (!src.getComponentGroup().equals(target.getComponentGroup())) {
            // Determine which component is inner and outer, only create
            // GroupInterface connection if the outer component is no switch.
            // "0.0.0.0" appears to denote the global (outer) group — TODO confirm.
            if (src.getComponentGroup().equals("0.0.0.0") && !(src.getComponent() instanceof NCSwitch)) {
                if (log.isDebugEnabled()) {
                    StringBuilder sb = new StringBuilder("Group-Interface connection: ");
                    sb.append(target.getComponentGroup());
                    sb.append(" (inner) <-> ");
                    sb.append(src.getComponentGroup());
                    sb.append(" (outer)");

                    log.debug(sb.toString());

                    sb = new StringBuilder("Interfaces: ");
                    sb.append(target.getIdentifier());
                    sb.append(" (inner) <-> ");
                    sb.append(src.getIdentifier());
                    sb.append(" (outer)");

                    log.debug(sb.toString());
                }

                gIf = TEBackend.TOPOLOGY_STORAGE.getComponentGroupByName(target.getComponentGroup())
                        .createOuterConnection(target, src);
            } else if (target.getComponentGroup().equals("0.0.0.0")
                    && !(target.getComponent() instanceof NCSwitch)) {
                if (log.isDebugEnabled()) {
                    StringBuilder sb = new StringBuilder("Group-Interface connection: ");
                    sb.append(src.getComponentGroup());
                    sb.append(" (inner) <-> ");
                    sb.append(target.getComponentGroup());
                    sb.append(" (outer)");

                    log.debug(sb.toString());

                    sb = new StringBuilder("Interfaces: ");
                    sb.append(src.getIdentifier());
                    sb.append(" (inner) <-> ");
                    sb.append(target.getIdentifier());
                    sb.append(" (outer)");

                    log.debug(sb.toString());
                }

                gIf = TEBackend.TOPOLOGY_STORAGE.getComponentGroupByName(src.getComponentGroup())
                        .createOuterConnection(src, target);
            } else {
                // Neither side is in the global group: unsupported layout,
                // the cable is still created below but without a GroupInterface.
                log.error("Connection between two non-global groups detected");

                if (log.isDebugEnabled()) {
                    StringBuilder sb = new StringBuilder("Connected groups: ");
                    sb.append(src.getComponentGroup());
                    sb.append(" <-> ");
                    sb.append(target.getComponentGroup());

                    log.debug(sb.toString());

                    sb = new StringBuilder("Interfaces: ");
                    sb.append(src.getIdentifier());
                    sb.append(" <-> ");
                    sb.append(target.getIdentifier());

                    log.debug(sb.toString());
                }
            }
        }

        // Create the NetworkCable
        TEBackend.TOPOLOGY_STORAGE.createCable(src, target, gIf);
    }

    // Create group switches where necessary
    for (NCSwitch ncs : switchSet) {
        ncs.createGroupSwitches();
    }

    return rv;
}

From source file:guineu.modules.filter.Alignment.RANSACGCGC.RansacGCGCAlignerTask.java

/**
 *
 * @param peakList/*from ww  w .ja  v  a 2 s  .c o  m*/
 * @return
 */
private HashMap<PeakListRow, PeakListRow> getAlignmentMap(Dataset peakList) {

    // Create a table of mappings for best scores
    HashMap<PeakListRow, PeakListRow> alignmentMapping = new HashMap<PeakListRow, PeakListRow>();

    if (alignedPeakList.getNumberRows() < 1) {
        return alignmentMapping;
    }

    // Create a sorted set of scores matching
    TreeSet<RowVsRowGCGCScore> scoreSet = new TreeSet<RowVsRowGCGCScore>();

    // RANSAC algorithm
    List<AlignGCGCStructMol> list = ransacPeakLists(alignedPeakList, peakList);
    PolynomialFunction function = this.getPolynomialFunction(list,
            ((SimpleGCGCDataset) alignedPeakList).getRowsRTRange());

    PeakListRow allRows[] = peakList.getRows().toArray(new PeakListRow[0]);

    for (PeakListRow row : allRows) {
        double rt = 0;
        if (!this.useOnlyRTI) {
            try {
                rt = function.value(((SimplePeakListRowGCGC) row).getRT1());
                if (Double.isNaN(rt) || rt == -1) {
                    rt = ((SimplePeakListRowGCGC) row).getRT1();
                }
            } catch (Exception ee) {
            }
        } else {
            try {
                rt = function.value(((SimplePeakListRowGCGC) row).getRTI());
                if (Double.isNaN(rt) || rt == -1) {
                    rt = ((SimplePeakListRowGCGC) row).getRTI();
                }
            } catch (Exception ee) {
            }
        }
        PeakListRow candidateRows[] = null;
        if (!this.useOnlyRTI) {
            Range RTIRange = this.rtiTolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRTI());
            Range RT1Range = this.rtToleranceAfterRTcorrection.getToleranceRange(rt);
            Range RT2Range = this.rt2Tolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRT2());
            // Get all rows of the aligned peaklist within parameter limits
            candidateRows = ((SimpleGCGCDataset) alignedPeakList).getRowsInsideRT1RT2RTIRange(RT1Range,
                    RT2Range, RTIRange);
        } else {
            Range RTIRange = this.rtiTolerance.getToleranceRange(((SimplePeakListRowGCGC) row).getRTI());
            candidateRows = ((SimpleGCGCDataset) alignedPeakList).getRowsInsideRT1RT2RTIRange(RTIRange);
        }
        for (PeakListRow candidate : candidateRows) {
            RowVsRowGCGCScore score;
            try {
                score = new RowVsRowGCGCScore(row, candidate, rtiTolerance.getTolerance(),
                        rtToleranceAfterRTcorrection.getTolerance(), rt);

                scoreSet.add(score);
                errorMessage = score.getErrorMessage();

            } catch (Exception e) {
                e.printStackTrace();
                setStatus(TaskStatus.ERROR);
                return null;
            }
        }
        progress = (double) processedRows++ / (double) totalRows;
    }

    // Iterate scores by descending order
    Iterator<RowVsRowGCGCScore> scoreIterator = scoreSet.iterator();
    while (scoreIterator.hasNext()) {

        RowVsRowGCGCScore score = scoreIterator.next();

        // Check if the row is already mapped
        if (alignmentMapping.containsKey(score.getPeakListRow())) {
            continue;
        }

        // Check if the spectra score is unacceptable
        if (score.score == -10) {
            continue;
        }

        // Check if the aligned row is already filled
        if (alignmentMapping.containsValue(score.getAlignedRow())) {
            continue;
        }

        alignmentMapping.put(score.getPeakListRow(), score.getAlignedRow());

    }

    return alignmentMapping;
}

From source file:pro.dbro.bart.TheActivity.java

/**
 * Adjusts the departure/arrival times of each route in {@code input} using the
 * cached real-time departure estimates held in the instance field
 * {@code currentEtdResponse}.
 *
 * Each route is matched (at most one-to-one) against an etd entry whose
 * destination station corresponds to the train head station of the route's
 * first or last leg. The difference between the etd's predicted departure and
 * the route's scheduled departure is then applied to the route's departure and
 * arrival dates and to the board/disembark times of every leg, and the routes
 * are re-sorted.
 *
 * NOTE(review): depends on {@code currentEtdResponse} being fresh — see the
 * TODO below; confirm the caller guarantees this before relying on the result.
 *
 * @param input the route response to update in place
 * @return the same {@code input} instance, with times adjusted where a match was found
 */
private routeResponse updateRouteResponseWithEtd(routeResponse input) {
    int numRoutes = input.routes.size();
    /***** Preliminary Argument Checks *****/
    // If response has no routes (due to filtering by removeExpiredRoutes), return
    if (numRoutes == 0)
        return input;

    // If there is no cached etdResponse to update with, return
    //TODO: Confirm that currentEtdResponse has already been verified fresh
    if (currentEtdResponse == null)
        return input;

    // If etdResponse indicates a closed station, return
    if (currentEtdResponse.message != null) {
        if (currentEtdResponse.message.contains("No data matched your criteria."))
            return input;
    }

    /***** End Preliminary Argument Checks *****/

    // BUGFIX: Using Date().getTime() could possibly return a time different than BART's API Locale
    // Bart doesn't provide timezone info in their date responses, so consider whether to coerce their responses to PST
    // In this instance, we can simply use the time returned with the etd response
    //long now = new Date().getTime();
    long now = input.date.getTime();
    int numEtds = currentEtdResponse.etds.size();
    int lastLeg;
    // Maps route index -> etd index. Both sides are kept unique (see the
    // containsKey/containsValue checks below) so each etd updates at most one route.
    HashMap<Integer, Integer> routeToEtd = new HashMap<Integer, Integer>();
    //find proper destination etds in currentEtdResponse
    //match times in routeResponse to times in proper etds

    // ASSUMPTION: etds and routes are sorted by time, increasing

    // For each route
    for (int x = 0; x < numRoutes; x++) {
        lastLeg = ((route) input.routes.get(x)).legs.size() - 1;
        // For each possible etd match
        for (int y = 0; y < numEtds; y++) {
            // DEBUG
            try {
                //Check that destination train is listed in terminal-station format. Ex: "Fremont" CounterEx: 'SFO/Milbrae'
                if (!BART.STATION_MAP.containsKey(((etd) currentEtdResponse.etds.get(y)).destination)) {
                    // If this is not a known silly-named train terminal station
                    if (!BART.KNOWN_SILLY_TRAINS
                            .containsKey(((etd) currentEtdResponse.etds.get(y)).destination)) {
                        // Let's try and guess what it is
                        boolean station_guessed = false;
                        for (int z = 0; z < BART.STATIONS.length; z++) {

                            // Can we match a station name within the silly-train name?
                            // haystack.indexOf(needle1);
                            if ((((etd) currentEtdResponse.etds.get(y)).destination)
                                    .indexOf(BART.STATIONS[z]) != -1) {
                                // Set the etd destination to the guessed real station name
                                ((etd) currentEtdResponse.etds.get(y)).destination = BART.STATIONS[z];
                                station_guessed = true;
                            }
                        }
                        if (!station_guessed) {
                            break; //We have to give up on updating routes based on this utterly silly-named etd
                        }
                    } else {
                        // Set the etd destination station to the real station name
                        ((etd) currentEtdResponse.etds.get(y)).destination = BART.KNOWN_SILLY_TRAINS
                                .get(((etd) currentEtdResponse.etds.get(y)).destination);
                        //break;
                    }
                } // end STATION_MAP silly-name train check and replace

                // Comparing BART station abbreviations: etd destination vs. first-leg head station
                if (BART.STATION_MAP.get(((etd) currentEtdResponse.etds.get(y)).destination)
                        .compareTo(((leg) ((route) input.routes.get(x)).legs.get(0)).trainHeadStation) == 0) {
                    //If matching etd is not already matched to a route, match it to this one
                    if (!routeToEtd.containsKey(x) && !routeToEtd.containsValue(y)) {
                        routeToEtd.put(x, y);
                        //Log.v("routeToEtd","Route: " + String.valueOf(x)+ " Etd: " + String.valueOf(y));
                    } else {
                        //if the etd is already claimed by a route, go to next etd
                        continue;
                    }
                } else if (BART.STATION_MAP.get(((etd) currentEtdResponse.etds.get(y)).destination).compareTo(
                        ((leg) ((route) input.routes.get(x)).legs.get(lastLeg)).trainHeadStation) == 0) {
                    // Same check against the last leg's head station
                    if (!routeToEtd.containsKey(x) && !routeToEtd.containsValue(y)) {
                        routeToEtd.put(x, y);
                        //Log.v("routeToEtd","Route: " + String.valueOf(x)+ " Etd: " + String.valueOf(y));
                    } else {
                        //if the etd is already claimed by a route, go to next etd
                        continue;
                    }
                }

            } catch (Throwable T) {
                // Likely, a train with destination listed as a
                // special tuple and not an actual station name
                // was encountered 
                //Log.v("WTF", "Find me");
                // NOTE(review): swallows everything, including NPEs from STATION_MAP
                // lookups — consider narrowing; verify this best-effort skip is intended.
            }
        } // end etd for loop

    } // end route for loop

    // Apply the time correction of each matched etd to its route and all leg times.
    Integer[] routesToUpdate = (Integer[]) ((routeToEtd.keySet()).toArray(new Integer[0]));
    for (int x = 0; x < routeToEtd.size(); x++) {
        // etd ETA - route ETA (ms)
        long timeCorrection = (now
                + ((etd) currentEtdResponse.etds.get(routeToEtd.get(routesToUpdate[x]))).minutesToArrival * 60
                        * 1000)
                - ((route) input.routes.get(routesToUpdate[x])).departureDate.getTime();
        // Adjust the arrival date based on the difference in departure dates
        ((route) input.routes.get(routesToUpdate[x])).arrivalDate
                .setTime(((route) input.routes.get(routesToUpdate[x])).arrivalDate.getTime() + timeCorrection);
        // Adjust departure date similarly
        ((route) input.routes.get(routesToUpdate[x])).departureDate.setTime(
                ((route) input.routes.get(routesToUpdate[x])).departureDate.getTime() + timeCorrection);

        // Update all leg times
        for (int y = 0; y < input.routes.get(routesToUpdate[x]).legs.size(); y++) {
            // Adjust leg's board time
            ((leg) ((route) input.routes.get(routesToUpdate[x])).legs.get(y)).boardTime.setTime(
                    ((leg) ((route) input.routes.get(routesToUpdate[x])).legs.get(y)).boardTime.getTime()
                            + timeCorrection);
            // Adjust leg's disembark time
            ((leg) ((route) input.routes.get(routesToUpdate[x])).legs.get(y)).disembarkTime.setTime(
                    ((leg) ((route) input.routes.get(routesToUpdate[x])).legs.get(y)).disembarkTime.getTime()
                            + timeCorrection);
        }
    }
    input.sortRoutes();
    return input;
}

From source file:org.apache.sysml.lops.compile.Dag.java

/**
 * Appends a remove-variable (rmvar) instruction for every transient matrix
 * variable that is updated in this statement block and whose previous value is
 * dead, i.e. the old value is read but never fed into any (transient or
 * persistent) write.
 *
 * @param sb    statement block being compiled; no-op when {@code null}
 * @param nodeV lops of the statement block to scan
 * @param inst  instruction list the generated rmvar instructions are appended to
 * @throws DMLRuntimeException if instruction preparation fails
 */
private static void deleteUpdatedTransientReadVariables(StatementBlock sb, ArrayList<Lop> nodeV,
        ArrayList<Instruction> inst) throws DMLRuntimeException {

    if (sb == null) {
        return;
    }

    if (LOG.isTraceEnabled()) {
        LOG.trace("In delete updated variables");
    }

    // CANDIDATE labels: transient matrix READs whose old value is not written out again.
    HashMap<String, Lop> candidateReads = new HashMap<String, Lop>();

    for (Lop lop : nodeV) {
        if (lop.getExecLocation() != ExecLocation.Data) {
            continue;
        }
        Data data = (Data) lop;
        if (!data.isTransient() || data.getOperationType() != OperationTypes.READ
                || data.getDataType() != DataType.MATRIX) {
            continue;
        }

        // The old value is dead only if this READ feeds no (transient/persistent) WRITE.
        boolean feedsWrite = false;
        for (Lop out : lop.getOutputs()) {
            if (out.getExecLocation() == ExecLocation.Data) {
                // any Data consumer of a READ has to be a WRITE
                feedsWrite = true;
                break;
            }
        }

        if (!feedsWrite) {
            candidateReads.put(lop.getOutputParameters().getLabel(), lop);
        }
    }

    // ACTUAL updates: transient matrix WRITEs to a candidate label, excluding the
    // degenerate case where a candidate READ feeds directly into the WRITE.
    HashSet<String> updatedLabels = new HashSet<String>();
    HashMap<String, Lop> updatedLabelSources = new HashMap<String, Lop>();

    for (Lop lop : nodeV) {
        if (lop.getExecLocation() != ExecLocation.Data) {
            continue;
        }
        Data data = (Data) lop;
        if (data.isTransient() && data.getOperationType() == OperationTypes.WRITE
                && data.getDataType() == DataType.MATRIX) {
            String label = lop.getOutputParameters().getLabel();
            if (candidateReads.containsKey(label)
                    && !candidateReads.containsValue(lop.getInputs().get(0))) {
                updatedLabels.add(label);
                updatedLabelSources.put(label, lop);
            }
        }
    }

    // Emit one rmvar instruction per confirmed label, carrying line-number info.
    for (String label : updatedLabels) {
        Instruction rmInst = VariableCPInstruction.prepareRemoveInstruction(label);
        rmInst.setLocation(updatedLabelSources.get(label));

        if (LOG.isTraceEnabled()) {
            LOG.trace(rmInst.toString());
        }
        inst.add(rmInst);
    }
}

From source file:edu.isi.karma.er.helper.SPARQLGeneratorUtil.java

/**
 * Generates a SPARQL SELECT query that fetches the given columns, in the order
 * they are provided, by traversing the R2RML triples-map graph rooted at
 * {@code root} breadth-first.
 *
 * NOTE(review): mutates instance state ({@code var_count}, {@code prefix_list},
 * {@code select_params}, {@code ParentMapingInfoList}) — not safe for
 * concurrent use of the same instance.
 *
 * @author shri
 * @param root the TriplesMap from which this method begins to fetch columns
 * @param columns the list of columns to be fetched; each entry maps "name" to the
 *        variable/display name and "url" to the column's complete predicate URL as
 *        defined in the ontology, e.g.
 *        &lt;http://isi.edu/integration/karma/ontologies/model/accelerometer#AccelerometerReading&gt;.
 *        Note there may exist many instances of a class within the same ontology.
 * @param distinct_query when true, the select clause is made DISTINCT
 * @return the generated SPARQL query string
 * */
public String get_query(TriplesMap root, ArrayList<HashMap<String, String>> columns, boolean distinct_query) {

    // BFS work queue holding TriplesMap, Predicate, and RefObjectMap entries
    ArrayList<Object> queue = new ArrayList<>();
    queue.add(root);
    StringBuffer query = new StringBuffer();
    this.var_count = 1;
    this.prefix_list = new HashMap<>();
    this.select_params = new StringBuffer();
    // triples already emitted into the query, mapped to their variable name
    HashMap<TriplesMap, String> markedTriples = new HashMap<>();

    ArrayList<String> visited_columns = new ArrayList<>();
    this.ParentMapingInfoList = new HashMap<>();

    // save the column predicate url and the column name to be displayed
    HashMap<Predicate, String> predicateList = new HashMap<>();
    // display name -> predicate URL
    HashMap<String, String> columnList = new HashMap<>();
    if (columns != null && !columns.isEmpty()) {
        for (HashMap<String, String> col : columns) {
            columnList.put(col.get("name"), col.get("url"));
        }
    }

    // using a BFS approach, we traverse the tree from the root node and add triples/predicates to the queue
    while (!queue.isEmpty()) {
        Object currentObj = queue.remove(0);

        // if this is a tripleMap, then add all its RefObjects to the queue
        // for the predicates, add only the ones that satisfy the criteria of being <...hasValue>
        if (currentObj instanceof TriplesMap) {
            String var = "x" + var_count;
            TriplesMap triple = (TriplesMap) currentObj;
            boolean foundHasValue = false;
            List<PredicateObjectMap> predicates = triple.getPredicateObjectMaps();

            for (PredicateObjectMap p_map : predicates) {

                if (p_map.getObject().hasRefObjectMap()) {
                    RefObjectMap objMap = p_map.getObject().getRefObjectMap();
                    queue.add(objMap.getParentTriplesMap());

                    logger.info(triple.getSubject().getId() + "  ---> "
                            + objMap.getParentTriplesMap().getSubject().getId());

                    // maintain a list of mapping properties between triples
                    ParentMapingInfoList.put(objMap.getParentTriplesMap().getSubject().getId(),
                            new ParentMapingInfo(triple, p_map.getPredicate()));

                } else {
                    queue.add(p_map.getPredicate());
                    predicateList.put(p_map.getPredicate(), var);
                    foundHasValue = true;
                }
            }
            // if this triple is marked to be included in the query,
            // we add it to the markedTriples list and add to the query string
            // for its class type Eg. 
            // Prefix pref1: <.../.../Input>
            // x2 a pref1:
            if (foundHasValue) {
                markedTriples.put(triple, var);
                // NOTE(review): assumes every marked subject has at least one rdfs:type —
                // get(0) would throw otherwise; confirm against the model builder.
                String rdfsTypes = triple.getSubject().getRdfsType().get(0).toString();
                this.prefix_list.put("pref" + var_count, rdfsTypes);
                query.append(" ?" + var + " a pref" + var_count + ": .");

                // if the parent of this triple is also marked for the query
                // then we add the relation between triples to the query
                ParentMapingInfo parentTriple = ParentMapingInfoList.get(triple.getSubject().getId());

                // from the current node, keep popping out till we reach the last node in the 
                // parent map to see if any of the parents are connected
                if (parentTriple != null) {
                    String sq = checkParentMarked(triple, markedTriples, var);
                    if (sq.length() > 1) {
                        query.append(sq);
                    }

                }

            }
            var_count++;
        }
        // if it is a predicate Object, create a variable in the query string
        else if (currentObj instanceof Predicate) {
            Predicate predicate = (Predicate) currentObj;
            String k = predicate.getTemplate().toString();
            k = k.replace('<', ' ').replace('>', ' ').trim();
            if (columns != null && !columns.isEmpty()) {
                if (columnList.containsValue(k)) {
                    // find the (first unvisited) display name bound to this predicate URL
                    Iterator<String> itr = columnList.keySet().iterator();
                    while (itr.hasNext()) {
                        String cName = itr.next();
                        if (columnList.get(cName).equals(k) && !visited_columns.contains(cName)) {
                            // emit: ?xN <predicate> ?columnName .
                            query.append(" ?" + predicateList.get(predicate)).append(" ")
                                    .append(predicate.getTemplate()).append(" ?").append(cName + " . ");
                            visited_columns.add(cName);
                            var_count++;
                            break;
                        }
                    }
                } else {
                    logger.info("ColumnList does not contain : " + k + " " + currentObj);
                }
            } else {
                // no column filter: derive the variable name from the end of the URL,
                // after the last '#' or, failing that, the last '/'
                int index = 0;
                if (k.indexOf("#") > 0) {
                    index = k.lastIndexOf('#') + 1;
                } else if (k.indexOf("/") > 0) {
                    index = k.lastIndexOf('/') + 1;
                }
                query.append(" ?" + predicateList.get(predicate)).append(" ").append(predicate.getTemplate())
                        .append(" ?").append(k.substring(index, k.length())).append(" .");
                var_count++;
            }

        }
        // if this is a RefObject add the Child Triple to the queue
        else if (currentObj instanceof RefObjectMap) {
            RefObjectMap refObj = (RefObjectMap) currentObj;
            TriplesMap t = refObj.getParentTriplesMap();
            queue.add(t);

        }
    }

    // append the list of prefixes
    Iterator<String> itr = this.prefix_list.keySet().iterator();
    StringBuffer sQuery = new StringBuffer();
    while (itr.hasNext()) {
        String key = itr.next();
        sQuery.append(" PREFIX ").append(key).append(": ").append(this.prefix_list.get(key));
    }
    // append the columns to be selected in the order they are specified
    sQuery.append(" select ");
    if (distinct_query) {
        sQuery.append(" distinct ");
    }
    // NOTE(review): unlike the branches above, this loop does not guard against
    // columns == null and would NPE — confirm callers never pass null.
    for (HashMap<String, String> s : columns) {
        sQuery.append(" ?" + s.get("name"));
    }
    sQuery.append(" where { ").append(query.toString()).append(" } ");
    logger.info("Generated Query : " + sQuery);
    return sQuery.toString();
}

From source file:com.ibm.bi.dml.lops.compile.Dag.java

/**
 * Appends a remove-variable (rmvar) instruction for every transient matrix
 * variable that is updated in this statement block and whose previous value is
 * dead, i.e. the old value is read but never fed into any (transient or
 * persistent) write.
 *
 * A label qualifies when (1) a transient matrix READ of the label exists whose
 * outputs contain no Data node (any Data consumer of a READ would be a WRITE),
 * and (2) a transient matrix WRITE of the same label exists that is not fed
 * directly by such a candidate READ.
 *
 * @param sb    statement block being compiled; no-op when {@code null}
 * @param nodeV lops of the statement block to scan
 * @param inst  instruction list the generated rmvar instructions are appended to
 * @throws DMLRuntimeException if instruction preparation fails
 * @throws DMLUnsupportedOperationException declared for API compatibility
 */
private void deleteUpdatedTransientReadVariables(StatementBlock sb, ArrayList<N> nodeV,
        ArrayList<Instruction> inst) throws DMLRuntimeException, DMLUnsupportedOperationException {

    if (sb == null)
        return;

    // guard trace logging (consistent with the rest of this class)
    if (LOG.isTraceEnabled())
        LOG.trace("In delete updated variables");

    // CANDIDATE list of variables which could have been updated in this statement block
    HashMap<String, N> labelNodeMapping = new HashMap<String, N>();

    // ACTUAL list of variables whose value is updated, AND the old value of the
    // variable is no longer accessible/used; the mapped node carries line-number info
    HashSet<String> updatedLabels = new HashSet<String>();
    HashMap<String, N> updatedLabelsLineNum = new HashMap<String, N>();

    // first capture all transient read variables
    for (N node : nodeV) {

        if (node.getExecLocation() == ExecLocation.Data && ((Data) node).isTransient()
                && ((Data) node).getOperationType() == OperationTypes.READ
                && ((Data) node).getDataType() == DataType.MATRIX) {

            // "node" is considered as updated ONLY IF the old value is not used any more:
            // make sure this READ node does not feed into any (transient/persistent) WRITE
            boolean hasWriteParent = false;
            for (Lop p : node.getOutputs()) {
                if (p.getExecLocation() == ExecLocation.Data) {
                    // if "p" is of type Data, then it has to be a WRITE
                    hasWriteParent = true;
                    break;
                }
            }

            if (!hasWriteParent) {
                // node has no parent of type WRITE, so this is a CANDIDATE variable;
                // add it to labelNodeMapping so that it is considered in further processing
                labelNodeMapping.put(node.getOutputParameters().getLabel(), node);
            }
        }
    }

    // capture updated transient write variables
    for (N node : nodeV) {

        if (node.getExecLocation() == ExecLocation.Data && ((Data) node).isTransient()
                && ((Data) node).getOperationType() == OperationTypes.WRITE
                && ((Data) node).getDataType() == DataType.MATRIX
                && labelNodeMapping.containsKey(node.getOutputParameters().getLabel()) // matching transient read is present
                && !labelNodeMapping.containsValue(node.getInputs().get(0)) // avoid transient read feeding directly into a transient write
        ) {
            updatedLabels.add(node.getOutputParameters().getLabel());
            updatedLabelsLineNum.put(node.getOutputParameters().getLabel(), node);
        }
    }

    // generate RM instructions
    for (String label : updatedLabels) {
        Instruction rm_inst = VariableCPInstruction.prepareRemoveInstruction(label);
        rm_inst.setLocation(updatedLabelsLineNum.get(label));

        if (LOG.isTraceEnabled())
            LOG.trace(rm_inst.toString());
        inst.add(rm_inst);
    }

}