Example usage for java.util TreeMap get

List of usage examples for java.util TreeMap get

Introduction

On this page you can find example usages of java.util.TreeMap.get.

Prototype

public V get(Object key) 

Source Link

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.

Usage

From source file:uk.ac.leeds.ccg.andyt.projects.moses.process.RegressionReport_UK1.java

/**
 * Writes a residuals.csv file (next to observed_File) containing, per output
 * area, the difference between the expected (SAR) and observed (CAS)
 * optimisation-constraint rows.
 *
 * @param observed_File file from which the expected (SAR) constraints are loaded
 * @param expected_File file from which the observed (CAS) constraints are loaded
 * @throws Exception if loading or writing fails
 */
public void writeOutResidualsHSARHP_ISARCEP(File observed_File, File expected_File) throws Exception {
    File outputFile = new File(observed_File.getParentFile(), "residuals.csv");
    // try-with-resources: the original never closed the stream (resource leak).
    try (FileOutputStream a_FileOutputStream = new FileOutputStream(outputFile)) {
        // NOTE(review): both maps are loaded via loadCASOptimistaionConstraints;
        // the "SAR"/expected map comes from observed_File and the "CAS"/observed
        // map from expected_File — confirm this pairing is intended.
        TreeMap<String, double[]> a_SAROptimistaionConstraints = loadCASOptimistaionConstraints(observed_File);
        TreeMap<String, double[]> a_CASOptimistaionConstraints = loadCASOptimistaionConstraints(expected_File);
        String header = OutputDataHandler_OptimisationConstraints.getHSARHP_ISARCEPHeader();
        a_FileOutputStream.write(header.getBytes());
        a_FileOutputStream.write(StreamTokenizer.TT_EOL); // TT_EOL == '\n'
        a_FileOutputStream.flush();
        // One CSV row per output area: "oa,r0,r1,...,rn".
        for (java.util.Map.Entry<String, double[]> entry : a_SAROptimistaionConstraints.entrySet()) {
            String oa = entry.getKey();
            double[] a_SARExpectedRow = entry.getValue();
            // NOTE(review): NPEs if the CAS map lacks an OA present in the SAR
            // map — same behaviour as the original; confirm keys always match.
            double[] a_CASObservedRow = a_CASOptimistaionConstraints.get(oa);
            // StringBuilder instead of repeated String concatenation in the loop.
            StringBuilder line = new StringBuilder(oa).append(',');
            for (int i = 0; i < a_SARExpectedRow.length; i++) {
                if (i > 0) {
                    line.append(',');
                }
                line.append(a_SARExpectedRow[i] - a_CASObservedRow[i]);
            }
            a_FileOutputStream.write(line.toString().getBytes());
            a_FileOutputStream.write(StreamTokenizer.TT_EOL);
            a_FileOutputStream.flush();
        }
    }
}

From source file:com.sfs.whichdoctor.dao.AddressVerificationDAOImpl.java

/**
 * Creates an address verification record for the supplied address.
 *
 * Validates the address, resolves its parent person or organisation, loads
 * the pending/processed status ids, inserts a new verification row, then
 * marks any older pending requests for the same address GUID as processed.
 *
 * @param address the address to create a verification request for
 * @return the id of the new address verification record, or 0 if the insert
 *         affected no rows
 * @throws WhichDoctorDaoException if the address is invalid, no parent
 *         person/organisation exists, a process status cannot be loaded, or
 *         the insert fails
 */
public final int create(final AddressBean address) throws WhichDoctorDaoException {

    // Basic precondition checks on the incoming address.
    if (address == null) {
        throw new WhichDoctorDaoException("The address cannot be null");
    }
    if (address.getGUID() < 1) {
        throw new WhichDoctorDaoException("The address requires a valid GUID");
    }
    if (StringUtils.isBlank(address.getCountry())) {
        throw new WhichDoctorDaoException("A valid country is required");
    }
    if (StringUtils.isBlank(address.getAddressField(0))) {
        throw new WhichDoctorDaoException("An address requires at least one line");
    }

    // The message for any unprocessed records
    // (note: "superceeded" typo is in the stored message; left as-is because
    // it is runtime data, not a comment)
    String processedMessage = "Address verification request superceeded by " + "a WhichDoctor change";

    // Load the parent person or organisation
    int personIdentifier = 0;
    String personName = "";
    String organisationName = "";

    boolean parentFound = false;

    // First try to resolve the reference GUID as a person.
    try {
        PersonBean person = this.personDAO.loadGUID(address.getReferenceGUID());
        if (person != null) {
            personIdentifier = person.getPersonIdentifier();
            personName = person.getPreferredName() + " " + person.getLastName();
            parentFound = true;
        }
    } catch (WhichDoctorDaoException wde) {
        // Not fatal: fall through and try an organisation instead.
        dataLogger.info("No person found for the address: " + wde.getMessage());
    }

    // Fall back to resolving the reference GUID as an organisation.
    if (!parentFound) {
        try {
            OrganisationBean org = this.organisationDAO.loadGUID(address.getReferenceGUID());
            if (org != null) {
                organisationName = org.getName();
                parentFound = true;
            }
        } catch (WhichDoctorDaoException wde) {
            dataLogger.info("No organisation found for the address: " + wde.getMessage());
        }
    }

    if (!parentFound) {
        throw new WhichDoctorDaoException(
                "No valid person or organisation is " + "associated with this address");
    }

    /* Identify the pending and processed ProcessStatusId */
    int pendingProcessStatusId = 0;
    int processedProcessStatusId = 0;

    try {
        ObjectTypeBean object = this.getObjectTypeDAO().load("Address Verification Process Status", "",
                PENDING);
        pendingProcessStatusId = object.getObjectTypeId();
    } catch (SFSDaoException sfe) {
        dataLogger.error("Error loading address verification process status: " + sfe.getMessage());
        throw new WhichDoctorDaoException("A valid address verification process status is required");
    }

    try {
        ObjectTypeBean object = this.getObjectTypeDAO().load("Address Verification Process Status", "",
                PROCESSED);
        processedProcessStatusId = object.getObjectTypeId();
    } catch (SFSDaoException sfe) {
        dataLogger.error("Error loading address verification process status: " + sfe.getMessage());
        throw new WhichDoctorDaoException("A valid address verification process status is required");
    }

    int addressVerificationId = 0;
    int createCount = 0;

    // Create the new record
    Timestamp sqlTimeStamp = new Timestamp(Calendar.getInstance().getTimeInMillis());

    // Copy the six address fields into an index-keyed map so they can be
    // bound positionally in the insert below.
    TreeMap<Integer, String> addressMap = new TreeMap<Integer, String>();
    addressMap.put(0, address.getAddressField(0));
    addressMap.put(1, address.getAddressField(1));
    addressMap.put(2, address.getAddressField(2));
    addressMap.put(3, address.getAddressField(3));
    addressMap.put(4, address.getAddressField(4));
    addressMap.put(5, address.getAddressField(5));

    // Remove the suburb and city values from the map — presumably the last
    // two populated fields hold suburb/city, which are bound separately
    // below; TODO confirm against AddressBean's field layout.
    addressMap.put(address.getAddressFieldCount() - 1, "");
    addressMap.put(address.getAddressFieldCount() - 2, "");

    try {
        createCount = this.getJdbcTemplateWriter().update(this.getSQL().getValue("addressVerification/create"),
                new Object[] { address.getGUID(), address.getReferenceGUID(), personIdentifier, personName,
                        organisationName, pendingProcessStatusId, sqlTimeStamp, addressMap.get(0),
                        addressMap.get(1), addressMap.get(2), addressMap.get(3), addressMap.get(4),
                        addressMap.get(5), address.getSuburb(), address.getCity(),
                        address.getStateAbbreviation(), address.getCountry(), address.getPostCode() });

    } catch (DataAccessException de) {
        dataLogger.error("Error creating address verification record: " + de.getMessage());
        throw new WhichDoctorDaoException(
                "Error creating address verification " + "record: " + de.getMessage());
    }

    if (createCount > 0) {
        // Find the maximum address verification id
        // NOTE(review): findMax assumes no concurrent insert between the
        // update above and this query — verify this is acceptable.
        addressVerificationId = this.getJdbcTemplateReader()
                .queryForInt(this.getSQL().getValue("addressVerification/findMax"));

        // Set the processed flag to true for any pending address
        // verification records for this address guid
        this.getJdbcTemplateWriter().update(this.getSQL().getValue("addressVerification/updateProcess"),
                new Object[] { processedProcessStatusId, processedMessage, address.getGUID(),
                        addressVerificationId, pendingProcessStatusId });

        // Update the address verification status
        this.addressDAO.updateVerificationStatus("Pending verification", address.getGUID());
    }

    return addressVerificationId;
}

From source file:hydrograph.ui.dataviewer.filter.FilterHelper.java

/**
 * Checks if column is modifiable./*  www. ja  v  a  2s . c om*/
 * 
 * @param groupSelectionMap
 *            the group selection map
 * @param selectionList
 *            the selection list
 * @return true, if is column modifiable
 */
public boolean isColumnModifiable(TreeMap<Integer, List<List<Integer>>> groupSelectionMap,
        List<Integer> selectionList) {
    boolean retValue = false;
    for (int i = groupSelectionMap.lastKey(); i >= 0; i--) {
        retValue = true;
        List<List<Integer>> groups = new ArrayList<>(groupSelectionMap.get(i));
        for (List<Integer> grp : groups) {
            if (ListUtils.intersection(selectionList, grp).size() > 0) {
                retValue = false;
            }
        }

        if (retValue) {
            groupSelectionMap.get(i).add(selectionList);
            break;
        }
    }
    return retValue;
}

From source file:hydrograph.ui.dataviewer.filter.FilterHelper.java

/**
 * Rearrange groups./*from   w w w  .  j  av  a 2s . c o m*/
 * 
 * @param groupSelectionMap
 *            the group selection map
 * @param selectionList
 *            the selection list
 */
public void rearrangeGroups(TreeMap<Integer, List<List<Integer>>> groupSelectionMap,
        List<Integer> selectionList) {
    List<Integer> tempList = new ArrayList<>();
    int lastKey = groupSelectionMap.lastKey();
    for (int i = lastKey; i >= 0; i--) {
        List<List<Integer>> groups = groupSelectionMap.get(i);
        for (int j = 0; j <= groups.size() - 1; j++) {
            if (selectionList.size() < groups.get(j).size()
                    && ListUtils.intersection(selectionList, groups.get(j)).size() > 0) {
                tempList.addAll(groups.get(j));
                groups.get(j).clear();
                groups.set(j, new ArrayList<Integer>(selectionList));
                selectionList.clear();
                selectionList.addAll(tempList);
            }
            tempList.clear();
        }
    }
}

From source file:hydrograph.ui.dataviewer.filter.FilterHelper.java

/**
 * After a row deletion, reports whether the (re-indexed) selection now
 * duplicates an existing group: returns true when at least two registered
 * groups are element-wise equal to the selection.
 *
 * @param groupSelectionMap map of grouping level to the groups (row-index
 *            lists) registered at that level
 * @param selectionList the row indices currently selected
 * @return true if the selection matches two or more registered groups
 */
public boolean rearrangeGroupsAfterDeleteRow(TreeMap<Integer, List<List<Integer>>> groupSelectionMap,
        List<Integer> selectionList) {
    int count = 0;
    int lastKey = groupSelectionMap.lastKey();
    for (int i = lastKey; i >= 0; i--) {
        List<List<Integer>> groups = groupSelectionMap.get(i);
        if (groups == null) {
            // Fix: the original NPE'd when a key in [0, lastKey] was absent.
            continue;
        }
        for (List<Integer> group : groups) {
            // List.equals replaces the size check + ListUtils.isEqualList:
            // both lists are non-null here, so semantics are identical.
            if (selectionList.equals(group)) {
                count++;
            }
        }
    }
    return count >= 2;
}

From source file:org.cloudfoundry.client.lib.CloudFoundryClient.java

public Map<String, String> getCrashLogs(String appName) throws CloudFoundryException {
    String filePath = "";// TODO - where am I supposed to get this value?
    int index = 0;// TODO - where am I supposed to get this value?
    String urlPath = getFileUrlPath(index, filePath);
    CrashesInfo crashes = getCrashes(appName);
    if (crashes.getCrashes().isEmpty()) {
        return Collections.emptyMap();
    }//from  www  . ja  v  a 2  s.c o  m
    TreeMap<Date, String> crashInstances = new TreeMap<Date, String>();
    for (CrashInfo crash : crashes.getCrashes()) {
        crashInstances.put(crash.getSince(), crash.getInstance());
    }
    String instance = crashInstances.get(crashInstances.lastKey());
    return doGetLogs(urlPath, appName, instance);
}

From source file:com.att.aro.core.packetanalysis.impl.VideoUsageAnalysisImpl.java

/**
 * Marks each manifest in the current video usage as valid or invalid: a
 * manifest is invalid when it has no video events or when any of its events
 * has a negative segment number.
 */
private void validateManifests() {
    // Nothing to validate until video usage data exists.
    if (videoUsage == null) {
        return;
    }
    TreeMap<Double, AROManifest> manifestMap = videoUsage.getAroManifestMap();
    for (AROManifest manifest : manifestMap.values()) {
        // Assume valid, then invalidate on any failing check.
        manifest.setValid(true);
        if (manifest.getVideoEventList().isEmpty()) {
            manifest.setValid(false);
        }
        for (VideoEvent event : manifest.getVideoEventList().values()) {
            if (event.getSegment() < 0) {
                manifest.setValid(false);
            }
        }
    }
}

From source file:de.uzk.hki.da.sb.SIPFactory.java

/**
 * Checks that license information in the package metadata is consistent with
 * the contract rights: at most one license source (PREMIS vs. METS/LIDO) may
 * be present, all metadata files of one type must agree on the license, and
 * publication requires some license to exist.
 */
private Feedback checkMetadataForLicense(int jobId, File sourceFolder, File packageFolder) {
    // License present in the contract (PREMIS) settings?
    boolean premisLicenseBool = contractRights.getCclincense() != null;
    boolean metsLicenseBool = false;
    boolean lidoLicenseBool = false;
    boolean publicationBool = contractRights.getPublicRights().getAllowPublication();

    TreeMap<File, String> metadataFileWithType;
    try {
        metadataFileWithType = new FormatDetectionService(sourceFolder).getMetadataFileWithType();

        if (metadataFileWithType.containsValue(C.CB_PACKAGETYPE_METS)) {
            // Collect all METS files from the detected metadata files.
            ArrayList<File> metsFiles = new ArrayList<File>();

            ArrayList<MetsLicense> licenseMetsFile = new ArrayList<MetsLicense>();
            for (File f : metadataFileWithType.keySet())
                if (metadataFileWithType.get(f).equals(C.CB_PACKAGETYPE_METS))
                    metsFiles.add(f);
            for (File f : metsFiles) {// assuming more as usual mets is allowed (check is done by FormatDetectionService) e.g. publicMets for testcase-creation
                SAXBuilder builder = XMLUtils.createNonvalidatingSaxBuilder();
                Document metsDoc = builder.build(f);
                MetsParser mp = new MetsParser(metsDoc);
                licenseMetsFile.add(mp.getLicenseForWholeMets());
            }
            // Sort with nulls last: if the first entry is null, all are null;
            // if first and last differ, the files disagree on the license.
            Collections.sort(licenseMetsFile, new NullLastComparator<MetsLicense>());
            if (licenseMetsFile.get(0) == null) // all licenses are null
                metsLicenseBool = false;
            else if (!licenseMetsFile.get(0).equals(licenseMetsFile.get(licenseMetsFile.size() - 1))) // first and last lic have to be same in sorted array
                return Feedback.INVALID_LICENSE_DATA_IN_METADATA;
            else
                metsLicenseBool = true;
        } else if (metadataFileWithType.containsValue(C.CB_PACKAGETYPE_LIDO)) {
            // Same consistency check for LIDO metadata files.
            ArrayList<File> lidoFiles = new ArrayList<File>();

            ArrayList<LidoLicense> licenseLidoFile = new ArrayList<LidoLicense>();
            for (File f : metadataFileWithType.keySet())
                if (metadataFileWithType.get(f).equals(C.CB_PACKAGETYPE_LIDO))
                    lidoFiles.add(f);
            for (File f : lidoFiles) {// assuming more as one metadata is allowed (check is done by FormatDetectionService) 
                SAXBuilder builder = XMLUtils.createNonvalidatingSaxBuilder();
                Document metsDoc = builder.build(f);
                LidoParser lp = new LidoParser(metsDoc);
                licenseLidoFile.add(lp.getLicenseForWholeLido());
            }
            Collections.sort(licenseLidoFile, new NullLastComparator<LidoLicense>());
            if (licenseLidoFile.get(0) == null) // all licenses are null
                lidoLicenseBool = false;
            else if (!licenseLidoFile.get(0).equals(licenseLidoFile.get(licenseLidoFile.size() - 1))) // first and last lic have to be same in sorted array
                return Feedback.INVALID_LICENSE_DATA_IN_METADATA;
            else
                lidoLicenseBool = true;
        }
    } catch (Exception e) {
        // Any parse/IO failure is treated as invalid license data.
        e.printStackTrace();
        return Feedback.INVALID_LICENSE_DATA_IN_METADATA;
    }
    //activate to be able to create non licensed test sips
    //publicationBool=false;
    //premisLicenseBool=false;
    //publicationBool=false;
    // A license must come from exactly one source.
    if (premisLicenseBool && (metsLicenseBool || lidoLicenseBool)) {
        return Feedback.DUPLICATE_LICENSE_DATA;
    }

    // Publication without any license at all is not allowed.
    if (publicationBool && !premisLicenseBool && !metsLicenseBool && !lidoLicenseBool) {
        return Feedback.PUBLICATION_NO_LICENSE;
    }

    logger.info(
            "License is satisfiable: Premis-License:" + premisLicenseBool + " Mets-License:" + metsLicenseBool
                    + " Lido-License:" + lidoLicenseBool + " Publication-Decision:" + publicationBool);

    return Feedback.SUCCESS;
}

From source file:hydrograph.ui.dataviewer.filter.FilterConditionsDialog.java

/**
 * Builds a label provider that colours the background of cells in the given
 * group column: rows belonging to a registered group get that group's
 * colour, other rows get a white background.
 *
 * @param tableViewer the filter table viewer
 * @param conditionsList the filter conditions (kept for the provider's
 *            closure; not read here)
 * @param columnIndex the group column this provider paints
 * @param groupSelectionMap map of group column index to the groups
 *            (row-index lists) registered for it
 * @return the cell label provider for the column
 */
private CellLabelProvider getDummyColumn(final TableViewer tableViewer, final List<Condition> conditionsList,
        final Integer columnIndex, final TreeMap<Integer, List<List<Integer>>> groupSelectionMap) {
    return new CellLabelProvider() {

        @Override
        public void update(ViewerCell cell) {
            final TableItem item = (TableItem) cell.getItem();
            List<List<Integer>> checkedGrpRowIndices = groupSelectionMap.get(columnIndex);
            // Only paint the cell belonging to this provider's column
            // (offset by the three fixed leading columns).
            if (cell.getColumnIndex() == (columnIndex + 3)) {
                if (null != checkedGrpRowIndices && !checkedGrpRowIndices.isEmpty()) {
                    // Fix: was a raw List/ArrayList; use List<Integer>.
                    // Flatten all groups to test membership of this row.
                    List<Integer> allGroupedRows = new ArrayList<>();
                    for (List<Integer> checkedIndex : checkedGrpRowIndices) {
                        allGroupedRows.addAll(checkedIndex);
                    }
                    int indexOf = tableViewer.getTable().indexOf(item);
                    if (allGroupedRows.contains(indexOf)) {
                        // Colour by the first group containing this row.
                        for (int i = 0; i < checkedGrpRowIndices.size(); i++) {
                            if ((checkedGrpRowIndices.get(i)).contains(indexOf)) {
                                cell.setBackground(FilterHelper.INSTANCE.getColor(i));
                                break;
                            }
                        }
                    } else {
                        cell.setBackground(CustomColorRegistry.INSTANCE.getColorFromRegistry(255, 255, 255));
                    }
                }
            }
        }
    };
}

From source file:org.commoncrawl.service.listcrawler.CrawlHistoryManager.java

/**
 * Test helper: replays the given history items through the log manager's
 * syncList and asserts that the synced result has exactly the same keys and
 * equal values as the input map.
 */
private static void syncAndValidateItems(TreeMap<URLFP, ProxyCrawlHistoryItem> items,
        CrawlHistoryManager logManager) throws IOException {
    // Collect everything the sync callback delivers into a fresh map.
    final TreeMap<URLFP, ProxyCrawlHistoryItem> syncedItemList = new TreeMap<URLFP, ProxyCrawlHistoryItem>();

    try {
        logManager.syncList(0L, Sets.newTreeSet(items.keySet()), new ItemUpdater() {

            @Override
            public void updateItemState(URLFP fingerprint, ProxyCrawlHistoryItem item) throws IOException {
                try {
                    // Clone both key and value so later mutation by the log
                    // manager cannot affect the captured snapshot.
                    syncedItemList.put((URLFP) fingerprint.clone(), (ProxyCrawlHistoryItem) item.clone());
                } catch (CloneNotSupportedException e) {
                    // Both types are expected to be cloneable; record and move on.
                    e.printStackTrace();
                }
            }

        });
    } catch (IOException e) {
        // Sync failure fails the test.
        LOG.error(CCStringUtils.stringifyException(e));
        Assert.assertTrue(false);
    }

    // assert that the key set is equal
    Assert.assertEquals(items.keySet(), syncedItemList.keySet());
    // ok now validate that the values are equal
    for (Map.Entry<URLFP, ProxyCrawlHistoryItem> item : items.entrySet()) {
        ProxyCrawlHistoryItem other = syncedItemList.get(item.getKey());
        Assert.assertEquals(item.getValue(), other);
    }
}