Example usage for java.util TreeMap get

Introduction

This page collects example usages of java.util TreeMap get, drawn from the open-source projects listed below.

Prototype

public V get(Object key) 

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
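
A minimal, self-contained sketch of this contract (class and variable names are illustrative):

import java.util.TreeMap;

public class TreeMapGetDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> ages = new TreeMap<>();
        ages.put("alice", 30);

        System.out.println(ages.get("alice")); // 30
        System.out.println(ages.get("bob"));   // null: no mapping for this key

        // Unlike HashMap, a TreeMap using natural ordering compares keys via
        // compareTo, so get(null) throws NullPointerException instead of
        // returning null.
    }
}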

Usage

From source file:com.act.lcms.db.analysis.WaveformAnalysis.java

/**
 * This function takes in a standard molecule's intensity-vs-time data and a collection of negative-control data
 * and plots the SNR value at each time period, assuming the time-jitter effects are negligible (more info on this
 * is here: https://github.com/20n/act/issues/136). Based on the SNR values, it rank-orders the Metlin ions of the
 * molecule.
 * @param ionToIntensityData A map of chemical to intensity/time data
 * @param standardChemical The chemical that is the standard of analysis
 * @return A sorted linked hash map of Metlin ion to (time, SNR) pairs, ordered from highest SNR to lowest
 */
public static LinkedHashMap<String, XZ> performSNRAnalysisAndReturnMetlinIonsRankOrderedBySNR(
        ChemicalToMapOfMetlinIonsToIntensityTimeValues ionToIntensityData, String standardChemical,
        Map<String, List<Double>> restrictedTimeWindows) {

    TreeMap<Double, List<String>> sortedIntensityToIon = new TreeMap<>(Collections.reverseOrder());
    Map<String, XZ> ionToSNR = new HashMap<>();

    for (String ion : ionToIntensityData.getMetlinIonsOfChemical(standardChemical).keySet()) {

        // We first compress the ion spectra by 5 seconds (this number was determined by trial and error on
        // labelled spectra). Then, we do feature detection of peaks in the compressed data.
        List<XZ> standardIntensityTime = detectPeaksInIntensityTimeWaveform(
                compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                        ionToIntensityData.getMetlinIonsOfChemical(standardChemical).get(ion),
                        COMPRESSION_CONSTANT).getLeft(),
                PEAK_DETECTION_THRESHOLD);

        List<List<XZ>> negativeIntensityTimes = new ArrayList<>();
        for (String chemical : ionToIntensityData.getIonList()) {
            if (!chemical.equals(standardChemical)) {
                negativeIntensityTimes.add(compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                        ionToIntensityData.getMetlinIonsOfChemical(chemical).get(ion), COMPRESSION_CONSTANT)
                                .getLeft());
            }
        }

        List<XZ> rmsOfNegativeValues = rmsOfIntensityTimeGraphs(negativeIntensityTimes);

        List<Double> listOfTimeWindows = new ArrayList<>();
        if (restrictedTimeWindows != null && restrictedTimeWindows.get(ion) != null) {
            listOfTimeWindows.addAll(restrictedTimeWindows.get(ion));
        }

        boolean canUpdateMaxSNRAndTime = true;
        boolean useRestrictedTimeWindowAnalysis = false;

        // If there are restricted time windows, set the default to not update SNR until certain conditions are met.
        if (listOfTimeWindows.size() > 0) {
            useRestrictedTimeWindowAnalysis = true;
            canUpdateMaxSNRAndTime = false;
        }

        double maxSNR = 0.0;
        double maxTime = 0.0;

        // For each of the peaks detected in the positive control, find the spectral intensity values from the negative
        // controls and calculate SNR based on that.
        for (XZ positivePosition : standardIntensityTime) {

            Double time = positivePosition.getTime();

            XZ negativeControlPosition = null;
            for (XZ position : rmsOfNegativeValues) {
                if (position.getTime() > time - POSITION_TIME_WINDOW_IN_SECONDS
                        && position.getTime() < time + POSITION_TIME_WINDOW_IN_SECONDS) {
                    negativeControlPosition = position;
                    break;
                }
            }

            // Note: if no negative-control position fell inside the window above, negativeControlPosition
            // is still null here and this division would throw a NullPointerException; the data is assumed
            // to always contain a matching negative-control measurement.
            Double snr = Math.pow(positivePosition.getIntensity() / negativeControlPosition.getIntensity(), 2);

            // If the given time point overlaps with one of the restricted time windows, we can update the SNR
            // calculations.
            for (Double restrictedTimeWindow : listOfTimeWindows) {
                if ((time > restrictedTimeWindow - RESTRICTED_RETENTION_TIME_WINDOW_IN_SECONDS)
                        && (time < restrictedTimeWindow + RESTRICTED_RETENTION_TIME_WINDOW_IN_SECONDS)) {
                    canUpdateMaxSNRAndTime = true;
                    break;
                }
            }

            if (canUpdateMaxSNRAndTime) {
                maxSNR = Math.max(maxSNR, snr);
                maxTime = Math.max(maxTime, time);
            }

            if (useRestrictedTimeWindowAnalysis) {
                canUpdateMaxSNRAndTime = false;
            }
        }

        ionToSNR.put(ion, new XZ(maxTime, maxSNR));

        List<String> ionValues = sortedIntensityToIon.get(maxSNR);
        if (ionValues == null) {
            ionValues = new ArrayList<>();
            sortedIntensityToIon.put(maxSNR, ionValues);
        }

        ionValues.add(ion);
    }

    LinkedHashMap<String, XZ> result = new LinkedHashMap<>(sortedIntensityToIon.size());
    for (Map.Entry<Double, List<String>> entry : sortedIntensityToIon.entrySet()) {
        List<String> ions = entry.getValue();
        for (String ion : ions) {
            result.put(ion, ionToSNR.get(ion));
        }
    }

    return result;
}
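
The get-then-null-check-then-put sequence above (sortedIntensityToIon.get(maxSNR), create the list on a miss) is a classic TreeMap.get idiom for grouping. On Java 8+, Map.computeIfAbsent collapses it to one line; here is a minimal sketch of the same grouping, with invented ion names:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.TreeMap;

public class GroupIonsBySnr {
    public static void main(String[] args) {
        // Highest SNR first, mirroring the Collections.reverseOrder() comparator above.
        TreeMap<Double, List<String>> sortedIntensityToIon = new TreeMap<>(Collections.reverseOrder());

        // computeIfAbsent replaces the explicit get()/null-check/put() sequence.
        sortedIntensityToIon.computeIfAbsent(42.0, k -> new ArrayList<>()).add("M+H");
        sortedIntensityToIon.computeIfAbsent(42.0, k -> new ArrayList<>()).add("M+Na");
        sortedIntensityToIon.computeIfAbsent(17.5, k -> new ArrayList<>()).add("M+K");

        System.out.println(sortedIntensityToIon); // {42.0=[M+H, M+Na], 17.5=[M+K]}
    }
}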

From source file:com.google.gwt.emultest.java.util.TreeMapTest.java

public void testGet_ComparableKey() {
    TreeMap<String, Object> map = new TreeMap<String, Object>();
    ConflictingKey conflictingKey = new ConflictingKey("conflictingKey");
    assertNull(map.get(conflictingKey));
    map.put("something", "value");
    assertNull(map.get(conflictingKey));
}
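
A null return from get is ambiguous whenever the map permits null values, as TreeMap does: it can mean "no mapping" or "mapped to null". containsKey distinguishes the two. A short sketch:

TreeMap<String, Object> map = new TreeMap<String, Object>();
map.put("k", null);
System.out.println(map.get("k"));               // null (mapped to null)
System.out.println(map.get("missing"));         // null (no mapping)
System.out.println(map.containsKey("k"));       // true
System.out.println(map.containsKey("missing")); // false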

From source file:com.repeatability.pdf.PDFTextStripper.java

/**
 * This will process a TextPosition object and add the text to the list of characters on a page. It takes care of
 * overlapping text.
 *
 * @param text The text to process.
 */
@Override
protected void processTextPosition(TextPosition text) {
    boolean showCharacter = true;
    if (suppressDuplicateOverlappingText) {
        showCharacter = false;
        String textCharacter = text.getUnicode();
        float textX = text.getX();
        float textY = text.getY();
        TreeMap<Float, TreeSet<Float>> sameTextCharacters = characterListMapping.get(textCharacter);
        if (sameTextCharacters == null) {
            sameTextCharacters = new TreeMap<Float, TreeSet<Float>>();
            characterListMapping.put(textCharacter, sameTextCharacters);
        }
        // RDD - Here we compute the value that represents the end of the rendered
        // text. This value is used to determine whether subsequent text rendered
        // on the same line overwrites the current text.
        //
        // We subtract any positive padding to handle cases where extreme amounts
        // of padding are applied, then backed off (not sure why this is done, but there
        // are cases where the padding is on the order of 10x the character width, and
        // the TJ just backs up to compensate after each character). Also, we subtract
        // an amount to allow for kerning (a percentage of the width of the last
        // character).
        boolean suppressCharacter = false;
        float tolerance = text.getWidth() / textCharacter.length() / 3.0f;

        SortedMap<Float, TreeSet<Float>> xMatches = sameTextCharacters.subMap(textX - tolerance,
                textX + tolerance);
        for (TreeSet<Float> xMatch : xMatches.values()) {
            SortedSet<Float> yMatches = xMatch.subSet(textY - tolerance, textY + tolerance);
            if (!yMatches.isEmpty()) {
                suppressCharacter = true;
                break;
            }
        }
        if (!suppressCharacter) {
            TreeSet<Float> ySet = sameTextCharacters.get(textX);
            if (ySet == null) {
                ySet = new TreeSet<Float>();
                sameTextCharacters.put(textX, ySet);
            }
            ySet.add(textY);
            showCharacter = true;
        }
    }
    if (showCharacter) {
        // if we are showing the character then we need to determine which article it belongs to
        int foundArticleDivisionIndex = -1;
        int notFoundButFirstLeftAndAboveArticleDivisionIndex = -1;
        int notFoundButFirstLeftArticleDivisionIndex = -1;
        int notFoundButFirstAboveArticleDivisionIndex = -1;
        float x = text.getX();
        float y = text.getY();
        if (shouldSeparateByBeads) {
            for (int i = 0; i < beadRectangles.size() && foundArticleDivisionIndex == -1; i++) {
                PDRectangle rect = beadRectangles.get(i);
                if (rect != null) {
                    if (rect.contains(x, y)) {
                        foundArticleDivisionIndex = i * 2 + 1;
                    } else if ((x < rect.getLowerLeftX() || y < rect.getUpperRightY())
                            && notFoundButFirstLeftAndAboveArticleDivisionIndex == -1) {
                        notFoundButFirstLeftAndAboveArticleDivisionIndex = i * 2;
                    } else if (x < rect.getLowerLeftX() && notFoundButFirstLeftArticleDivisionIndex == -1) {
                        notFoundButFirstLeftArticleDivisionIndex = i * 2;
                    } else if (y < rect.getUpperRightY() && notFoundButFirstAboveArticleDivisionIndex == -1) {
                        notFoundButFirstAboveArticleDivisionIndex = i * 2;
                    }
                } else {
                    foundArticleDivisionIndex = 0;
                }
            }
        } else {
            foundArticleDivisionIndex = 0;
        }
        int articleDivisionIndex;
        if (foundArticleDivisionIndex != -1) {
            articleDivisionIndex = foundArticleDivisionIndex;
        } else if (notFoundButFirstLeftAndAboveArticleDivisionIndex != -1) {
            articleDivisionIndex = notFoundButFirstLeftAndAboveArticleDivisionIndex;
        } else if (notFoundButFirstLeftArticleDivisionIndex != -1) {
            articleDivisionIndex = notFoundButFirstLeftArticleDivisionIndex;
        } else if (notFoundButFirstAboveArticleDivisionIndex != -1) {
            articleDivisionIndex = notFoundButFirstAboveArticleDivisionIndex;
        } else {
            articleDivisionIndex = charactersByArticle.size() - 1;
        }

        List<TextPosition> textList = charactersByArticle.get(articleDivisionIndex);

        // In the wild, some PDF encoded documents put diacritics (accents on
        // top of characters) into a separate Tj element. When displaying them
        // graphically, the two chunks get overlayed. With text output though,
        // we need to do the overlay. This code recombines the diacritic with
        // its associated character if the two are consecutive.
        if (textList.isEmpty()) {
            textList.add(text);
        } else {
            // test if we overlap the previous entry.
            // Note that we are making an assumption that we need to only look back
            // one TextPosition to find what we are overlapping.
            // This may not always be true.
            TextPosition previousTextPosition = textList.get(textList.size() - 1);
            if (text.isDiacritic() && previousTextPosition.contains(text)) {
                previousTextPosition.mergeDiacritic(text);
            }
            // If the previous TextPosition was the diacritic, merge it into this
            // one and remove it from the list.
            else if (previousTextPosition.isDiacritic() && text.contains(previousTextPosition)) {
                text.mergeDiacritic(previousTextPosition);
                textList.remove(textList.size() - 1);
                textList.add(text);
            } else {
                textList.add(text);
            }
        }
    }
}
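
Beyond plain get, the overlap test above uses TreeMap.subMap and TreeSet.subSet to find near-matches within a tolerance window. A stripped-down sketch of that idiom, with invented coordinates:

import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;

public class ToleranceLookup {
    public static void main(String[] args) {
        TreeMap<Float, TreeSet<Float>> positions = new TreeMap<>();
        positions.computeIfAbsent(10.0f, k -> new TreeSet<>()).add(20.0f);

        float x = 10.2f, y = 19.9f, tolerance = 0.5f;
        // All recorded x positions within [x - tolerance, x + tolerance)
        SortedMap<Float, TreeSet<Float>> xMatches = positions.subMap(x - tolerance, x + tolerance);
        boolean overlaps = false;
        for (TreeSet<Float> ys : xMatches.values()) {
            if (!ys.subSet(y - tolerance, y + tolerance).isEmpty()) {
                overlaps = true;
                break;
            }
        }
        System.out.println(overlaps); // true: (10.0, 20.0) is within tolerance of (10.2, 19.9)
    }
}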

From source file:org.apache.hadoop.yarn.server.timeline.RollingLevelDBTimelineStore.java

/**
 * Put a single entity. If there is an error, add a TimelinePutError to the
 * given response.
 *
 * @param entityUpdates
 *          a map containing all the scheduled writes for this put to the
 *          entity db
 * @param indexUpdates
 *          a map containing all the scheduled writes for this put to the
 *          index db
 * @param entity
 *          the entity to put
 * @param response
 *          the response that collects any TimelinePutErrors
 * @return the number of writes scheduled by this put
 */
private long putEntities(TreeMap<Long, RollingWriteBatch> entityUpdates,
        TreeMap<Long, RollingWriteBatch> indexUpdates, TimelineEntity entity, TimelinePutResponse response) {

    long putCount = 0;
    List<EntityIdentifier> relatedEntitiesWithoutStartTimes = new ArrayList<EntityIdentifier>();
    byte[] revStartTime = null;
    Map<String, Set<Object>> primaryFilters = null;
    try {
        List<TimelineEvent> events = entity.getEvents();
        // look up the start time for the entity
        Long startTime = getAndSetStartTime(entity.getEntityId(), entity.getEntityType(), entity.getStartTime(),
                events);
        if (startTime == null) {
            // if no start time is found, add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.NO_START_TIME);
            response.addError(error);
            return putCount;
        }

        // Must have a domain
        if (StringUtils.isEmpty(entity.getDomainId())) {
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.NO_DOMAIN);
            response.addError(error);
            return putCount;
        }

        revStartTime = writeReverseOrderedLong(startTime);
        long roundedStartTime = entitydb.computeCurrentCheckMillis(startTime);
        RollingWriteBatch rollingWriteBatch = entityUpdates.get(roundedStartTime);
        if (rollingWriteBatch == null) {
            DB db = entitydb.getDBForStartTime(startTime);
            if (db != null) {
                WriteBatch writeBatch = db.createWriteBatch();
                rollingWriteBatch = new RollingWriteBatch(db, writeBatch);
                entityUpdates.put(roundedStartTime, rollingWriteBatch);
            }
        }
        if (rollingWriteBatch == null) {
            // no db exists for this start time, so the entity has expired; add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
            response.addError(error);
            return putCount;
        }
        WriteBatch writeBatch = rollingWriteBatch.getWriteBatch();

        // Save off the getBytes conversion to avoid unnecessary cost
        byte[] entityIdBytes = entity.getEntityId().getBytes(UTF_8);
        byte[] entityTypeBytes = entity.getEntityType().getBytes(UTF_8);
        byte[] domainIdBytes = entity.getDomainId().getBytes(UTF_8);

        // write entity marker
        byte[] markerKey = KeyBuilder.newInstance(3).add(entityTypeBytes, true).add(revStartTime)
                .add(entityIdBytes, true).getBytesForLookup();
        writeBatch.put(markerKey, EMPTY_BYTES);
        ++putCount;

        // write domain id entry
        byte[] domainkey = KeyBuilder.newInstance(4).add(entityTypeBytes, true).add(revStartTime)
                .add(entityIdBytes, true).add(DOMAIN_ID_COLUMN).getBytes();
        writeBatch.put(domainkey, domainIdBytes);
        ++putCount;

        // write event entries
        if (events != null) {
            for (TimelineEvent event : events) {
                byte[] revts = writeReverseOrderedLong(event.getTimestamp());
                byte[] key = KeyBuilder.newInstance().add(entityTypeBytes, true).add(revStartTime)
                        .add(entityIdBytes, true).add(EVENTS_COLUMN).add(revts)
                        .add(event.getEventType().getBytes(UTF_8)).getBytes();
                byte[] value = fstConf.asByteArray(event.getEventInfo());
                writeBatch.put(key, value);
                ++putCount;
            }
        }

        // write primary filter entries
        primaryFilters = entity.getPrimaryFilters();
        if (primaryFilters != null) {
            for (Entry<String, Set<Object>> primaryFilter : primaryFilters.entrySet()) {
                for (Object primaryFilterValue : primaryFilter.getValue()) {
                    byte[] key = KeyBuilder.newInstance(6).add(entityTypeBytes, true).add(revStartTime)
                            .add(entityIdBytes, true).add(PRIMARY_FILTERS_COLUMN).add(primaryFilter.getKey())
                            .add(fstConf.asByteArray(primaryFilterValue)).getBytes();
                    writeBatch.put(key, EMPTY_BYTES);
                    ++putCount;
                }
            }
        }

        // write other info entries
        Map<String, Object> otherInfo = entity.getOtherInfo();
        if (otherInfo != null) {
            for (Entry<String, Object> info : otherInfo.entrySet()) {
                byte[] key = KeyBuilder.newInstance(5).add(entityTypeBytes, true).add(revStartTime)
                        .add(entityIdBytes, true).add(OTHER_INFO_COLUMN).add(info.getKey()).getBytes();
                byte[] value = fstConf.asByteArray(info.getValue());
                writeBatch.put(key, value);
                ++putCount;
            }
        }

        // write related entity entries
        Map<String, Set<String>> relatedEntities = entity.getRelatedEntities();
        if (relatedEntities != null) {
            for (Entry<String, Set<String>> relatedEntityList : relatedEntities.entrySet()) {
                String relatedEntityType = relatedEntityList.getKey();
                for (String relatedEntityId : relatedEntityList.getValue()) {
                    // look up start time of related entity
                    Long relatedStartTimeLong = getStartTimeLong(relatedEntityId, relatedEntityType);
                    // delay writing the related entity if no start time is found
                    if (relatedStartTimeLong == null) {
                        relatedEntitiesWithoutStartTimes
                                .add(new EntityIdentifier(relatedEntityId, relatedEntityType));
                        continue;
                    }

                    byte[] relatedEntityStartTime = writeReverseOrderedLong(relatedStartTimeLong);
                    long relatedRoundedStartTime = entitydb.computeCurrentCheckMillis(relatedStartTimeLong);
                    RollingWriteBatch relatedRollingWriteBatch = entityUpdates.get(relatedRoundedStartTime);
                    if (relatedRollingWriteBatch == null) {
                        DB db = entitydb.getDBForStartTime(relatedStartTimeLong);
                        if (db != null) {
                            WriteBatch relatedWriteBatch = db.createWriteBatch();
                            relatedRollingWriteBatch = new RollingWriteBatch(db, relatedWriteBatch);
                            entityUpdates.put(relatedRoundedStartTime, relatedRollingWriteBatch);
                        }
                    }
                    if (relatedRollingWriteBatch == null) {
                        // no db exists for the related entity's start time; record an error and skip it
                        TimelinePutError error = new TimelinePutError();
                        error.setEntityId(entity.getEntityId());
                        error.setEntityType(entity.getEntityType());
                        error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
                        response.addError(error);
                        continue;
                    }
                    // This is the existing entity
                    byte[] relatedDomainIdBytes = relatedRollingWriteBatch.getDB()
                            .get(createDomainIdKey(relatedEntityId, relatedEntityType, relatedEntityStartTime));
                    // The timeline data created by the server before 2.6 won't have
                    // the domain field. We assume this timeline data is in the
                    // default timeline domain.
                    String domainId = null;
                    if (relatedDomainIdBytes == null) {
                        domainId = TimelineDataManager.DEFAULT_DOMAIN_ID;
                    } else {
                        domainId = new String(relatedDomainIdBytes, UTF_8);
                    }
                    if (!domainId.equals(entity.getDomainId())) {
                        // in this case the entity will be put, but the relation will be
                        // ignored
                        TimelinePutError error = new TimelinePutError();
                        error.setEntityId(entity.getEntityId());
                        error.setEntityType(entity.getEntityType());
                        error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
                        response.addError(error);
                        continue;
                    }
                    // write "forward" entry (related entity -> entity)
                    byte[] key = createRelatedEntityKey(relatedEntityId, relatedEntityType,
                            relatedEntityStartTime, entity.getEntityId(), entity.getEntityType());
                    WriteBatch relatedWriteBatch = relatedRollingWriteBatch.getWriteBatch();
                    relatedWriteBatch.put(key, EMPTY_BYTES);
                    ++putCount;
                }
            }
        }

        // write index entities
        RollingWriteBatch indexRollingWriteBatch = indexUpdates.get(roundedStartTime);
        if (indexRollingWriteBatch == null) {
            DB db = indexdb.getDBForStartTime(startTime);
            if (db != null) {
                WriteBatch indexWriteBatch = db.createWriteBatch();
                indexRollingWriteBatch = new RollingWriteBatch(db, indexWriteBatch);
                indexUpdates.put(roundedStartTime, indexRollingWriteBatch);
            }
        }
        if (indexRollingWriteBatch == null) {
            // no index db exists for this start time, so the entity has expired; add an error and return
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
            response.addError(error);
            return putCount;
        }
        WriteBatch indexWriteBatch = indexRollingWriteBatch.getWriteBatch();
        putCount += writePrimaryFilterEntries(indexWriteBatch, primaryFilters, markerKey, EMPTY_BYTES);
    } catch (IOException e) {
        LOG.error("Error putting entity " + entity.getEntityId() + " of type " + entity.getEntityType(), e);
        TimelinePutError error = new TimelinePutError();
        error.setEntityId(entity.getEntityId());
        error.setEntityType(entity.getEntityType());
        error.setErrorCode(TimelinePutError.IO_EXCEPTION);
        response.addError(error);
    }

    for (EntityIdentifier relatedEntity : relatedEntitiesWithoutStartTimes) {
        try {
            Long relatedEntityStartAndInsertTime = getAndSetStartTime(relatedEntity.getId(),
                    relatedEntity.getType(), readReverseOrderedLong(revStartTime, 0), null);
            if (relatedEntityStartAndInsertTime == null) {
                throw new IOException("Error setting start time for related entity");
            }
            long relatedStartTimeLong = relatedEntityStartAndInsertTime;
            long relatedRoundedStartTime = entitydb.computeCurrentCheckMillis(relatedStartTimeLong);
            RollingWriteBatch relatedRollingWriteBatch = entityUpdates.get(relatedRoundedStartTime);
            if (relatedRollingWriteBatch == null) {
                DB db = entitydb.getDBForStartTime(relatedStartTimeLong);
                if (db != null) {
                    WriteBatch relatedWriteBatch = db.createWriteBatch();
                    relatedRollingWriteBatch = new RollingWriteBatch(db, relatedWriteBatch);
                    entityUpdates.put(relatedRoundedStartTime, relatedRollingWriteBatch);
                }
            }
            if (relatedRollingWriteBatch == null) {
                // no db exists for the related entity's start time; record an error and skip it
                TimelinePutError error = new TimelinePutError();
                error.setEntityId(entity.getEntityId());
                error.setEntityType(entity.getEntityType());
                error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
                response.addError(error);
                continue;
            }
            WriteBatch relatedWriteBatch = relatedRollingWriteBatch.getWriteBatch();
            byte[] relatedEntityStartTime = writeReverseOrderedLong(relatedEntityStartAndInsertTime);
            // This is the new entity, the domain should be the same
            byte[] key = createDomainIdKey(relatedEntity.getId(), relatedEntity.getType(),
                    relatedEntityStartTime);
            relatedWriteBatch.put(key, entity.getDomainId().getBytes(UTF_8));
            ++putCount;
            relatedWriteBatch.put(createRelatedEntityKey(relatedEntity.getId(), relatedEntity.getType(),
                    relatedEntityStartTime, entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES);
            ++putCount;
            relatedWriteBatch.put(createEntityMarkerKey(relatedEntity.getId(), relatedEntity.getType(),
                    relatedEntityStartTime), EMPTY_BYTES);
            ++putCount;
        } catch (IOException e) {
            LOG.error("Error putting related entity " + relatedEntity.getId() + " of type "
                    + relatedEntity.getType() + " for entity " + entity.getEntityId() + " of type "
                    + entity.getEntityType(), e);
            TimelinePutError error = new TimelinePutError();
            error.setEntityId(entity.getEntityId());
            error.setEntityType(entity.getEntityType());
            error.setErrorCode(TimelinePutError.IO_EXCEPTION);
            response.addError(error);
        }
    }

    return putCount;
}
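
The lookup-or-create-batch block (entityUpdates.get(roundedStartTime), then build and cache a RollingWriteBatch on a miss) appears four times in this method. A hypothetical helper in the same spirit; the rolling-DB type and its methods are inferred from how the method above uses them, not taken from the actual class:

// Sketch only: RollingWriteBatch and the rolling DB are the enclosing class's
// own types; DB and WriteBatch come from the LevelDB API used above.
private RollingWriteBatch getOrCreateWriteBatch(TreeMap<Long, RollingWriteBatch> updates,
        RollingLevelDB rollingDB, long startTime) {
    long roundedStartTime = rollingDB.computeCurrentCheckMillis(startTime);
    RollingWriteBatch batch = updates.get(roundedStartTime);
    if (batch == null) {
        DB db = rollingDB.getDBForStartTime(startTime);
        if (db != null) {
            batch = new RollingWriteBatch(db, db.createWriteBatch());
            updates.put(roundedStartTime, batch);
        }
    }
    return batch; // still null when the entity's retention window has expired
}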

From source file:com.enonic.vertical.adminweb.handlers.ContentBaseHandlerServlet.java

private void handlerPreviewSiteList(HttpServletRequest request, HttpServletResponse response,
        AdminService admin, ExtendedMap formItems, User user)
        throws VerticalAdminException, VerticalEngineException {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("page", formItems.get("page"));
    int unitKey = formItems.getInt("selectedunitkey", -1);
    int siteKey = formItems.getInt("menukey", -1);

    int contentKey = formItems.getInt("contentkey", -1);
    int contentTypeKey;
    if (contentKey >= 0) {
        parameters.put("contentkey", contentKey);
        contentTypeKey = admin.getContentTypeKey(contentKey);
        parameters.put("sessiondata", formItems.getBoolean("sessiondata", false));
    } else {
        contentTypeKey = formItems.getInt("contenttypekey", -1);
    }
    parameters.put("contenttypekey", contentTypeKey);

    int versionKey = formItems.getInt("versionkey", -1);
    if (versionKey != -1) {
        parameters.put("versionkey", versionKey);
    }

    Document doc = XMLTool.domparse(admin.getAdminMenu(user, -1));
    Element rootSitesElement = doc.getDocumentElement();
    Element[] allSiteElements = XMLTool.getElements(rootSitesElement);
    int defaultPageTemplateKey = -1;
    if (allSiteElements.length > 0) {
        TreeMap<String, Element> allSitesMap = new TreeMap<String, Element>();
        for (Element siteElement : allSiteElements) {
            int mKey = Integer.valueOf(siteElement.getAttribute("key"));
            if (admin.hasContentPageTemplates(mKey, contentTypeKey)) {
                String name = siteElement.getAttribute("name");
                allSitesMap.put(name, siteElement);
            }
            rootSitesElement.removeChild(siteElement);
        }

        if (allSitesMap.size() > 0) {
            Element firstMenuElem = allSitesMap.get(allSitesMap.firstKey());
            if (siteKey < 0) {
                siteKey = Integer.valueOf(firstMenuElem.getAttribute("key"));
            }

            for (Element siteElement : allSitesMap.values()) {
                rootSitesElement.appendChild(siteElement);
                int key = Integer.parseInt(siteElement.getAttribute("key"));
                if (key == siteKey) {
                    String defaultPageTemplateAttr = siteElement.getAttribute("defaultpagetemplate");
                    if (defaultPageTemplateAttr != null && !defaultPageTemplateAttr.equals("")) {
                        defaultPageTemplateKey = Integer.parseInt(defaultPageTemplateAttr);
                    }

                }
            }
        }
    }

    addCommonParameters(admin, user, request, parameters, unitKey, siteKey);

    if (siteKey >= 0) {
        int[] excludeTypeKeys = { 1, 2, 3, 4, 6 };
        String pageTemplateXML = admin.getPageTemplatesByMenu(siteKey, excludeTypeKeys);
        Document ptDoc = XMLTool.domparse(pageTemplateXML);
        XMLTool.mergeDocuments(doc, ptDoc, true);

        if (contentKey >= 0) {
            Document chDoc = XMLTool.domparse(admin.getContentHomes(contentKey));
            XMLTool.mergeDocuments(doc, chDoc, true);
        }

        if (formItems.containsKey("pagetemplatekey")) {
            int pageTemplateKey = formItems.getInt("pagetemplatekey");
            parameters.put("pagetemplatekey", String.valueOf(pageTemplateKey));
        } else {
            if (contentTypeKey >= 0) {
                org.jdom.Document pageTemplateDocument = XMLTool.jdomparse(pageTemplateXML);
                org.jdom.Element root = pageTemplateDocument.getRootElement();
                List<org.jdom.Element> pageTemplates = root.getChildren("pagetemplate");
                Set<KeyValue> pageTemplateKeys = new HashSet<KeyValue>();
                for (org.jdom.Element pageTemplate : pageTemplates) {

                    int pageTemplateKey = Integer.parseInt(pageTemplate.getAttribute("key").getValue());
                    org.jdom.Element contentTypesNode = pageTemplate.getChild("contenttypes");
                    List<org.jdom.Element> contentTypeElements = contentTypesNode.getChildren("contenttype");

                    if (checkMatchingContentType(contentTypeKey, contentTypeElements)) {
                        KeyValue keyValue = new KeyValue(pageTemplateKey, pageTemplate.getChildText("name"));
                        pageTemplateKeys.add(keyValue);
                    }
                }
                if (pageTemplateKeys.size() > 0) {
                    KeyValue[] keys = new KeyValue[pageTemplateKeys.size()];
                    keys = pageTemplateKeys.toArray(keys);
                    Arrays.sort(keys);
                    parameters.put("pagetemplatekey", keys[0].key);
                } else {
                    if (defaultPageTemplateKey < 0) {
                        throw new VerticalAdminException("Unable to resolve page template. "
                                + "No matching page template found and default page template is not set.");
                    }
                    parameters.put("pagetemplatekey", String.valueOf(defaultPageTemplateKey));
                }

            }
        }

        if (formItems.containsKey("menuitemkey")) {
            parameters.put("menuitemkey", formItems.get("menuitemkey"));
        }
    }

    transformXML(request, response, doc, "contenttype_preview_list.xsl", parameters);
}
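
One aside on the site handling above: allSitesMap.get(allSitesMap.firstKey()) performs two tree lookups to fetch the value for the lowest key. Since the map is only queried after the size check, TreeMap.firstEntry() would do the same in one step (firstKey() throws NoSuchElementException on an empty map, while firstEntry() returns null):

// Equivalent single lookup: the entry with the lowest key.
Element firstMenuElem = allSitesMap.firstEntry().getValue();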

From source file:org.dasein.cloud.aws.compute.EC2Instance.java

@Override
public @Nonnull Iterable<VmStatistics> getVMStatisticsForPeriod(@Nonnull String instanceId, long startTimestamp,
        long endTimestamp) throws InternalException, CloudException {
    APITrace.begin(getProvider(), "getVMStatisticsForPeriod");
    try {
        if (endTimestamp < 1L) {
            endTimestamp = System.currentTimeMillis() + 1000L;
        }
        if (startTimestamp < (System.currentTimeMillis() - CalendarWrapper.DAY)) {
            startTimestamp = System.currentTimeMillis() - CalendarWrapper.DAY;
            if (startTimestamp > (endTimestamp - (2L * CalendarWrapper.MINUTE))) {
                endTimestamp = startTimestamp + (2L * CalendarWrapper.MINUTE);
            }
        } else if (startTimestamp > (endTimestamp - (2L * CalendarWrapper.MINUTE))) {
            startTimestamp = endTimestamp - (2L * CalendarWrapper.MINUTE);
        }
        TreeMap<Integer, VmStatistics> statMap = new TreeMap<Integer, VmStatistics>();
        int minutes = (int) ((endTimestamp - startTimestamp) / CalendarWrapper.MINUTE);

        for (int i = 1; i <= minutes; i++) {
            statMap.put(i, new VmStatistics());
        }
        Set<Metric> metrics = calculate("CPUUtilization", "Percent", instanceId, false, startTimestamp,
                endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageCpuUtilization(m.average);
            stats.setMaximumCpuUtilization(m.maximum);
            stats.setMinimumCpuUtilization(m.minimum);
            stats.setStartTimestamp(m.timestamp);
            stats.setEndTimestamp(m.timestamp);
            stats.setSamples(m.samples);
        }
        String id = instanceId;
        boolean idIsVolumeId = false;
        VirtualMachine vm = getVirtualMachine(instanceId);
        if (vm != null && vm.isPersistent()) {
            if (vm.getProviderVolumeIds(getProvider()).length > 0) {
                id = vm.getProviderVolumeIds(getProvider())[0];
                idIsVolumeId = true;
            }
        }
        metrics = calculate(idIsVolumeId ? "VolumeReadBytes" : "DiskReadBytes", "Bytes", id, idIsVolumeId,
                startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageDiskReadBytes(m.average);
            stats.setMinimumDiskReadBytes(m.minimum);
            stats.setMaximumDiskReadBytes(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }
        metrics = calculate(idIsVolumeId ? "VolumeReadOps" : "DiskReadOps", "Count", id, idIsVolumeId,
                startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageDiskReadOperations(m.average);
            stats.setMinimumDiskReadOperations(m.minimum);
            stats.setMaximumDiskReadOperations(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }
        metrics = calculate(idIsVolumeId ? "VolumeWriteBytes" : "DiskWriteBytes", "Bytes", id, idIsVolumeId,
                startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageDiskWriteBytes(m.average);
            stats.setMinimumDiskWriteBytes(m.minimum);
            stats.setMaximumDiskWriteBytes(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }
        metrics = calculate(idIsVolumeId ? "VolumeWriteOps" : "DiskWriteOps", "Count", id, idIsVolumeId,
                startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageDiskWriteOperations(m.average);
            stats.setMinimumDiskWriteOperations(m.minimum);
            stats.setMaximumDiskWriteOperations(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }
        metrics = calculate("NetworkIn", "Bytes", instanceId, false, startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageNetworkIn(m.average);
            stats.setMinimumNetworkIn(m.minimum);
            stats.setMaximumNetworkIn(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }
        metrics = calculate("NetworkOut", "Bytes", instanceId, false, startTimestamp, endTimestamp);
        for (Metric m : metrics) {
            int minute = 1 + (int) ((m.timestamp - startTimestamp) / CalendarWrapper.MINUTE);
            VmStatistics stats = statMap.get(minute);

            if (stats == null) {
                stats = new VmStatistics();
                statMap.put(minute, stats);
            }
            stats.setAverageNetworkOut(m.average);
            stats.setMinimumNetworkOut(m.minimum);
            stats.setMaximumNetworkOut(m.maximum);
            if (stats.getSamples() < 1) {
                stats.setSamples(m.samples);
            }
        }
        ArrayList<VmStatistics> list = new ArrayList<VmStatistics>();
        for (Map.Entry<Integer, VmStatistics> entry : statMap.entrySet()) {
            VmStatistics stats = entry.getValue();

            if (stats != null && stats.getSamples() > 0) {
                list.add(stats);
            }
        }
        return list;
    } finally {
        APITrace.end();
    }
}
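
All six metric loops above repeat the same minute-bucket lookup on statMap before setting their respective statistics. A hypothetical extraction of that lookup, reusing the types and constants the method already has in scope (on Java 8+ the body could equally be a computeIfAbsent call):

// Sketch: find, or lazily create, the per-minute statistics bucket for a sample.
private static VmStatistics bucketFor(TreeMap<Integer, VmStatistics> statMap,
        long sampleTimestamp, long startTimestamp) {
    int minute = 1 + (int) ((sampleTimestamp - startTimestamp) / CalendarWrapper.MINUTE);
    VmStatistics stats = statMap.get(minute);
    if (stats == null) {
        stats = new VmStatistics();
        statMap.put(minute, stats);
    }
    return stats;
}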

From source file:edu.hawaii.soest.hioos.isus.ISUSSource.java

/**
 * A method that processes the data object passed and flushes the
 * data to the DataTurbine given the sensor properties in the XMLConfiguration
 * passed in.
 *
 * @param xmlConfig - the XMLConfiguration object containing the list of
 *                    sensor properties
 * @param frameMap  - the parsed data as a HierarchicalMap object
 */
public boolean process(XMLConfiguration xmlConfig, HierarchicalMap frameMap) {

    logger.debug("ISUSSource.process() called.");
    // do not execute the stream if there is no connection
    if (!isConnected())
        return false;

    boolean success = false;

    try {

        // add channels of data that will be pushed to the server.  
        // Each sample will be sent to the Data Turbine as an rbnb frame.  Information
        // on each channel is found in the XMLConfiguration file (email.account.properties.xml)
        // and the StorXParser object (to get the data string)
        ChannelMap rbnbChannelMap = new ChannelMap(); // used to flush channels
        ChannelMap registerChannelMap = new ChannelMap(); // used to register channels
        int channelIndex = 0;

        String sensorName = null;
        String sensorSerialNumber = null;
        String sensorDescription = null;
        boolean isImmersed = false;
        String[] calibrationURLs = null;
        String calibrationURL = null;
        String type = null;

        List sensorList = xmlConfig.configurationsAt("account.logger.sensor");

        for (Iterator sIterator = sensorList.iterator(); sIterator.hasNext();) {
            HierarchicalConfiguration sensorConfig = (HierarchicalConfiguration) sIterator.next();
            sensorSerialNumber = sensorConfig.getString("serialNumber");

            // find the correct sensor configuration properties
            if (sensorSerialNumber.equals(frameMap.get("serialNumber"))) {

                sensorName = sensorConfig.getString("name");
                sensorDescription = sensorConfig.getString("description");
                isImmersed = Boolean.parseBoolean(sensorConfig.getString("isImmersed"));
                calibrationURLs = sensorConfig.getStringArray("calibrationURL");
                type = (String) frameMap.get("type");

                // find the correct calibrationURL from the list given the type
                for (String url : calibrationURLs) {

                    if (url.indexOf(type) > 0) {
                        calibrationURL = url;
                        break;

                    } else {
                        logger.debug("There was no match for " + type);
                    }
                }

                // get a Calibration instance to interpret raw sensor values
                Calibration calibration = new Calibration();

                if (calibration.parse(calibrationURL)) {

                    // Build the RBNB channel map 

                    // get the sample date and convert it to seconds since the epoch
                    Date frameDate = (Date) frameMap.get("date");
                    Calendar frameDateTime = Calendar.getInstance();
                    frameDateTime.setTime(frameDate);
                    double sampleTimeAsSecondsSinceEpoch = (double) (frameDateTime.getTimeInMillis() / 1000);
                    // and create a string formatted date for the given time zone
                    DATE_FORMAT.setTimeZone(TZ);
                    String frameDateAsString = DATE_FORMAT.format(frameDate).toString();

                    // get the sample data from the frame map
                    ByteBuffer rawFrame = (ByteBuffer) frameMap.get("rawFrame");
                    ISUSFrame isusFrame = (ISUSFrame) frameMap.get("parsedFrameObject");
                    String serialNumber = isusFrame.getSerialNumber();
                    String sampleDate = isusFrame.getSampleDate();
                    String sampleTime = isusFrame.getSampleTime();
                    SimpleDateFormat dtFormat = new SimpleDateFormat();
                    Date sampleDateTime = isusFrame.getSampleDateTime();
                    dtFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
                    dtFormat.applyPattern("MM/dd/yy");
                    String sampleDateUTC = dtFormat.format(sampleDateTime);
                    dtFormat.applyPattern("HH:mm:ss");
                    String sampleTimeUTC = dtFormat.format(sampleDateTime);
                    dtFormat.setTimeZone(TimeZone.getTimeZone("HST"));
                    dtFormat.applyPattern("MM/dd/yy");
                    String sampleDateHST = dtFormat.format(sampleDateTime);
                    dtFormat.applyPattern("HH:mm:ss");
                    String sampleTimeHST = dtFormat.format(sampleDateTime);
                    dtFormat.applyPattern("dd-MMM-yy HH:mm");
                    String sampleDateTimeHST = dtFormat.format(sampleDateTime);

                    double rawNitrogenConcentration = isusFrame.getNitrogenConcentration();
                    double rawAuxConcentration1 = isusFrame.getAuxConcentration1();
                    double rawAuxConcentration2 = isusFrame.getAuxConcentration2();
                    double rawAuxConcentration3 = isusFrame.getAuxConcentration3();
                    double rawRmsError = isusFrame.getRmsError();
                    double rawInsideTemperature = isusFrame.getInsideTemperature();
                    double rawSpectrometerTemperature = isusFrame.getSpectrometerTemperature();
                    double rawLampTemperature = isusFrame.getLampTemperature();
                    int rawLampTime = isusFrame.getLampTime();
                    double rawHumidity = isusFrame.getHumidity();
                    double rawLampVoltage12 = isusFrame.getLampVoltage12();
                    double rawInternalPowerVoltage5 = isusFrame.getInternalPowerVoltage5();
                    double rawMainPowerVoltage = isusFrame.getMainPowerVoltage();
                    double rawReferenceAverage = isusFrame.getReferenceAverage();
                    double rawReferenceVariance = isusFrame.getReferenceVariance();
                    double rawSeaWaterDarkCounts = isusFrame.getSeaWaterDarkCounts();
                    double rawSpectrometerAverage = isusFrame.getSpectrometerAverage();
                    int checksum = isusFrame.getChecksum();

                    //// apply calibrations to the observed data
                    double nitrogenConcentration = calibration.apply(rawNitrogenConcentration, isImmersed,
                            "NITRATE");
                    double auxConcentration1 = calibration.apply(rawAuxConcentration1, isImmersed, "AUX1");
                    double auxConcentration2 = calibration.apply(rawAuxConcentration2, isImmersed, "AUX2");
                    double auxConcentration3 = calibration.apply(rawAuxConcentration3, isImmersed, "AUX3");
                    double rmsError = calibration.apply(rawRmsError, isImmersed, "RMSe");
                    double insideTemperature = calibration.apply(rawInsideTemperature, isImmersed, "T_INT");
                    double spectrometerTemperature = calibration.apply(rawSpectrometerTemperature, isImmersed,
                            "T_SPEC");
                    double lampTemperature = calibration.apply(rawLampTemperature, isImmersed, "T_LAMP");
                    int lampTime = rawLampTime;
                    double humidity = calibration.apply(rawHumidity, isImmersed, "HUMIDITY");
                    double lampVoltage12 = calibration.apply(rawLampVoltage12, isImmersed, "VOLT_12");
                    double internalPowerVoltage5 = calibration.apply(rawInternalPowerVoltage5, isImmersed,
                            "VOLT_5");
                    double mainPowerVoltage = calibration.apply(rawMainPowerVoltage, isImmersed, "VOLT_MAIN");
                    double referenceAverage = calibration.apply(rawReferenceAverage, isImmersed, "REF_AVG");
                    double referenceVariance = calibration.apply(rawReferenceVariance, isImmersed, "REF_STD");
                    double seaWaterDarkCounts = calibration.apply(rawSeaWaterDarkCounts, isImmersed, "SW_DARK");
                    double spectrometerAverage = calibration.apply(rawSpectrometerAverage, isImmersed,
                            "SPEC_AVG");

                    // iterate through the individual wavelengths
                    List<String> variableNames = calibration.getVariableNames();
                    TreeMap<String, Double> wavelengthsMap = new TreeMap<String, Double>();
                    Collections.sort(variableNames);
                    int rawWavelengthCounts = 0;
                    int count = 1;

                    for (String name : variableNames) {

                        // just handle the wavelength channels
                        if (name.startsWith("UV_")) {
                            rawWavelengthCounts = isusFrame.getChannelWavelengthCounts(count);

                            double value = calibration.apply(rawWavelengthCounts, isImmersed, name);
                            count++;
                            wavelengthsMap.put(name, Double.valueOf(value));

                        }

                    }

                    String sampleString = "";
                    sampleString += sampleDate + "\t";
                    sampleString += sampleDateUTC + "\t";
                    sampleString += sampleTime + "\t";
                    sampleString += sampleTimeUTC + "\t";
                    sampleString += sampleDateHST + "\t";
                    sampleString += sampleTimeHST + "\t";
                    sampleString += sampleDateTimeHST + "\t";
                    sampleString += String.format("%-15.11f", nitrogenConcentration) + "\t";
                    //sampleString += String.format("%15.11f", auxConcentration1)     + "\t";
                    //sampleString += String.format("%15.11f", auxConcentration2)     + "\t";
                    //sampleString += String.format("%15.11f", auxConcentration3)     + "\t";
                    sampleString += String.format("%15.11f", rmsError) + "\t";
                    sampleString += String.format("%15.11f", insideTemperature) + "\t";
                    sampleString += String.format("%15.11f", spectrometerTemperature) + "\t";
                    sampleString += String.format("%15.11f", lampTemperature) + "\t";
                    sampleString += String.format("%6d", lampTime) + "\t";
                    sampleString += String.format("%15.11f", humidity) + "\t";
                    sampleString += String.format("%15.11f", lampVoltage12) + "\t";
                    sampleString += String.format("%15.11f", internalPowerVoltage5) + "\t";
                    sampleString += String.format("%15.11f", mainPowerVoltage) + "\t";
                    sampleString += String.format("%15.11f", referenceAverage) + "\t";
                    sampleString += String.format("%15.11f", referenceVariance) + "\t";
                    sampleString += String.format("%15.11f", seaWaterDarkCounts) + "\t";
                    sampleString += String.format("%15.11f", spectrometerAverage) + "\t";

                    Set<String> wavelengths = wavelengthsMap.keySet();
                    Iterator<String> wIterator = wavelengths.iterator();

                    while (wIterator.hasNext()) {
                        String name = wIterator.next();
                        Double wavelengthValue = wavelengthsMap.get(name);
                        sampleString += String.format("%6d", wavelengthValue.intValue()) + "\t";
                        channelIndex = registerChannelMap.Add(name);
                        registerChannelMap.PutUserInfo(channelIndex, "units=counts");
                        channelIndex = rbnbChannelMap.Add(name);
                        rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                        rbnbChannelMap.PutDataAsFloat64(channelIndex,
                                new double[] { wavelengthValue.doubleValue() });

                    }

                    sampleString += String.format("%03d", checksum);
                    sampleString += "\n";

                    // add the sample timestamp to the rbnb channel map
                    //registerChannelMap.PutTime(sampleTimeAsSecondsSinceEpoch, 0d);
                    rbnbChannelMap.PutTime(sampleTimeAsSecondsSinceEpoch, 0d);

                    // add the BinaryRawSatlanticFrameData channel to the channelMap
                    channelIndex = registerChannelMap.Add("BinaryRawSatlanticFrameData");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("BinaryRawSatlanticFrameData");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsByteArray(channelIndex, rawFrame.array());

                    // add the DecimalASCIISampleData channel to the channelMap
                    channelIndex = registerChannelMap.Add(getRBNBChannelName());
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add(getRBNBChannelName());
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, sampleString);

                    // add the serialNumber channel to the channelMap
                    channelIndex = registerChannelMap.Add("serialNumber");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("serialNumber");
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, serialNumber);

                    // add the sampleDateUTC channel to the channelMap
                    channelIndex = registerChannelMap.Add("sampleDateUTC");
                    registerChannelMap.PutUserInfo(channelIndex, "units=YYYYDDD");
                    channelIndex = rbnbChannelMap.Add("sampleDateUTC");
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, sampleDate);

                    // add the sampleTimeUTC channel to the channelMap
                    channelIndex = registerChannelMap.Add("sampleTimeUTC");
                    registerChannelMap.PutUserInfo(channelIndex, "units=hh.hhhhhh");
                    channelIndex = rbnbChannelMap.Add("sampleTimeUTC");
                    rbnbChannelMap.PutMime(channelIndex, "text/plain");
                    rbnbChannelMap.PutDataAsString(channelIndex, sampleTimeUTC);

                    // add the nitrogenConcentration channel to the channelMap
                    channelIndex = registerChannelMap.Add("nitrogenConcentration");
                    registerChannelMap.PutUserInfo(channelIndex, "units=uM");
                    channelIndex = rbnbChannelMap.Add("nitrogenConcentration");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { nitrogenConcentration });

                    // add the auxConcentration1 channel to the channelMap
                    channelIndex = registerChannelMap.Add("auxConcentration1");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("auxConcentration1");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration1 });

                    // add the auxConcentration2 channel to the channelMap
                    channelIndex = registerChannelMap.Add("auxConcentration2");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("auxConcentration2");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration2 });

                    // add the auxConcentration3 channel to the channelMap
                    channelIndex = registerChannelMap.Add("auxConcentration3");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("auxConcentration3");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { auxConcentration3 });

                    // add the rmsError channel to the channelMap
                    channelIndex = registerChannelMap.Add("rmsError");
                    registerChannelMap.PutUserInfo(channelIndex, "units=none");
                    channelIndex = rbnbChannelMap.Add("rmsError");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { rmsError });

                    // add the insideTemperature channel to the channelMap
                    channelIndex = registerChannelMap.Add("insideTemperature");
                    registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
                    channelIndex = rbnbChannelMap.Add("insideTemperature");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { insideTemperature });

                    // add the spectrometerTemperature channel to the channelMap
                    channelIndex = registerChannelMap.Add("spectrometerTemperature");
                    registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
                    channelIndex = rbnbChannelMap.Add("spectrometerTemperature");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { spectrometerTemperature });

                    // add the lampTemperature channel to the channelMap
                    channelIndex = registerChannelMap.Add("lampTemperature");
                    registerChannelMap.PutUserInfo(channelIndex, "units=Celsius");
                    channelIndex = rbnbChannelMap.Add("lampTemperature");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { lampTemperature });

                    // add the lampTime channel to the channelMap
                    channelIndex = registerChannelMap.Add("lampTime");
                    registerChannelMap.PutUserInfo(channelIndex, "units=seconds");
                    channelIndex = rbnbChannelMap.Add("lampTime");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsInt32(channelIndex, new int[] { lampTime });

                    // add the humidity channel to the channelMap
                    channelIndex = registerChannelMap.Add("humidity");
                    registerChannelMap.PutUserInfo(channelIndex, "units=%");
                    channelIndex = rbnbChannelMap.Add("humidity");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { humidity });

                    // add the lampVoltage12 channel to the channelMap
                    channelIndex = registerChannelMap.Add("lampVoltage12");
                    registerChannelMap.PutUserInfo(channelIndex, "units=V");
                    channelIndex = rbnbChannelMap.Add("lampVoltage12");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { lampVoltage12 });

                    // add the internalPowerVoltage5 channel to the channelMap
                    channelIndex = registerChannelMap.Add("internalPowerVoltage5");
                    registerChannelMap.PutUserInfo(channelIndex, "units=V");
                    channelIndex = rbnbChannelMap.Add("internalPowerVoltage5");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { internalPowerVoltage5 });

                    // add the mainPowerVoltage channel to the channelMap
                    channelIndex = registerChannelMap.Add("mainPowerVoltage");
                    registerChannelMap.PutUserInfo(channelIndex, "units=V");
                    channelIndex = rbnbChannelMap.Add("mainPowerVoltage");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { mainPowerVoltage });

                    // add the referenceAverage channel to the channelMap
                    channelIndex = registerChannelMap.Add("referenceAverage");
                    registerChannelMap.PutUserInfo(channelIndex, "units=count");
                    channelIndex = rbnbChannelMap.Add("referenceAverage");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { referenceAverage });

                    // add the referenceVariance channel to the channelMap
                    channelIndex = registerChannelMap.Add("referenceVariance");
                    registerChannelMap.PutUserInfo(channelIndex, "units=count");
                    channelIndex = rbnbChannelMap.Add("referenceVariance");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { referenceVariance });

                    // add the seaWaterDarkCounts channel to the channelMap
                    channelIndex = registerChannelMap.Add("seaWaterDarkCounts");
                    registerChannelMap.PutUserInfo(channelIndex, "units=count");
                    channelIndex = rbnbChannelMap.Add("seaWaterDarkCounts");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { seaWaterDarkCounts });

                    // add the spectrometerAverage channel to the channelMap
                    channelIndex = registerChannelMap.Add("spectrometerAverage");
                    registerChannelMap.PutUserInfo(channelIndex, "units=count");
                    channelIndex = rbnbChannelMap.Add("averageWavelength");
                    rbnbChannelMap.PutMime(channelIndex, "application/octet-stream");
                    rbnbChannelMap.PutDataAsFloat64(channelIndex, new double[] { spectrometerAverage });

                    // Now register the RBNB channels, and flush the rbnbChannelMap to the
                    // DataTurbine
                    getSource().Register(registerChannelMap);
                    getSource().Flush(rbnbChannelMap);
                    logger.info(frameDateAsString + " " + "Sample sent to the DataTurbine: (" + serialNumber
                            + ") " + sampleString);

                    registerChannelMap.Clear();
                    rbnbChannelMap.Clear();

                } else {

                    logger.info("Couldn't apply the calibration coefficients. " + "Skipping this sample.");

                } // end if()

            } // end if()

        } // end for()

        //getSource.Detach();

        success = true;
    } catch (ParseException pe) {
        // Parsing of the calibration file failed. Log the exception and return false.
        success = false;
        logger.debug("There was a problem parsing the calibration file. The " + "error message was: "
                + pe.getMessage());
        return success;

    } catch (SAPIException sapie) {
        // In the event of an RBNB communication exception, log the exception,
        // and allow execute() to return false, which will prompt a retry.
        success = false;
        sapie.printStackTrace();
        return success;

    }

    return success;
}

From source file:org.apache.druid.indexing.kafka.supervisor.KafkaSupervisor.java

/**
 * This method does two things:
 * 1. Makes sure the checkpoint information in the taskGroup is consistent with that of the tasks; if not, kills
 * the inconsistent tasks.
 * 2. Truncates the checkpoints in the taskGroup corresponding to segments that have already been published, so
 * that any newly created tasks for the taskGroup start indexing from after the latest published offsets.
 */
private void verifyAndMergeCheckpoints(final TaskGroup taskGroup) {
    final int groupId = taskGroup.groupId;
    final List<Pair<String, TreeMap<Integer, Map<Integer, Long>>>> taskSequences = new ArrayList<>();
    final List<ListenableFuture<TreeMap<Integer, Map<Integer, Long>>>> futures = new ArrayList<>();
    final List<String> taskIds = new ArrayList<>();

    for (String taskId : taskGroup.taskIds()) {
        final ListenableFuture<TreeMap<Integer, Map<Integer, Long>>> checkpointsFuture = taskClient
                .getCheckpointsAsync(taskId, true);
        taskIds.add(taskId);
        futures.add(checkpointsFuture);
    }

    try {
        List<TreeMap<Integer, Map<Integer, Long>>> futuresResult = Futures.successfulAsList(futures)
                .get(futureTimeoutInSeconds, TimeUnit.SECONDS);

        for (int i = 0; i < futuresResult.size(); i++) {
            final TreeMap<Integer, Map<Integer, Long>> checkpoints = futuresResult.get(i);
            final String taskId = taskIds.get(i);
            if (checkpoints == null) {
                try {
                    // catch the exception in failed futures
                    futures.get(i).get();
                } catch (Exception e) {
                    log.error(e, "Problem while getting checkpoints for task [%s], killing the task", taskId);
                    killTask(taskId);
                    taskGroup.tasks.remove(taskId);
                }
            } else if (checkpoints.isEmpty()) {
                log.warn("Ignoring task [%s], as probably it is not started running yet", taskId);
            } else {
                taskSequences.add(new Pair<>(taskId, checkpoints));
            }
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    final KafkaDataSourceMetadata latestDataSourceMetadata = (KafkaDataSourceMetadata) indexerMetadataStorageCoordinator
            .getDataSourceMetadata(dataSource);
    final boolean hasValidOffsetsFromDb = latestDataSourceMetadata != null
            && latestDataSourceMetadata.getKafkaPartitions() != null
            && ioConfig.getTopic().equals(latestDataSourceMetadata.getKafkaPartitions().getTopic());
    final Map<Integer, Long> latestOffsetsFromDb;
    if (hasValidOffsetsFromDb) {
        latestOffsetsFromDb = latestDataSourceMetadata.getKafkaPartitions().getPartitionOffsetMap();
    } else {
        latestOffsetsFromDb = null;
    }

    // sort the task sequences in descending order of each task's earliest checkpointed sequenceId
    taskSequences.sort((o1, o2) -> o2.rhs.firstKey().compareTo(o1.rhs.firstKey()));

    final Set<String> tasksToKill = new HashSet<>();
    final AtomicInteger earliestConsistentSequenceId = new AtomicInteger(-1);
    int taskIndex = 0;

    while (taskIndex < taskSequences.size()) {
        TreeMap<Integer, Map<Integer, Long>> taskCheckpoints = taskSequences.get(taskIndex).rhs;
        String taskId = taskSequences.get(taskIndex).lhs;
        if (earliestConsistentSequenceId.get() == -1) {
            // find the first replica task with earliest sequenceId consistent with datasource metadata in the metadata
            // store
            if (taskCheckpoints.entrySet().stream()
                    .anyMatch(sequenceCheckpoint -> sequenceCheckpoint.getValue().entrySet().stream()
                            .allMatch(partitionOffset -> Longs.compare(partitionOffset.getValue(),
                                    latestOffsetsFromDb == null ? partitionOffset.getValue()
                                            : latestOffsetsFromDb.getOrDefault(partitionOffset.getKey(),
                                                    partitionOffset.getValue())) == 0)
                            && earliestConsistentSequenceId.compareAndSet(-1, sequenceCheckpoint.getKey()))
                    || (pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() > 0
                            && earliestConsistentSequenceId.compareAndSet(-1, taskCheckpoints.firstKey()))) {
                final SortedMap<Integer, Map<Integer, Long>> latestCheckpoints = new TreeMap<>(
                        taskCheckpoints.tailMap(earliestConsistentSequenceId.get()));
                log.info("Setting taskGroup sequences to [%s] for group [%d]", latestCheckpoints, groupId);
                taskGroup.sequenceOffsets.clear();
                taskGroup.sequenceOffsets.putAll(latestCheckpoints);
            } else {
                log.debug("Adding task [%s] to kill list, checkpoints[%s], latestoffsets from DB [%s]", taskId,
                        taskCheckpoints, latestOffsetsFromDb);
                tasksToKill.add(taskId);
            }
        } else {
            // check consistency with taskGroup sequences
            if (taskCheckpoints.get(taskGroup.sequenceOffsets.firstKey()) == null
                    || !(taskCheckpoints.get(taskGroup.sequenceOffsets.firstKey())
                            .equals(taskGroup.sequenceOffsets.firstEntry().getValue()))
                    || taskCheckpoints.tailMap(taskGroup.sequenceOffsets.firstKey())
                            .size() != taskGroup.sequenceOffsets.size()) {
                log.debug("Adding task [%s] to kill list, checkpoints[%s], taskgroup checkpoints [%s]", taskId,
                        taskCheckpoints, taskGroup.sequenceOffsets);
                tasksToKill.add(taskId);
            }
        }
        taskIndex++;
    }

    if ((tasksToKill.size() > 0 && tasksToKill.size() == taskGroup.tasks.size()) || (taskGroup.tasks.size() == 0
            && pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() == 0)) {
        // Are we killing all tasks, or is no task left in the group?
        // Clear the taskGroup state so that the latest offset information is fetched from the metadata store.
        log.warn("Clearing task group [%d] information as no valid tasks are left in the group", groupId);
        taskGroups.remove(groupId);
        partitionGroups.get(groupId).replaceAll((partition, offset) -> NOT_SET);
    }

    taskSequences.stream().filter(taskIdSequences -> tasksToKill.contains(taskIdSequences.lhs))
            .forEach(sequenceCheckpoint -> {
                log.warn(
                        "Killing task [%s], as its checkpoints [%s] are not consistent with group checkpoints[%s] or latest "
                                + "persisted offsets in metadata store [%s]",
                        sequenceCheckpoint.lhs, sequenceCheckpoint.rhs, taskGroup.sequenceOffsets,
                        latestOffsetsFromDb);
                killTask(sequenceCheckpoint.lhs);
                taskGroup.tasks.remove(sequenceCheckpoint.lhs);
            });
}
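
The truncation step above hinges on two TreeMap operations: get(sequenceId), which returns null when a task is missing a checkpoint, and tailMap(sequenceId), which keeps only the entries at or after the earliest consistent sequence. Here is a minimal, self-contained sketch of that idiom, using made-up sequence ids and partition offsets rather than Druid's actual checkpoint data:

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class CheckpointTruncationSketch {
    public static void main(String[] args) {
        // sequenceId -> (partition -> offset), mirroring the supervisor's checkpoint shape
        TreeMap<Integer, Map<Integer, Long>> checkpoints = new TreeMap<>();
        checkpoints.put(1, Map.of(0, 100L, 1, 110L));
        checkpoints.put(2, Map.of(0, 200L, 1, 210L));
        checkpoints.put(3, Map.of(0, 300L, 1, 310L));

        int earliestConsistentSequenceId = 2; // hypothetical value for the sketch

        // get() returns null for a missing sequenceId, which the supervisor
        // treats as a sign of an inconsistent task
        Map<Integer, Long> offsets = checkpoints.get(earliestConsistentSequenceId);
        System.out.println("offsets at sequence 2: " + offsets);

        // tailMap() keeps only the sequences at or after the consistent one;
        // copying it into a new TreeMap detaches it from the original map
        SortedMap<Integer, Map<Integer, Long>> latestCheckpoints = new TreeMap<>(
                checkpoints.tailMap(earliestConsistentSequenceId));
        System.out.println("truncated checkpoints: " + latestCheckpoints);
    }
}

Copying the tailMap view into a fresh TreeMap, as the supervisor does before calling putAll(), matters because tailMap() returns a live view backed by the original map.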

From source file:com.netflix.ice.processor.BillingFileProcessor.java

@Override
protected void poll() throws Exception {

    TreeMap<DateTime, List<BillingFile>> filesToProcess = Maps.newTreeMap();
    Map<DateTime, List<BillingFile>> monitorFilesToProcess = Maps.newTreeMap();

    // list the tar.gz files in the billing file folder
    for (int i = 0; i < config.billingS3BucketNames.length; i++) {
        String billingS3BucketName = config.billingS3BucketNames[i];
        String billingS3BucketPrefix = config.billingS3BucketPrefixes.length > i
                ? config.billingS3BucketPrefixes[i]
                : "";
        String accountId = config.billingAccountIds.length > i ? config.billingAccountIds[i] : "";
        String billingAccessRoleName = config.billingAccessRoleNames.length > i
                ? config.billingAccessRoleNames[i]
                : "";
        String billingAccessExternalId = config.billingAccessExternalIds.length > i
                ? config.billingAccessExternalIds[i]
                : "";

        logger.info("trying to list objects in billing bucket " + billingS3BucketName
                + " using assume role, and external id " + billingAccessRoleName + " "
                + billingAccessExternalId);
        List<S3ObjectSummary> objectSummaries = AwsUtils.listAllObjects(billingS3BucketName,
                billingS3BucketPrefix, accountId, billingAccessRoleName, billingAccessExternalId);
        logger.info("found " + objectSummaries.size() + " in billing bucket " + billingS3BucketName);
        TreeMap<DateTime, S3ObjectSummary> filesToProcessInOneBucket = Maps.newTreeMap();
        Map<DateTime, S3ObjectSummary> monitorFilesToProcessInOneBucket = Maps.newTreeMap();

        // for each file, download & process it if needed
        for (S3ObjectSummary objectSummary : objectSummaries) {

            String fileKey = objectSummary.getKey();
            DateTime dataTime = AwsUtils.getDateTimeFromFileNameWithTags(fileKey);
            boolean withTags = true;
            if (dataTime == null) {
                dataTime = AwsUtils.getDateTimeFromFileName(fileKey);
                withTags = false;
            }

            if (dataTime != null && !dataTime.isBefore(config.startDate)) {
                if (!filesToProcessInOneBucket.containsKey(dataTime)
                        || withTags && config.resourceService != null
                        || !withTags && config.resourceService == null)
                    filesToProcessInOneBucket.put(dataTime, objectSummary);
                else
                    logger.info("ignoring file " + objectSummary.getKey());
            } else {
                logger.info("ignoring file " + objectSummary.getKey());
            }
        }

        for (S3ObjectSummary objectSummary : objectSummaries) {
            String fileKey = objectSummary.getKey();
            DateTime dataTime = AwsUtils.getDateTimeFromFileNameWithMonitoring(fileKey);

            if (dataTime != null && !dataTime.isBefore(config.startDate)) {
                monitorFilesToProcessInOneBucket.put(dataTime, objectSummary);
            }
        }

        for (DateTime key : filesToProcessInOneBucket.keySet()) {
            List<BillingFile> list = filesToProcess.get(key);
            if (list == null) {
                list = Lists.newArrayList();
                filesToProcess.put(key, list);
            }
            list.add(new BillingFile(filesToProcessInOneBucket.get(key), accountId, billingAccessRoleName,
                    billingAccessExternalId, billingS3BucketPrefix));
        }

        for (DateTime key : monitorFilesToProcessInOneBucket.keySet()) {
            List<BillingFile> list = monitorFilesToProcess.get(key);
            if (list == null) {
                list = Lists.newArrayList();
                monitorFilesToProcess.put(key, list);
            }
            list.add(new BillingFile(monitorFilesToProcessInOneBucket.get(key), accountId,
                    billingAccessRoleName, billingAccessExternalId, billingS3BucketPrefix));
        }
    }

    for (DateTime dataTime : filesToProcess.keySet()) {
        startMilli = endMilli = dataTime.getMillis();
        init();

        boolean hasNewFiles = false;
        boolean hasTags = false;
        long lastProcessed = lastProcessTime(AwsUtils.monthDateFormat.print(dataTime));

        for (BillingFile billingFile : filesToProcess.get(dataTime)) {
            S3ObjectSummary objectSummary = billingFile.s3ObjectSummary;
            if (objectSummary.getLastModified().getTime() < lastProcessed) {
                logger.info("data has been processed. ignoring " + objectSummary.getKey() + "...");
                continue;
            }
            hasNewFiles = true;
        }

        if (!hasNewFiles) {
            logger.info("data has been processed. ignoring all files at "
                    + AwsUtils.monthDateFormat.print(dataTime));
            continue;
        }

        long processTime = new DateTime(DateTimeZone.UTC).getMillis();
        for (BillingFile billingFile : filesToProcess.get(dataTime)) {

            S3ObjectSummary objectSummary = billingFile.s3ObjectSummary;
            String fileKey = objectSummary.getKey();

            File file = new File(config.localDir, fileKey.substring(billingFile.prefix.length()));
            logger.info("trying to download " + fileKey + "...");
            boolean downloaded = AwsUtils.downloadFileIfChangedSince(objectSummary.getBucketName(),
                    billingFile.prefix, file, lastProcessed, billingFile.accountId, billingFile.accessRoleName,
                    billingFile.externalId);
            if (downloaded)
                logger.info("downloaded " + fileKey);
            else {
                logger.info("file already downloaded " + fileKey + "...");
            }

            logger.info("processing " + fileKey + "...");
            boolean withTags = fileKey.contains("with-resources-and-tags");
            hasTags = hasTags || withTags;
            processingMonitor = false;
            processBillingZipFile(file, withTags);
            logger.info("done processing " + fileKey);
        }

        if (monitorFilesToProcess.get(dataTime) != null) {
            for (BillingFile monitorBillingFile : monitorFilesToProcess.get(dataTime)) {

                S3ObjectSummary monitorObjectSummary = monitorBillingFile.s3ObjectSummary;
                if (monitorObjectSummary != null) {
                    String monitorFileKey = monitorObjectSummary.getKey();
                    logger.info("processing " + monitorFileKey + "...");
                    File monitorFile = new File(config.localDir,
                            monitorFileKey.substring(monitorFileKey.lastIndexOf("/") + 1));
                    logger.info("trying to download " + monitorFileKey + "...");
                    boolean downloaded = AwsUtils.downloadFileIfChangedSince(
                            monitorObjectSummary.getBucketName(), monitorBillingFile.prefix, monitorFile,
                            lastProcessed, monitorBillingFile.accountId, monitorBillingFile.accessRoleName,
                            monitorBillingFile.externalId);
                    if (downloaded)
                        logger.info("downloaded " + monitorFile);
                    else
                        logger.warn(monitorFile + "already downloaded...");
                    FileInputStream in = new FileInputStream(monitorFile);
                    try {
                        processingMonitor = true;
                        processBillingFile(monitorFile.getName(), in, true);
                    } catch (Exception e) {
                        logger.error("Error processing " + monitorFile, e);
                    } finally {
                        in.close();
                    }
                }
            }
        }

        if (dataTime.equals(filesToProcess.lastKey())) {
            int hours = (int) ((endMilli - startMilli) / 3600000L);
            logger.info("cut hours to " + hours);
            cutData(hours);
        }

        // now get reservation capacity to calculate upfront and unused cost
        for (Ec2InstanceReservationPrice.ReservationUtilization utilization : Ec2InstanceReservationPrice.ReservationUtilization
                .values())
            processReservations(utilization);

        if (hasTags && config.resourceService != null)
            config.resourceService.commit();

        logger.info("archiving results for " + dataTime + "...");
        archive();
        logger.info("done archiving " + dataTime);

        updateProcessTime(AwsUtils.monthDateFormat.print(dataTime), processTime);
        if (dataTime.equals(filesToProcess.lastKey())) {
            sendOndemandCostAlert();
        }
    }

    logger.info("AWS usage processed.");
}
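
Both grouping loops near the end of poll() use the same get-or-create idiom on a TreeMap: get(key) returns null the first time a month is seen, so a fresh list is created and put back before the file is appended. A stripped-down sketch of the pattern, with hypothetical month keys and file names in place of the DateTime/BillingFile types:

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

public class GetOrCreateSketch {
    public static void main(String[] args) {
        String[][] rows = { { "2014-01", "a.csv" }, { "2014-01", "b.csv" }, { "2014-02", "c.csv" } };

        TreeMap<String, List<String>> filesByMonth = new TreeMap<>();
        for (String[] row : rows) {
            // the poll() pattern: get(), then put() a fresh list when get() returns null
            List<String> list = filesByMonth.get(row[0]);
            if (list == null) {
                list = new ArrayList<>();
                filesByMonth.put(row[0], list);
            }
            list.add(row[1]);
        }

        // since Java 8, computeIfAbsent() collapses the same get/put dance into one call
        filesByMonth.computeIfAbsent("2014-03", k -> new ArrayList<>()).add("d.csv");

        // TreeMap keeps keys sorted, so lastKey() is always the most recent month,
        // which is how poll() decides when to cut data and send the cost alert
        System.out.println(filesByMonth.lastKey() + " -> " + filesByMonth);
    }
}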

From source file:io.apiman.manager.api.rest.impl.OrganizationResourceImpl.java

/**
 * @see io.apiman.manager.api.rest.contract.IOrganizationResource#listMembers(java.lang.String)
 */
@Override
public List<MemberBean> listMembers(String organizationId)
        throws OrganizationNotFoundException, NotAuthorizedException {
    get(organizationId);

    try {
        Set<RoleMembershipBean> memberships = query.getOrgMemberships(organizationId);
        TreeMap<String, MemberBean> members = new TreeMap<>();
        storage.beginTx();
        for (RoleMembershipBean membershipBean : memberships) {
            String userId = membershipBean.getUserId();
            String roleId = membershipBean.getRoleId();
            RoleBean role = storage.getRole(roleId);

            // Role does not exist!
            if (role == null) {
                continue;
            }

            MemberBean member = members.get(userId);
            if (member == null) {
                UserBean user = storage.getUser(userId);
                member = new MemberBean();
                member.setEmail(user.getEmail());
                member.setUserId(userId);
                member.setUserName(user.getFullName());
                member.setRoles(new ArrayList<>());
                members.put(userId, member);
            }
            MemberRoleBean mrb = new MemberRoleBean();
            mrb.setRoleId(roleId);
            mrb.setRoleName(role.getName());
            member.getRoles().add(mrb);
            if (member.getJoinedOn() == null
                    || membershipBean.getCreatedOn().compareTo(member.getJoinedOn()) < 0) {
                member.setJoinedOn(membershipBean.getCreatedOn());
            }
        }
        return new ArrayList<>(members.values());
    } catch (StorageException e) {
        throw new SystemErrorException(e);
    } finally {
        storage.rollbackTx();
    }
}
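
listMembers() leans on TreeMap behavior twice: members.get(userId) returns null only for a user's first membership, which triggers creation of the MemberBean, and values() iterates in key order, so the returned list is sorted by user id. A compact sketch of that aggregation, with plain strings standing in for the apiman beans:

import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;

public class MemberAggregationSketch {
    public static void main(String[] args) {
        // (userId, roleId) pairs standing in for the RoleMembershipBean records
        String[][] memberships = { { "carol", "admin" }, { "alice", "viewer" }, { "alice", "editor" } };

        TreeMap<String, List<String>> rolesByUser = new TreeMap<>();
        for (String[] m : memberships) {
            List<String> roles = rolesByUser.get(m[0]); // null on a user's first membership
            if (roles == null) {
                roles = new ArrayList<>();
                rolesByUser.put(m[0], roles);
            }
            roles.add(m[1]);
        }

        // values() follows key order, so members come back sorted by user id
        System.out.println(new ArrayList<>(rolesByUser.values())); // [[viewer, editor], [admin]]
    }
}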