Example usage for java.util TreeMap size

List of usage examples for java.util TreeMap size

Introduction

In this page you can find the example usage for java.util TreeMap size.

Prototype

public int size()

To view the source code for java.util TreeMap size, click the Source Link below.

Click Source Link

Document

Returns the number of key-value mappings (entries) in the tree.

Usage

From source file:org.commoncrawl.service.listcrawler.CrawlList.java

/**
 * Loads per-subdomain metadata for this list and builds the in-memory
 * {@code _offsetLookupTable} (domainHash -> file offset pairs, sorted by hash).
 *
 * If the metadata file exists, records are read from its fixed-size slots.
 * Otherwise the metadata is rebuilt from scratch by scanning the fixed/variable
 * data files and is then persisted via writeInitialSubDomainMetadataToDisk().
 *
 * @throws IOException on unrecoverable read failures of the underlying files
 */
void loadSubDomainMetadataFromDisk() throws IOException {
    LOG.info("*** LIST:" + getListId() + " LOAD SUBDOMAIN METADATA FROM DISK ...  ");
    if (_subDomainMetadataFile.exists()) {

        LOG.info("*** LIST:" + getListId() + " FILE EXISTS LOADING SUBDOMAIN DATA FROM DISK.");

        RandomAccessFile file = new RandomAccessFile(_subDomainMetadataFile, "rw");
        DataInputBuffer inputBuffer = new DataInputBuffer();
        byte fixedDataBlock[] = new byte[CrawlListMetadata.Constants.FixedDataSize];

        try {
            // skip the single version byte at the head of the file
            file.read();
            // read item count
            int itemCount = file.readInt();

            LOG.info("*** LIST:" + getListId() + " SUBDOMAIN ITEM COUNT:" + itemCount);

            TreeMap<Long, Integer> idToOffsetMap = new TreeMap<Long, Integer>();
            for (int i = 0; i < itemCount; ++i) {

                long originalPos = file.getFilePointer();
                file.readFully(fixedDataBlock, 0, fixedDataBlock.length);
                inputBuffer.reset(fixedDataBlock, fixedDataBlock.length);
                // BUGFIX: allocate a fresh metadata object per record. The previous code
                // reused one instance across iterations, so a failed deserialize left the
                // PREVIOUS record's domain hash in place and mapped it to the wrong offset.
                CrawlListMetadata newMetadata = new CrawlListMetadata();
                try {
                    newMetadata.deserialize(inputBuffer, new BinaryProtocol());
                } catch (Exception e) {
                    LOG.error("-----Failed to Deserialize Metadata at Index:" + i + " Exception:"
                            + CCStringUtils.stringifyException(e));
                    // skip the corrupt record rather than indexing stale/garbage data
                    continue;
                }
                // NOTE: offset is narrowed to int; assumes metadata file < 2GB — TODO confirm
                idToOffsetMap.put(newMetadata.getDomainHash(), (int) originalPos);
            }

            // write lookup table (TreeMap iteration yields entries sorted by domain hash)
            _offsetLookupTable = new DataOutputBuffer(idToOffsetMap.size() * OFFSET_TABLE_ENTRY_SIZE);
            for (Map.Entry<Long, Integer> entry : idToOffsetMap.entrySet()) {
                _offsetLookupTable.writeLong(entry.getKey());
                _offsetLookupTable.writeInt(entry.getValue());
            }
        } finally {
            file.close();
        }
        LOG.info("*** LIST:" + getListId() + " DONE LOADING SUBDOMAIN DATA FROM DISK");
    } else {

        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA DOES NOT EXIST! LOADING FROM SCRATCH");

        RandomAccessFile fixedDataReader = new RandomAccessFile(_fixedDataFile, "rw");
        RandomAccessFile stringDataReader = new RandomAccessFile(_variableDataFile, "rw");

        try {

            // ok rebuild top level metadata as well
            _metadata.clear();

            OnDiskCrawlHistoryItem item = new OnDiskCrawlHistoryItem();

            int processedCount = 0;
            while (fixedDataReader.getFilePointer() != fixedDataReader.length()) {

                long position = fixedDataReader.getFilePointer();

                // store offset in item
                item._fileOffset = position;
                // load from disk
                item.deserialize(fixedDataReader);
                try {
                    // seek to string data
                    stringDataReader.seek(item._stringsOffset);
                    // and skip buffer length
                    WritableUtils.readVInt(stringDataReader);
                    // and read primary string
                    String url = stringDataReader.readUTF();

                    // get metadata object for subdomain
                    CrawlListMetadata subDomainMetadata = getTransientSubDomainMetadata(url);

                    // increment url count
                    subDomainMetadata.setUrlCount(subDomainMetadata.getUrlCount() + 1);

                    // increment top level metadata count
                    _metadata.setUrlCount(_metadata.getUrlCount() + 1);

                    // update top level metadata ..
                    updateMetadata(item, _metadata, 0);

                    // update sub-domain metadata object from item data
                    updateMetadata(item, subDomainMetadata, 0);

                    ++processedCount;
                } catch (IOException e) {
                    // best-effort: log and continue with the next fixed-size record
                    LOG.error("Exception Reading String Data For Item:" + (processedCount + 1));
                    LOG.error("Exception:" + CCStringUtils.stringifyException(e));
                    LOG.error("File Position:" + fixedDataReader.getFilePointer() + " StringsPointer:"
                            + stringDataReader.getFilePointer());
                }

                if (processedCount % 10000 == 0) {
                    LOG.info("*** LIST:" + getListId() + " Processed:" + processedCount + " Items");
                }
            }

            // ok commit top level metadata to disk as well
            writeMetadataToDisk();

        } catch (IOException e) {
            LOG.error("Encountered Exception Queueing Items for List:" + _listId + " Exception:"
                    + CCStringUtils.stringifyException(e));
            LOG.error("File Position:" + fixedDataReader.getFilePointer() + " StringsPointer:"
                    + stringDataReader.getFilePointer());
            _queueState = QueueState.QUEUED;
        } finally {
            fixedDataReader.close();
            stringDataReader.close();
        }
        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA REBUILT FROM LIST DATA . WRITING TO DISK");

        // write metadata to disk
        writeInitialSubDomainMetadataToDisk();

        LOG.info("*** LIST:" + getListId() + " SUBDOMAIN METADATA REBUILT FROM LIST DATA . WRITE COMPLETE");
    }
}

From source file:cx.ring.service.LocalService.java

/**
 * Refreshes the per-conversation text-message notifications: for every
 * conversation with unread messages whose newest message has not yet been
 * notified, cancels the old notification and posts a rebuilt one (single
 * message, or an inbox-style digest for multiple messages).
 */
public void updateTextNotifications() {
    Log.d(TAG, "updateTextNotifications()");

    for (Conversation conversation : conversations.values()) {
        TreeMap<Long, TextMessage> unread = conversation.getUnreadTextMessages();

        // Nothing new to surface: either no unread messages, or the newest
        // one was already shown in a notification.
        if (unread.isEmpty() || unread.lastEntry().getValue().isNotified()) {
            continue;
        }
        notificationManager.cancel(conversation.notificationId);

        CallContact contact = conversation.getContact();
        NotificationCompat.Builder builder = conversation.notificationBuilder;
        if (builder == null) {
            // Lazily create and cache the builder with the static attributes.
            builder = new NotificationCompat.Builder(getApplicationContext());
            builder.setCategory(NotificationCompat.CATEGORY_MESSAGE)
                    .setPriority(NotificationCompat.PRIORITY_HIGH)
                    .setDefaults(NotificationCompat.DEFAULT_ALL)
                    .setSmallIcon(R.drawable.ic_launcher)
                    .setContentTitle(contact.getDisplayName());
            conversation.notificationBuilder = builder;
        }

        // Tap opens the conversation; swipe-away marks it read.
        Uri conversationUri = Uri.withAppendedPath(ConversationActivity.CONTENT_URI, contact.getIds().get(0));
        Intent viewIntent = new Intent(Intent.ACTION_VIEW)
                .setClass(this, ConversationActivity.class)
                .setData(conversationUri);
        Intent readIntent = new Intent(ACTION_CONV_READ)
                .setClass(this, LocalService.class)
                .setData(conversationUri);
        builder.setContentIntent(PendingIntent.getActivity(this, new Random().nextInt(), viewIntent, 0))
                .setDeleteIntent(PendingIntent.getService(this, new Random().nextInt(), readIntent, 0));

        Bitmap photo = contact.getPhoto();
        if (photo != null) {
            Resources res = getResources();
            int height = (int) res.getDimension(android.R.dimen.notification_large_icon_height);
            int width = (int) res.getDimension(android.R.dimen.notification_large_icon_width);
            builder.setLargeIcon(Bitmap.createScaledBitmap(photo, width, height, false));
        }

        if (unread.size() == 1) {
            // Single message: plain notification with that message's text.
            TextMessage only = unread.firstEntry().getValue();
            only.setNotified(true);
            builder.setContentText(only.getMessage());
            builder.setStyle(null);
            builder.setWhen(only.getTimestamp());
        } else {
            // Several messages: inbox-style digest, one line per message.
            NotificationCompat.InboxStyle inbox = new NotificationCompat.InboxStyle();
            for (TextMessage message : unread.values()) {
                inbox.addLine(Html.fromHtml("<b>"
                        + DateUtils.formatDateTime(this, message.getTimestamp(),
                                DateUtils.FORMAT_SHOW_TIME | DateUtils.FORMAT_ABBREV_ALL)
                        + "</b> " + message.getMessage()));
                message.setNotified(true);
            }
            TextMessage newest = unread.lastEntry().getValue();
            builder.setContentText(newest.getMessage());
            builder.setStyle(inbox);
            builder.setWhen(newest.getTimestamp());
        }
        notificationManager.notify(conversation.notificationId, builder.build());
    }
}

From source file:com.projity.server.data.Serializer.java

/**
 * Serializes a project (resources, tasks, links, assignments, field values and
 * work/cost distributions) into a {@code ProjectData} transfer object.
 *
 * Incremental mode diffs the newly generated distributions against the
 * project's previous distribution map and tags each one INSERT / UPDATE /
 * REMOVE (unchanged ones are dropped from the outgoing collection).
 *
 * @param project         the project to serialize
 * @param flatAssignments collection handed to saveTasks for assignment data
 *                        (presumably populated there — TODO confirm direction)
 * @param flatLinks       collection handed to saveTasks for link data
 * @param incremental     if true, only changed data is sent; forced off when
 *                        project.isForceNonIncremental() is set
 * @param options         serialization tuning options passed to saveTasks
 * @return the populated ProjectData ready to send to the server
 * @throws Exception propagated from the underlying serialization steps
 */
public ProjectData serializeProject(Project project, Collection flatAssignments, Collection flatLinks,
        boolean incremental, SerializeOptions options) throws Exception {
    if (TMP_FILES)
        initTmpDir();
    if (project.isForceNonIncremental())
        incremental = false;
    // distributions can be forced non-incremental independently of the rest
    boolean incrementalDistributions = incremental && !project.isForceNonIncrementalDistributions();

    //      calendars.clear();
    Count projectCount = new Count("Project");
    //if (globalIdsOnly) makeGLobal(project);
    ProjectData projectData = (ProjectData) serialize(project, ProjectData.FACTORY, projectCount);
    if (project.isForceNonIncremental())
        projectData.setVersion(0); // version 0 signals a full (non-incremental) snapshot
    projectData.setMaster(project.isMaster());
    //        projectData.setExternalId(project.getExternalId());

    //exposed attributes
    //        projectData.setAttributes(SpreadSheetFieldArray.convertFields(project, "projectExposed", new Transformer(){
    //           public Object transform(Object value) {
    //              if (value instanceof Money) return ((Money)value).doubleValue();
    //              return null;
    //           }
    //        }));

    projectCount.dump();

    //resources
    Map resourceMap = saveResources(project, projectData);

    //tasks
    saveTasks(project, projectData, resourceMap, flatAssignments, flatLinks, incremental, options);

    //distribution
    long t = System.currentTimeMillis();
    Collection<DistributionData> dist = (Collection<DistributionData>) (new DistributionConverter())
            .createDistributionData(project, incrementalDistributions);
    if (dist == null) {
        dist = new ArrayList<DistributionData>();
    }
    projectData.setDistributions(dist);
    projectData.setIncrementalDistributions(incrementalDistributions);

    // previous distribution map (state as of the last serialization), used as
    // the baseline for the incremental diff below
    TreeMap<DistributionData, DistributionData> distMap = project.getDistributionMap();
    if (distMap == null) {
        distMap = new TreeMap<DistributionData, DistributionData>(new DistributionComparator());
        project.setDistributionMap(distMap);
    }
    TreeMap<DistributionData, DistributionData> newDistMap = new TreeMap<DistributionData, DistributionData>(
            new DistributionComparator());
    //ArrayList<DistributionData> toInsertInOld=new ArrayList<DistributionData>();

    //insert, update dist
    for (Iterator<DistributionData> i = dist.iterator(); i.hasNext();) {
        DistributionData d = i.next();
        if (incrementalDistributions) {
            DistributionData oldD = distMap.get(d);
            if (oldD == null) {
                // not seen before -> insert
                d.setStatus(DistributionData.INSERT);
            } else {
                if (oldD.getWork() == d.getWork() && oldD.getCost() == d.getCost()) {
                    // unchanged -> drop from the outgoing collection entirely
                    d.setStatus(0);
                    i.remove();
                } else
                    d.setStatus(DistributionData.UPDATE);
            }
        } else {
            // full snapshot: everything is an insert
            d.setStatus(DistributionData.INSERT);
        }
        newDistMap.put(d, d);
    }
    //remove dist
    if (incrementalDistributions && distMap.size() > 0) {
        // tasks that did not change this round keep their old distributions
        Set<Long> noChangeTaskIds = new HashSet<Long>();

        Task task;
        for (Iterator i = project.getTaskOutlineIterator(); i.hasNext();) {
            task = (Task) i.next();
            if (incremental && !task.isDirty())
                noChangeTaskIds.add(task.getUniqueId());
        }
        //           for (Iterator i=projectData.getTasks().iterator();i.hasNext();){
        //              TaskData task=(TaskData)i.next();
        //              if (!task.isDirty()) noChangeTaskIds.add(task.getUniqueId());
        //           }
        for (Iterator<DistributionData> i = distMap.values().iterator(); i.hasNext();) {
            DistributionData d = i.next();
            if (newDistMap.containsKey(d))
                continue; // still present, already handled above
            if (noChangeTaskIds.contains(d.getTaskId())) {
                // task untouched: carry the old distribution forward unchanged
                d.setStatus(0);
                newDistMap.put(d, d);
            } else {
                // task changed and distribution no longer generated -> delete on server
                d.setStatus(DistributionData.REMOVE);
                dist.add(d);
            }
        }
    }
    project.setNewDistributionMap(newDistMap);
    System.out.println("Distributions generated in " + (System.currentTimeMillis() - t) + " ms");

    // send project field values to server too
    HashMap fieldValues = FieldValues.getValues(FieldDictionary.getInstance().getProjectFields(), project);
    if (project.getContainingSubprojectTask() != null) { // special case in which we want to use the duration from subproject task
        Object durationFieldValue = Configuration.getFieldFromId("Field.duration")
                .getValue(project.getContainingSubprojectTask(), null);
        fieldValues.put("Field.duration", durationFieldValue);
    }
    projectData.setFieldValues(fieldValues);
    projectData.setGroup(project.getGroup());
    projectData.setDivision(project.getDivision());
    projectData.setExpenseType(project.getExpenseType());
    projectData.setProjectType(project.getProjectType());
    projectData.setProjectStatus(project.getProjectStatus());
    projectData.setExtraFields(project.getExtraFields());
    projectData.setAccessControlPolicy(project.getAccessControlPolicy());
    projectData.setCreationDate(project.getCreationDate());
    projectData.setLastModificationDate(project.getLastModificationDate());
    //     System.out.println("done serialize project " + project);

    // NOTE(review): a large block of commented-out id-tracking code
    // (new task/link id bookkeeping) used to live here; removed as dead code.

    //project.setNewIds(); //claur - useful ?

    return projectData;

}

From source file:org.commoncrawl.service.listcrawler.CrawlList.java

/**
 * Persists the transient per-subdomain metadata to disk as a version byte, a
 * record count, and one fixed-size slot per subdomain (ordered by descending
 * url count, then domain name), and builds the in-memory offset lookup table
 * (domainHash -> slot offset, sorted by hash).
 *
 * Releases {@code _transientSubDomainStats} once its contents are captured.
 *
 * @throws IOException if writing the metadata file fails
 */
void writeInitialSubDomainMetadataToDisk() throws IOException {

    RandomAccessFile file = new RandomAccessFile(_subDomainMetadataFile, "rw");

    try {
        file.writeByte(0); // version
        file.writeInt(_transientSubDomainStats.size());

        ArrayList<CrawlListMetadata> sortedMetadata = new ArrayList<CrawlListMetadata>();
        sortedMetadata.addAll(_transientSubDomainStats.values());
        // release the transient map; its contents now live in sortedMetadata
        // (the redundant second null-assignment after the finally was removed)
        _transientSubDomainStats = null;
        CrawlListMetadata metadataArray[] = sortedMetadata.toArray(new CrawlListMetadata[0]);
        // order: descending url count, ties broken by domain name for stability
        Arrays.sort(metadataArray, new Comparator<CrawlListMetadata>() {

            @Override
            public int compare(CrawlListMetadata o1, CrawlListMetadata o2) {
                // Integer.compare avoids the boxing of ((Integer) o2...).compareTo(...)
                int result = Integer.compare(o2.getUrlCount(), o1.getUrlCount());
                if (result == 0) {
                    result = o1.getDomainName().compareTo(o2.getDomainName());
                }
                return result;
            }
        });

        DataOutputBuffer outputBuffer = new DataOutputBuffer(CrawlListMetadata.Constants.FixedDataSize);

        TreeMap<Long, Integer> idToOffsetMap = new TreeMap<Long, Integer>();

        for (CrawlListMetadata entry : metadataArray) {
            // reset output buffer 
            outputBuffer.reset();
            // write item to disk 
            entry.serialize(outputBuffer, new BinaryProtocol());

            if (outputBuffer.getLength() > CrawlListMetadata.Constants.FixedDataSize) {
                // record overflows its fixed slot and will be truncated below
                LOG.fatal("Metadata Serialization Exceeded FixedDataSize for List:" + getListId() + " SubDomain:"
                        + entry.getDomainName());
                System.out.println("Metadata Serialization Exceeded FixedDataSize for List:" + getListId()
                        + " SubDomain:" + entry.getDomainName());
            }
            // save offset (keyed by domain hash) before writing the slot
            idToOffsetMap.put(entry.getDomainHash(), (int) file.getFilePointer());
            // write out fixed data size 
            file.write(outputBuffer.getData(), 0, CrawlListMetadata.Constants.FixedDataSize);
        }

        // write lookup table (TreeMap iteration yields hash-sorted entries)
        _offsetLookupTable = new DataOutputBuffer(idToOffsetMap.size() * OFFSET_TABLE_ENTRY_SIZE);

        for (Map.Entry<Long, Integer> entry : idToOffsetMap.entrySet()) {
            _offsetLookupTable.writeLong(entry.getKey());
            _offsetLookupTable.writeInt(entry.getValue());
        }
    } finally {
        file.close();
    }
}

From source file:net.spfbl.core.User.java

/**
 * Returns up to QUERY_MAX_ROWS + 1 of this user's newest queries, merging the
 * in-memory head map with rows fetched from MySQL, optionally filtered.
 *
 * @param begin  upper bound on query time (inclusive), or null for no bound
 * @param filter free-text filter; may be an IP, an e-mail address, the literal
 *               "rejeitada" (rejected results only), empty, or null
 * @return newest-first selection as a TreeMap keyed by query time
 */
public TreeMap<Long, Query> getQueryMap(Long begin, String filter) {
    TreeMap<Long, Query> queryLocalMap = getQueryHeadMap(begin);
    Connection connection = Core.poolConnectionMySQL();
    try {
        if (connection != null) {
            try {
                String ipParam = Subnet.isValidIP(filter) ? Subnet.normalizeIP(filter) : null;
                String emailParam = Domain.isValidEmail(filter) ? filter.toLowerCase() : null;
                // SECURITY FIX: use a PreparedStatement with '?' placeholders instead of
                // concatenating user-controlled values (email, filter) into the SQL text.
                StringBuilder command = new StringBuilder();
                command.append("SELECT * FROM spfbl.user_query\n");
                command.append("WHERE user = ?\n");
                if (begin != null) {
                    command.append("AND time <= ?\n");
                }
                if ("rejeitada".equals(filter)) {
                    command.append("AND result IN('BLOCK','REJECT')\n");
                }
                if (ipParam != null) {
                    command.append("AND ip = ?\n");
                }
                if (emailParam != null) {
                    command.append("AND ? IN(sender, mailFrom, replyto, recipient)\n");
                }
                command.append("ORDER BY time DESC\n");
                // fetch one extra row so callers can detect "more available"
                command.append("LIMIT ").append(QUERY_MAX_ROWS + 1);
                java.sql.PreparedStatement statement = connection.prepareStatement(command.toString());
                try {
                    int index = 1;
                    statement.setString(index++, getEmail());
                    if (begin != null) {
                        statement.setLong(index++, begin);
                    }
                    if (ipParam != null) {
                        statement.setString(index++, ipParam);
                    }
                    if (emailParam != null) {
                        statement.setString(index++, emailParam);
                    }
                    ResultSet rs = statement.executeQuery();
                    while (rs.next()) {
                        try {
                            long time = rs.getLong("time");
                            // rows already present in the local head map win
                            Query query = queryLocalMap.get(time);
                            if (query == null) {
                                query = new Query(rs);
                                queryLocalMap.put(time, query);
                            }
                        } catch (Exception ex) {
                            Server.logError(ex);
                        }
                    }
                } finally {
                    statement.close();
                }
            } catch (SQLException ex) {
                Server.logError(ex);
            }
        }
    } finally {
        Core.offerConnectionMySQL(connection);
    }
    // take the newest entries first, applying the textual filter client-side
    TreeMap<Long, Query> resultMap = new TreeMap<Long, Query>();
    while (resultMap.size() < (QUERY_MAX_ROWS + 1)) {
        Entry<Long, Query> entry = queryLocalMap.pollLastEntry();
        if (entry == null) {
            break;
        } else {
            long time = entry.getKey();
            Query query = entry.getValue();
            if (filter == null || filter.length() == 0 || query.match(filter)) {
                resultMap.put(time, query);
            }
        }
    }
    return resultMap;
}

From source file:com.enonic.vertical.adminweb.handlers.ContentBaseHandlerServlet.java

/**
 * Builds the XML model for the content "preview in site" dialog and renders it
 * through contenttype_preview_list.xsl.
 *
 * Collects the sites that have page templates usable for the given content
 * type (sorted by name), resolves the effective site/page-template keys, and
 * merges page-template and content-home documents into the output model.
 *
 * @param request   current admin servlet request
 * @param response  response the transformed page is written to
 * @param admin     admin service facade used for all lookups
 * @param formItems submitted form fields (contentkey, menukey, versionkey, ...)
 * @param user      the logged-in admin user
 * @throws VerticalAdminException  if page-template resolution fails
 * @throws VerticalEngineException propagated from the admin service
 */
private void handlerPreviewSiteList(HttpServletRequest request, HttpServletResponse response,
        AdminService admin, ExtendedMap formItems, User user)
        throws VerticalAdminException, VerticalEngineException {
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("page", formItems.get("page"));
    int unitKey = formItems.getInt("selectedunitkey", -1);
    int siteKey = formItems.getInt("menukey", -1);

    int contentKey = formItems.getInt("contentkey", -1);
    int contentTypeKey;
    if (contentKey >= 0) {
        // existing content: derive the content type from the content itself
        parameters.put("contentkey", contentKey);
        contentTypeKey = admin.getContentTypeKey(contentKey);
        parameters.put("sessiondata", formItems.getBoolean("sessiondata", false));
    } else {
        // new content: the content type comes directly from the form
        contentTypeKey = formItems.getInt("contenttypekey", -1);
    }
    parameters.put("contenttypekey", contentTypeKey);

    int versionKey = formItems.getInt("versionkey", -1);
    if (versionKey != -1) {
        parameters.put("versionkey", versionKey);
    }

    Document doc = XMLTool.domparse(admin.getAdminMenu(user, -1));
    Element rootSitesElement = doc.getDocumentElement();
    Element[] allSiteElements = XMLTool.getElements(rootSitesElement);
    int defaultPageTemplateKey = -1;
    if (allSiteElements.length > 0) {
        // keep only sites that can render this content type, sorted by name;
        // every site element is detached and the eligible ones re-attached below
        TreeMap<String, Element> allSitesMap = new TreeMap<String, Element>();
        for (Element siteElement : allSiteElements) {
            int mKey = Integer.valueOf(siteElement.getAttribute("key"));
            if (admin.hasContentPageTemplates(mKey, contentTypeKey)) {
                String name = siteElement.getAttribute("name");
                allSitesMap.put(name, siteElement);
            }
            rootSitesElement.removeChild(siteElement);
        }

        if (allSitesMap.size() > 0) {
            Element firstMenuElem = allSitesMap.get(allSitesMap.firstKey());
            if (siteKey < 0) {
                // no site selected: default to the first eligible site by name
                siteKey = Integer.valueOf(firstMenuElem.getAttribute("key"));
            }

            // re-attach eligible sites in name order and pick up the selected
            // site's default page template, if any
            for (Element siteElement : allSitesMap.values()) {
                rootSitesElement.appendChild(siteElement);
                int key = Integer.parseInt(siteElement.getAttribute("key"));
                if (key == siteKey) {
                    String defaultPageTemplateAttr = siteElement.getAttribute("defaultpagetemplate");
                    if (defaultPageTemplateAttr != null && !defaultPageTemplateAttr.equals("")) {
                        defaultPageTemplateKey = Integer.parseInt(defaultPageTemplateAttr);
                    }

                }
            }
        }
    }

    addCommonParameters(admin, user, request, parameters, unitKey, siteKey);

    if (siteKey >= 0) {
        // NOTE(review): magic exclusion list — presumably non-content page
        // template types; confirm against the page template type constants
        int[] excludeTypeKeys = { 1, 2, 3, 4, 6 };
        String pageTemplateXML = admin.getPageTemplatesByMenu(siteKey, excludeTypeKeys);
        Document ptDoc = XMLTool.domparse(pageTemplateXML);
        XMLTool.mergeDocuments(doc, ptDoc, true);

        if (contentKey >= 0) {
            Document chDoc = XMLTool.domparse(admin.getContentHomes(contentKey));
            XMLTool.mergeDocuments(doc, chDoc, true);
        }

        if (formItems.containsKey("pagetemplatekey")) {
            int pageTemplateKey = formItems.getInt("pagetemplatekey");
            parameters.put("pagetemplatekey", String.valueOf(pageTemplateKey));
        } else {
            if (contentTypeKey >= 0) {
                // no explicit template: choose the first (sorted) template that
                // supports this content type, else fall back to the site default
                org.jdom.Document pageTemplateDocument = XMLTool.jdomparse(pageTemplateXML);
                org.jdom.Element root = pageTemplateDocument.getRootElement();
                List<org.jdom.Element> pageTemplates = root.getChildren("pagetemplate");
                Set<KeyValue> pageTemplateKeys = new HashSet<KeyValue>();
                for (org.jdom.Element pageTemplate : pageTemplates) {

                    int pageTemplateKey = Integer.parseInt(pageTemplate.getAttribute("key").getValue());
                    org.jdom.Element contentTypesNode = pageTemplate.getChild("contenttypes");
                    List<org.jdom.Element> contentTypeElements = contentTypesNode.getChildren("contenttype");

                    if (checkMatchingContentType(contentTypeKey, contentTypeElements)) {
                        KeyValue keyValue = new KeyValue(pageTemplateKey, pageTemplate.getChildText("name"));
                        pageTemplateKeys.add(keyValue);
                    }
                }
                if (pageTemplateKeys.size() > 0) {
                    KeyValue[] keys = new KeyValue[pageTemplateKeys.size()];
                    keys = pageTemplateKeys.toArray(keys);
                    Arrays.sort(keys);
                    parameters.put("pagetemplatekey", keys[0].key);
                } else {
                    if (defaultPageTemplateKey < 0) {
                        throw new VerticalAdminException("Unable to resolve page template. "
                                + "No matching page template found and default page template is not set.");
                    }
                    parameters.put("pagetemplatekey", String.valueOf(defaultPageTemplateKey));
                }

            }
        }

        if (formItems.containsKey("menuitemkey")) {
            parameters.put("menuitemkey", formItems.get("menuitemkey"));
        }
    }

    transformXML(request, response, doc, "contenttype_preview_list.xsl", parameters);
}

From source file:io.druid.query.search.SearchQueryRunner.java

/**
 * Executes a Druid search query against this runner's segment, returning the
 * matching dimension values (capped at the query limit).
 *
 * Two paths: if the segment exposes a QueryableIndex, matches are counted via
 * bitmap indexes (optionally intersected with a time-interval bitmap);
 * otherwise it falls back to scanning row cursors through a StorageAdapter.
 *
 * @param input           the query; must be a SearchQuery
 * @param responseContext per-query response context (unused here)
 * @return a sequence with one Result holding the accumulated search hits
 * @throws ISE if the query is not a SearchQuery or the segment is unmapped
 * @throws IAE if the query spec has more than one interval
 */
@Override
public Sequence<Result<SearchResultValue>> run(final Query<Result<SearchResultValue>> input,
        Map<String, Object> responseContext) {
    if (!(input instanceof SearchQuery)) {
        throw new ISE("Got a [%s] which isn't a %s", input.getClass(), SearchQuery.class);
    }

    final SearchQuery query = (SearchQuery) input;
    final Filter filter = Filters.convertToCNFFromQueryContext(query,
            Filters.toFilter(query.getDimensionsFilter()));
    final List<DimensionSpec> dimensions = query.getDimensions();
    final SearchQuerySpec searchQuerySpec = query.getQuery();
    final int limit = query.getLimit();
    final boolean descending = query.isDescending();
    final List<Interval> intervals = query.getQuerySegmentSpec().getIntervals();
    if (intervals.size() != 1) {
        throw new IAE("Should only have one interval, got[%s]", intervals);
    }
    final Interval interval = intervals.get(0);

    // Closing this will cause segfaults in unit tests.
    final QueryableIndex index = segment.asQueryableIndex();

    if (index != null) {
        // ---- indexed path: count matches via bitmap intersection ----
        // results ordered by the query's sort comparator
        final TreeMap<SearchHit, MutableInt> retVal = Maps.newTreeMap(query.getSort().getComparator());

        Iterable<DimensionSpec> dimsToSearch;
        if (dimensions == null || dimensions.isEmpty()) {
            // no dimensions specified: search every available dimension
            dimsToSearch = Iterables.transform(index.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
        } else {
            dimsToSearch = dimensions;
        }

        final BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();

        final ImmutableBitmap baseFilter = filter == null ? null
                : filter.getBitmapIndex(new ColumnSelectorBitmapIndexSelector(bitmapFactory, index));

        // restrict rows to the query interval only when it does not already
        // cover the whole segment
        ImmutableBitmap timeFilteredBitmap;
        if (!interval.contains(segment.getDataInterval())) {
            MutableBitmap timeBitmap = bitmapFactory.makeEmptyMutableBitmap();
            final Column timeColumn = index.getColumn(Column.TIME_COLUMN_NAME);
            try (final GenericColumn timeValues = timeColumn.getGenericColumn()) {

                int startIndex = Math.max(0, getStartIndexOfTime(timeValues, interval.getStartMillis(), true));
                int endIndex = Math.min(timeValues.length() - 1,
                        getStartIndexOfTime(timeValues, interval.getEndMillis(), false));

                // mark every row whose timestamp falls inside the interval
                for (int i = startIndex; i <= endIndex; i++) {
                    timeBitmap.add(i);
                }

                final ImmutableBitmap finalTimeBitmap = bitmapFactory.makeImmutableBitmap(timeBitmap);
                timeFilteredBitmap = (baseFilter == null) ? finalTimeBitmap
                        : finalTimeBitmap.intersection(baseFilter);
            }
        } else {
            timeFilteredBitmap = baseFilter;
        }

        for (DimensionSpec dimension : dimsToSearch) {
            final Column column = index.getColumn(dimension.getDimension());
            if (column == null) {
                continue; // dimension absent from this segment
            }

            final BitmapIndex bitmapIndex = column.getBitmapIndex();
            ExtractionFn extractionFn = dimension.getExtractionFn();
            if (extractionFn == null) {
                extractionFn = IdentityExtractionFn.getInstance();
            }
            if (bitmapIndex != null) {
                // walk the dimension dictionary; for each accepted value, the
                // hit count is the size of its bitmap (∩ time/filter bitmap)
                for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
                    String dimVal = Strings.nullToEmpty(extractionFn.apply(bitmapIndex.getValue(i)));
                    if (!searchQuerySpec.accept(dimVal)) {
                        continue;
                    }
                    ImmutableBitmap bitmap = bitmapIndex.getBitmap(i);
                    if (timeFilteredBitmap != null) {
                        bitmap = bitmapFactory.intersection(Arrays.asList(timeFilteredBitmap, bitmap));
                    }
                    if (bitmap.size() > 0) {
                        MutableInt counter = new MutableInt(bitmap.size());
                        MutableInt prev = retVal.put(new SearchHit(dimension.getOutputName(), dimVal), counter);
                        if (prev != null) {
                            // same hit seen before: accumulate its count
                            counter.add(prev.intValue());
                        }
                        if (retVal.size() >= limit) {
                            return makeReturnResult(limit, retVal);
                        }
                    }
                }
            }
        }

        return makeReturnResult(limit, retVal);
    }

    // ---- fallback path: scan row cursors through the storage adapter ----
    final StorageAdapter adapter = segment.asStorageAdapter();

    if (adapter == null) {
        log.makeAlert("WTF!? Unable to process search query on segment.")
                .addData("segment", segment.getIdentifier()).addData("query", query).emit();
        throw new ISE(
                "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
    }

    final Iterable<DimensionSpec> dimsToSearch;
    if (dimensions == null || dimensions.isEmpty()) {
        dimsToSearch = Iterables.transform(adapter.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
    } else {
        dimsToSearch = dimensions;
    }

    final Sequence<Cursor> cursors = adapter.makeCursors(filter, interval, query.getGranularity(), descending);

    // accumulate hit counts row-by-row, stopping early once the limit is hit
    final TreeMap<SearchHit, MutableInt> retVal = cursors.accumulate(
            Maps.<SearchHit, SearchHit, MutableInt>newTreeMap(query.getSort().getComparator()),
            new Accumulator<TreeMap<SearchHit, MutableInt>, Cursor>() {
                @Override
                public TreeMap<SearchHit, MutableInt> accumulate(TreeMap<SearchHit, MutableInt> set,
                        Cursor cursor) {
                    if (set.size() >= limit) {
                        return set;
                    }

                    Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
                    for (DimensionSpec dim : dimsToSearch) {
                        dimSelectors.put(dim.getOutputName(), cursor.makeDimensionSelector(dim));
                    }

                    while (!cursor.isDone()) {
                        for (Map.Entry<String, DimensionSelector> entry : dimSelectors.entrySet()) {
                            final DimensionSelector selector = entry.getValue();

                            if (selector != null) {
                                // multi-value rows: every value is tested
                                final IndexedInts vals = selector.getRow();
                                for (int i = 0; i < vals.size(); ++i) {
                                    final String dimVal = selector.lookupName(vals.get(i));
                                    if (searchQuerySpec.accept(dimVal)) {
                                        MutableInt counter = new MutableInt(1);
                                        MutableInt prev = set.put(new SearchHit(entry.getKey(), dimVal),
                                                counter);
                                        if (prev != null) {
                                            counter.add(prev.intValue());
                                        }
                                        if (set.size() >= limit) {
                                            return set;
                                        }
                                    }
                                }
                            }
                        }

                        cursor.advance();
                    }

                    return set;
                }
            });

    return makeReturnResult(limit, retVal);
}

From source file:ubic.gemma.persistence.service.association.coexpression.CoexpressionDaoImpl.java

/**
 * Persists experiment-level coexpression links for the given experiment, and a "flipped"
 * (gene-order-reversed) copy of each link so the association can be queried from either gene.
 * Saves are batched with periodic {@code flush()}/{@code clear()} calls to keep the Hibernate
 * first-level cache bounded. The flipped copies are sorted by first gene before insertion.
 *
 * @param sess        active Hibernate session used for the inserts
 * @param c           factory for creating {@link ExperimentCoexpressionLink} entities
 * @param links       gene2gene link id -> link data; both orientations reuse the SAME id
 * @param bioAssaySet the experiment the links belong to
 */
private void saveExperimentLevelLinks(Session sess, LinkCreator c,
        TreeMap<Long, NonPersistentNonOrderedCoexpLink> links, BioAssaySet bioAssaySet) {
    final int BATCH_SIZE = 1024; // flush/clear interval; bounds session memory during bulk insert
    final int LOG_INTERVAL = 50000;

    int progress = 0;
    List<ExperimentCoexpressionLink> flippedLinks = new ArrayList<>(links.size());

    // Iterate entries directly rather than keySet()+get(): avoids a second O(log n)
    // TreeMap lookup per link. (Fully qualified so no java.util.Map import is required.)
    for (java.util.Map.Entry<Long, NonPersistentNonOrderedCoexpLink> e : links.entrySet()) {
        Long linkid = e.getKey();
        NonPersistentNonOrderedCoexpLink link = e.getValue();
        ExperimentCoexpressionLink ecl = c.createEELink(bioAssaySet, linkid, link.getFirstGene(),
                link.getSecondGene());

        /*
         * At same time, create flipped versions, but save them later for ordering. Notice that we use the SAME link
         * ID - not the one for the flipped version in the gene2gene table.
         *
         * Ideally we would ensure that the gene2gene link ID used is the same for all links that are between
         * the same pair of genes. That would let us be able to easily count the support directly from an
         * experiment-level query, without going to the supportDetails. I do not believe the current code guarantees
         * this.
         */
        flippedLinks.add(c.createEELink(bioAssaySet, linkid, link.getSecondGene(), link.getFirstGene()));

        sess.save(ecl);

        if (++progress % LOG_INTERVAL == 0) {
            CoexpressionDaoImpl.log
                    .info("Created " + progress + "/" + links.size() + " experiment-level links...");
        }

        // Periodically flush pending inserts and evict them from the session.
        if (progress % BATCH_SIZE == 0) {
            sess.flush();
            sess.clear();
        }
    }

    sess.flush();
    sess.clear();

    /*
     * Sort the flipped links by the first gene so they are inserted in gene order.
     */
    Collections.sort(flippedLinks, new Comparator<ExperimentCoexpressionLink>() {
        @Override
        public int compare(ExperimentCoexpressionLink o1, ExperimentCoexpressionLink o2) {
            return o1.getFirstGene().compareTo(o2.getFirstGene());
        }
    });

    /*
     * Save the flipped ones, batching exactly as above.
     */
    progress = 0;
    for (ExperimentCoexpressionLink fl : flippedLinks) {
        sess.save(fl);

        if (++progress % LOG_INTERVAL == 0) {
            CoexpressionDaoImpl.log
                    .info("Created " + progress + "/" + links.size() + " flipped experiment-level links...");
        }

        if (progress % BATCH_SIZE == 0) {
            sess.flush();
            sess.clear();
        }
    }

    // one for the road.
    sess.flush();
    sess.clear();
}

From source file:org.jactr.eclipse.ui.editor.assist.ACTRContentAssistProposer.java

/**
 * Computes content-assist proposals for the caret position. Recommendations are gathered from
 * the AST context around the caret, filtered against the already-typed prefix, and returned
 * sorted by node name (then node type) via the TreeMap's comparator.
 *
 * @param viewer the text viewer requesting assistance
 * @param offset the caret offset within the document
 * @return sorted proposals, or {@code null} when there are no recommendations or an internal
 *         error occurred (errors are swallowed and only logged at debug level)
 */
public ICompletionProposal[] computeCompletionProposals(ITextViewer viewer, int offset) {
    try {
        // Region and text of the partially typed word immediately before the caret.
        IRegion region = getPrefixRegion(viewer, offset);
        String prefix = getPrefix(viewer, region).toLowerCase();

        // Innermost AST node enclosing the caret; may be null when the caret is outside
        // any recognized construct.
        ASTPosition position = getContextualPosition(viewer, offset);

        if (LOGGER.isDebugEnabled())
            LOGGER.debug(String.format("computing proposals for %d in (%d-%d), prefixed:%s, ASTPosition:%d",
                    offset, region.getOffset(), region.getOffset() + region.getLength(), prefix,
                    position != null ? position.getNode().getType() : -1));

        Map<String, CommonTree> recommendations = getRecommendationsUsingPositions(position, viewer, offset,
                prefix);

        if (LOGGER.isDebugEnabled())
            LOGGER.debug(String.format("Yielded %d recommendations", recommendations.size()));

        if (recommendations.size() == 0)
            return null;

        if (LOGGER.isDebugEnabled())
            LOGGER.debug("Proposing " + recommendations);

        Point selection = viewer.getSelectedRange();

        // Order proposals by node name (case-insensitive), then node type. For distinct
        // nodes the comparator deliberately never returns 0 (falls through to a hashCode
        // comparison) so same-named entries are not collapsed by the TreeMap.
        TreeMap<CommonTree, ICompletionProposal> proposals = new TreeMap<CommonTree, ICompletionProposal>(
                new Comparator<CommonTree>() {

                    public int compare(CommonTree o1, CommonTree o2) {
                        if (o1 == o2)
                            return 0;
                        try {
                            int compare = ASTSupport.getName(o1).compareToIgnoreCase(ASTSupport.getName(o2));
                            if (compare != 0)
                                return compare;
                        } catch (Exception e) {
                            // getName failed: it's a variable (no name node); fall through to type compare.
                        }
                        // compare types..
                        int o1Type = o1.getType();
                        int o2Type = o2.getType();
                        if (o1Type < o2Type)
                            return -1;
                        if (o1Type > o2Type)
                            return 1;
                        // Arbitrary but deterministic tie break; intentionally never 0 for distinct nodes.
                        return o1.hashCode() < o2.hashCode() ? -1 : 1;
                    }

                });

        for (Map.Entry<String, CommonTree> entry : recommendations.entrySet()) {
            // Default replacement span: the typed prefix plus any active selection.
            int start = Math.min(region.getOffset(), selection.x);
            int length = region.getLength() + selection.y;
            CommonTree node = entry.getValue();
            String textToInsert = entry.getKey();
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("Initial proposal (" + prefix + ") : " + textToInsert + " at " + start
                        + " replacing " + length);

            if (node != null)
                try {
                    textToInsert = ASTSupport.getName(entry.getValue());
                    // When the proposal already starts with the typed prefix, insert only the
                    // remainder after the prefix instead of replacing the whole word.
                    // NOTE(review): prefix was lower-cased above but textToInsert is not, so this
                    // startsWith is effectively case-sensitive — confirm that is intended.
                    if (prefix.length() > 0 && textToInsert.startsWith(prefix)) {
                        if (LOGGER.isDebugEnabled())
                            LOGGER.debug("shifting start and length " + region.getLength());
                        textToInsert = textToInsert.substring(region.getLength());
                        start += region.getLength();
                        length -= region.getLength();
                    }
                } catch (Exception e) {
                    // No name node (e.g. a variable); keep the raw recommendation key as the text.
                }

            if (textToInsert.length() == 0)
                continue;

            // Nothing to offer if the proposal is exactly what was already typed.
            if (textToInsert.equals(prefix))
                continue;

            if (LOGGER.isDebugEnabled())
                LOGGER.debug("Proposing : " + textToInsert + " at " + start + " replacing " + length);

            /*
             * note: the displayString is not being used, but rather the
             * textToInsert. this is because for some reason, the display string is
             * being used for the intermediary completion (say, when there are
             * multiple completions that might work). don't know why..
             */
            ACTRCompletionProposal proposal = new ACTRCompletionProposal(textToInsert, start, length,
                    textToInsert.length(), ACTRLabelProvider.getImageOfAST(node), textToInsert, null, null,
                    true);
            proposals.put(node, proposal);
        }

        return proposals.values().toArray(new ICompletionProposal[proposals.size()]);
    } catch (Exception e) {
        // Content assist must never propagate failures into the editor; degrade to "no proposals".
        if (LOGGER.isDebugEnabled())
            LOGGER.debug("something went wrong ", e);
    }

    return null;
}

From source file:com.act.lcms.db.analysis.WaveformAnalysis.java

/**
 * This function takes in a standard molecules's intensity vs time data and a collection of negative controls data
 * and plots the SNR value at each time period, assuming the time jitter effects are negligible (more info on this
 * is here: https://github.com/20n/act/issues/136). Based on the snr values, it rank orders the metlin ions of the
 * molecule./*from   w w  w  . j  a  v a  2s.com*/
 * @param ionToIntensityData A map of chemical to intensity/time data
 * @param standardChemical The chemical that is the standard of analysis
 * @return A sorted linked hash map of Metlin ion to (intensity, time) pairs from highest intensity to lowest
 */
public static LinkedHashMap<String, XZ> performSNRAnalysisAndReturnMetlinIonsRankOrderedBySNR(
        ChemicalToMapOfMetlinIonsToIntensityTimeValues ionToIntensityData, String standardChemical,
        Map<String, List<Double>> restrictedTimeWindows) {

    TreeMap<Double, List<String>> sortedIntensityToIon = new TreeMap<>(Collections.reverseOrder());
    Map<String, XZ> ionToSNR = new HashMap<>();

    for (String ion : ionToIntensityData.getMetlinIonsOfChemical(standardChemical).keySet()) {

        // We first compress the ion spectra by 5 seconds (this number was gotten from trial and error on labelled
        // spectra). Then, we do feature detection of peaks in the compressed data.
        List<XZ> standardIntensityTime = detectPeaksInIntensityTimeWaveform(
                compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                        ionToIntensityData.getMetlinIonsOfChemical(standardChemical).get(ion),
                        COMPRESSION_CONSTANT).getLeft(),
                PEAK_DETECTION_THRESHOLD);

        List<List<XZ>> negativeIntensityTimes = new ArrayList<>();
        for (String chemical : ionToIntensityData.getIonList()) {
            if (!chemical.equals(standardChemical)) {
                negativeIntensityTimes.add(compressIntensityAndTimeGraphsAndFindMaxIntensityInEveryTimeWindow(
                        ionToIntensityData.getMetlinIonsOfChemical(chemical).get(ion), COMPRESSION_CONSTANT)
                                .getLeft());
            }
        }

        List<XZ> rmsOfNegativeValues = rmsOfIntensityTimeGraphs(negativeIntensityTimes);

        List<Double> listOfTimeWindows = new ArrayList<>();
        if (restrictedTimeWindows != null && restrictedTimeWindows.get(ion) != null) {
            listOfTimeWindows.addAll(restrictedTimeWindows.get(ion));
        }

        Boolean canUpdateMaxSNRAndTime = true;
        Boolean useRestrictedTimeWindowAnalysis = false;

        // If there are restricted time windows, set the default to not update SNR until certain conditions are met.
        if (listOfTimeWindows.size() > 0) {
            useRestrictedTimeWindowAnalysis = true;
            canUpdateMaxSNRAndTime = false;
        }

        Double maxSNR = 0.0;
        Double maxTime = 0.0;

        // For each of the peaks detected in the positive control, find the spectral intensity values from the negative
        // controls and calculate SNR based on that.
        for (XZ positivePosition : standardIntensityTime) {

            Double time = positivePosition.getTime();

            XZ negativeControlPosition = null;
            for (XZ position : rmsOfNegativeValues) {
                if (position.getTime() > time - POSITION_TIME_WINDOW_IN_SECONDS
                        && position.getTime() < time + POSITION_TIME_WINDOW_IN_SECONDS) {
                    negativeControlPosition = position;
                    break;
                }
            }

            Double snr = Math.pow(positivePosition.getIntensity() / negativeControlPosition.getIntensity(), 2);

            // If the given time point overlaps with one of the restricted time windows, we can update the snr calculations.
            for (Double restrictedTimeWindow : listOfTimeWindows) {
                if ((time > restrictedTimeWindow - RESTRICTED_RETENTION_TIME_WINDOW_IN_SECONDS)
                        && (time < restrictedTimeWindow + RESTRICTED_RETENTION_TIME_WINDOW_IN_SECONDS)) {
                    canUpdateMaxSNRAndTime = true;
                    break;
                }
            }

            if (canUpdateMaxSNRAndTime) {
                maxSNR = Math.max(maxSNR, snr);
                maxTime = Math.max(maxTime, time);
            }

            if (useRestrictedTimeWindowAnalysis) {
                canUpdateMaxSNRAndTime = false;
            }
        }

        ionToSNR.put(ion, new XZ(maxTime, maxSNR));

        List<String> ionValues = sortedIntensityToIon.get(maxSNR);
        if (ionValues == null) {
            ionValues = new ArrayList<>();
            sortedIntensityToIon.put(maxSNR, ionValues);
        }

        ionValues.add(ion);
    }

    LinkedHashMap<String, XZ> result = new LinkedHashMap<>(sortedIntensityToIon.size());
    for (Map.Entry<Double, List<String>> entry : sortedIntensityToIon.entrySet()) {
        List<String> ions = entry.getValue();
        for (String ion : ions) {
            result.put(ion, ionToSNR.get(ion));
        }
    }

    return result;
}