Example usage for java.io Serializable toString

List of usage examples for java.io Serializable toString

Introduction

On this page you can find example usage for java.io Serializable toString.

Prototype

public String toString() 

Document

Returns a string representation of the object.
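
toString() is inherited from java.lang.Object, so any Serializable reference can be asked for its string form; the call dispatches to the runtime type's override. A minimal, dependency-free sketch of the pattern used throughout the examples below:

import java.io.Serializable;

public class SerializableToStringDemo {
    public static void main(String[] args) {
        // String and Long both implement Serializable and override toString().
        Serializable stringId = "BI0001";
        Serializable numericId = Long.valueOf(42L);

        // The call is dispatched to the runtime type's override.
        System.out.println(stringId.toString());  // BI0001
        System.out.println(numericId.toString()); // 42
    }
}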

Usage

From source file:com.ephesoft.dcma.batch.service.BatchSchemaServiceImpl.java

/**
 * An API to get the Batch object for an input of batchInstanceIdentifier.
 *
 * @param batchInstanceIdentifier Serializable
 * @return Batch
 */
@Override
public Batch getBatch(final Serializable batchInstanceIdentifier) {
    Batch batch = null;
    if (null == batchInstanceIdentifier) {
        LOGGER.info(BatchConstants.BATCH_INSTANCE_ID_NULL);
    } else {
        batch = this.batchSchemaDao.get(batchInstanceIdentifier,
                batchInstanceService.getSystemFolderForBatchInstanceId(batchInstanceIdentifier.toString()));
    }
    return batch;
}
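
This null-guard-then-toString() pattern assumes the identifier is a value-like type (String, Long, UUID) whose toString() yields a stable lookup key. A minimal sketch with a plain map standing in for the DAO and folder lookup (the store and values are hypothetical):

import java.io.Serializable;
import java.util.Map;

public class NullGuardedLookupSketch {
    // Hypothetical in-memory store standing in for the DAO and folder lookup.
    private static final Map<String, String> STORE = Map.of("BI0001", "/batches/BI0001");

    static String lookup(Serializable batchInstanceIdentifier) {
        if (batchInstanceIdentifier == null) {
            return null; // the service above logs and returns null rather than throwing
        }
        // A value-like identifier (String, Long, UUID) gives a stable key via toString().
        return STORE.get(batchInstanceIdentifier.toString());
    }

    public static void main(String[] args) {
        System.out.println(lookup("BI0001")); // /batches/BI0001
        System.out.println(lookup(null));     // null
    }
}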

From source file:com.ephesoft.dcma.batch.service.BatchSchemaServiceImpl.java

/**
 * API to update the batch object.
 * 
 * @param hocrPages {@link HocrPages}
 * @param batchInstanceIdentifier Serializable
 * @param pageId {@link String}
 */
public void update(final HocrPages hocrPages, final Serializable batchInstanceIdentifier, final String pageId) {
    if (null == batchInstanceIdentifier) {
        LOGGER.info(BatchConstants.BATCH_INSTANCE_ID_NULL);
    } else {
        this.hocrSchemaDao.update(hocrPages, batchInstanceIdentifier, HOCR_FILE_NAME, pageId, false,
                batchInstanceService.getSystemFolderForBatchInstanceId(batchInstanceIdentifier.toString()));
    }
}

From source file:com.ephesoft.dcma.batch.service.BatchSchemaServiceImpl.java

/**
 * An API to get the HocrPages object for an input of HOCR.XML.
 *
 * @param batchInstanceIdentifier Serializable
 * @param pageId {@link String}
 * @return {@link HocrPages}
 */
@Override
public HocrPages getHocrPages(final Serializable batchInstanceIdentifier, final String pageId) {
    HocrPages hocrPages = null;
    if (null == batchInstanceIdentifier) {
        LOGGER.info(BatchConstants.BATCH_INSTANCE_ID_NULL);
    } else {
        hocrPages = this.hocrSchemaDao.get(batchInstanceIdentifier, pageId, HOCR_FILE_NAME,
                batchInstanceService.getSystemFolderForBatchInstanceId(batchInstanceIdentifier.toString()));
    }
    return hocrPages;
}

From source file:org.alfresco.repo.jscript.ScriptNode.java

@SuppressWarnings("unchecked")
/**
 * @param files                Return files extending from cm:content
 * @param folders              Return folders extending from cm:folder - ignoring sub-types of cm:systemfolder
 * @param ignoreTypes          Also optionally removes additional type qnames. The additional type can be
 *                             specified in short or long qname string form as a single string or an Array e.g. "fm:forum".
 * @param skipOffset           Items to skip (e.g. 0, or (num pages to skip * size of page))
 * @param maxItems             Max number of items (e.g. size of page)
 * @param requestTotalCountMax Request total count (up to a given max total count).
 *                             Note: if 0, the total count is not requested and the query may be able to optimise/cut off for max items
 * @param sortProp             Optional sort property as a prefix qname string (e.g. "cm:name"). Also supports special 
 *                             content case (i.e. "cm:content.size" and "cm:content.mimetype")
 * @param sortAsc              Given a sort property, true => ascending, false => descending
 * @param queryExecutionId     If paging then can pass back the previous query execution (as a hint for possible query optimisation)
 *                             
 * @return Returns ScriptPagingNodes which includes a JavaScript array of child file/folder nodes for this node.
 *         Automatically retrieves all sub-types of cm:content and cm:folder, also removes
 *         system folder types from the results.
 *         This is equivalent to @see FileFolderService.listFiles() and @see FileFolderService.listFolders()
 *         
 * <br/><br/>author janv
 * @since 4.0
 */
public ScriptPagingNodes childFileFolders(boolean files, boolean folders, Object ignoreTypes, int skipOffset,
        int maxItems, int requestTotalCountMax, String sortProp, Boolean sortAsc, String queryExecutionId) {
    Object[] results;

    Set<QName> ignoreTypeQNames = new HashSet<QName>(5);

    // Add user defined types to ignore
    if (ignoreTypes instanceof ScriptableObject) {
        Serializable types = getValueConverter().convertValueForRepo((ScriptableObject) ignoreTypes);
        if (types instanceof List) {
            for (Serializable typeObj : (List<Serializable>) types) {
                ignoreTypeQNames.add(createQName(typeObj.toString()));
            }
        } else if (types instanceof String) {
            ignoreTypeQNames.add(createQName(types.toString()));
        }
    } else if (ignoreTypes instanceof String) {
        ignoreTypeQNames.add(createQName(ignoreTypes.toString()));
    }

    // ALF-13968 - sort folders before files (for Share) - TODO should be optional sort param
    List<Pair<QName, Boolean>> sortProps = new ArrayList<Pair<QName, Boolean>>(2);
    if ((sortProp == null) || (!sortProp.equals(GetChildrenCannedQuery.SORT_QNAME_NODE_TYPE.getLocalName()))) {
        sortProps.add(new Pair<QName, Boolean>(GetChildrenCannedQuery.SORT_QNAME_NODE_IS_FOLDER, false));
    }
    if (sortProp != null) {
        sortProps.add(new Pair<QName, Boolean>(createQName(sortProp), sortAsc));
    }

    PagingRequest pageRequest = new PagingRequest(skipOffset, maxItems, queryExecutionId);
    pageRequest.setRequestTotalCountMax(requestTotalCountMax);

    PagingResults<FileInfo> pageOfNodeInfos = null;
    FileFilterMode.setClient(Client.script);
    try {
        pageOfNodeInfos = this.fileFolderService.list(this.nodeRef, files, folders, null, ignoreTypeQNames,
                sortProps, pageRequest);
    } finally {
        FileFilterMode.clearClient();
    }

    List<FileInfo> nodeInfos = pageOfNodeInfos.getPage();
    int size = nodeInfos.size();
    results = new Object[size];
    for (int i = 0; i < size; i++) {
        FileInfo nodeInfo = nodeInfos.get(i);
        results[i] = newInstance(nodeInfo, this.services, this.scope);
    }

    int totalResultCountLower = -1;
    int totalResultCountUpper = -1;

    Pair<Integer, Integer> totalResultCount = pageOfNodeInfos.getTotalResultCount();
    if (totalResultCount != null) {
        totalResultCountLower = (totalResultCount.getFirst() != null ? totalResultCount.getFirst() : -1);
        totalResultCountUpper = (totalResultCount.getSecond() != null ? totalResultCount.getSecond() : -1);
    }

    return new ScriptPagingNodes(Context.getCurrentContext().newArray(this.scope, results),
            pageOfNodeInfos.hasMoreItems(), totalResultCountLower, totalResultCountUpper);
}
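
The instanceof dispatch above handles a Serializable that may arrive as either a List or a single String, normalizing every element through toString() before QName creation. A simplified, framework-free sketch of that dispatch (the class and method names are illustrative):

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class IgnoreTypesSketch {
    // The input may be a single Serializable string or a List of them; every
    // element is normalized through toString() (the original then builds QNames).
    static Set<String> normalize(Serializable types) {
        Set<String> result = new HashSet<>();
        if (types instanceof List) {
            for (Object typeObj : (List<?>) types) {
                result.add(typeObj.toString());
            }
        } else if (types instanceof String) {
            result.add(types.toString());
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(normalize("fm:forum"));
        System.out.println(normalize(new ArrayList<>(List.of("fm:forum", "cm:systemfolder"))));
    }
}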

From source file:org.pentaho.platform.repository2.unified.jcr.JcrRepositoryFileDao.java

private RepositoryFile internalCreateFolder(final Session session, final Serializable parentFolderId,
        final RepositoryFile folder, final RepositoryFileAcl acl, final String versionMessage)
        throws RepositoryException {
    if (isKioskEnabled()) {
        throw new RuntimeException(
                Messages.getInstance().getString("JcrRepositoryFileDao.ERROR_0006_ACCESS_DENIED")); //$NON-NLS-1$
    }

    // Get repository file info and acl info of parent
    if (parentFolderId != null) {
        RepositoryFile parentRepositoryFile = getFileById(parentFolderId);
        if (parentRepositoryFile != null) {
            RepositoryFileAcl parentAcl = aclDao.getAcl(parentRepositoryFile.getId());
            // Invoke accessVoterManager to see if we have access to perform this operation
            if (!accessVoterManager.hasAccess(parentRepositoryFile, RepositoryFilePermission.WRITE, parentAcl,
                    PentahoSessionHolder.getSession())) {
                return null;
            }
        }
    }
    PentahoJcrConstants pentahoJcrConstants = new PentahoJcrConstants(session);
    JcrRepositoryFileUtils.checkoutNearestVersionableFileIfNecessary(session, pentahoJcrConstants,
            parentFolderId);
    Node folderNode = JcrRepositoryFileUtils.createFolderNode(session, pentahoJcrConstants, parentFolderId,
            folder);
    // create a temporary folder object with correct path for default acl purposes.
    String path = JcrRepositoryFileUtils.getAbsolutePath(session, pentahoJcrConstants, folderNode);
    RepositoryFile tmpFolder = new RepositoryFile.Builder(folder).path(path).build();
    // we must create the acl during checkout
    JcrRepositoryFileAclUtils.createAcl(session, pentahoJcrConstants, folderNode.getIdentifier(),
            acl == null ? defaultAclHandler.createDefaultAcl(tmpFolder) : acl);
    session.save();
    if (folder.isVersioned()) {
        JcrRepositoryFileUtils.checkinNearestVersionableNodeIfNecessary(session, pentahoJcrConstants,
                folderNode, versionMessage);
    }
    JcrRepositoryFileUtils.checkinNearestVersionableFileIfNecessary(session, pentahoJcrConstants,
            parentFolderId,
            Messages.getInstance().getString("JcrRepositoryFileDao.USER_0001_VER_COMMENT_ADD_FOLDER", //$NON-NLS-1$
                    folder.getName(), (parentFolderId == null ? "root" : parentFolderId.toString()))); //$NON-NLS-1$
    return JcrRepositoryFileUtils.nodeToFile(session, pentahoJcrConstants, pathConversionHelper, lockHelper,
            folderNode);
}
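
The ternary near the end (parentFolderId == null ? "root" : parentFolderId.toString()) is a null-safe toString; the JDK's Objects.toString(Object, String) expresses the same thing. A minimal sketch:

import java.io.Serializable;
import java.util.Objects;

public class NullSafeToStringSketch {
    public static void main(String[] args) {
        Serializable parentFolderId = null;
        // Objects.toString(obj, fallback) returns fallback when obj is null,
        // otherwise obj.toString(), matching the ternary above.
        System.out.println(Objects.toString(parentFolderId, "root")); // root

        parentFolderId = "folder-123"; // hypothetical identifier
        System.out.println(Objects.toString(parentFolderId, "root")); // folder-123
    }
}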

From source file:com.ephesoft.dcma.batch.service.BatchSchemaServiceImpl.java

/**
 * An API to store all files to base folder location.
 *
 * @param identifier Serializable
 * @param files File[]
 */
public void storeFiles(final Serializable identifier, final File[] files) {

    String errMsg = null;

    if (null == identifier || null == files) {
        errMsg = "Input parameters id/files are null.";
        LOGGER.error(errMsg);
        throw new DCMABusinessException(errMsg);
    }

    boolean preserveFileDate = false;
    String newPath = null;
    final String localFolderLocation = batchInstanceService
            .getSystemFolderForBatchInstanceId(identifier.toString());

    for (final File srcFile : files) {

        final String path = srcFile.getPath();

        newPath = localFolderLocation + File.separator + identifier + BatchConstants.UNDER_SCORE + path;
        // The target file name to which the source file will be copied.
        final File destFile = new File(newPath);

        try {
            FileUtils.copyFile(srcFile, destFile, preserveFileDate);
            errMsg = "Successfully copy of the file for the batch Instance Id : " + identifier;
            LOGGER.info(errMsg);
        } catch (final IOException e) {
            errMsg = "Unable to copy the file for the batch Instance Id : " + identifier;
            LOGGER.error(errMsg);
            LOGGER.error(e.getMessage());
        }
    }
}
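
When the Serializable identifier appears inside string concatenation (localFolderLocation + File.separator + identifier + ...), the compiler inserts String.valueOf(identifier), which delegates to toString() for non-null references. A short sketch of the same construction (the paths are hypothetical):

import java.io.File;
import java.io.Serializable;

public class DestPathSketch {
    public static void main(String[] args) {
        Serializable identifier = "BI0001"; // hypothetical batch instance id
        String localFolderLocation = "batches" + File.separator + "BI0001"; // hypothetical
        String path = "input.tif";

        // Concatenation inserts String.valueOf(identifier), which calls
        // identifier.toString() for non-null references.
        String newPath = localFolderLocation + File.separator + identifier + "_" + path;
        System.out.println(newPath); // batches/BI0001/BI0001_input.tif (on Unix)
    }
}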

From source file:com.bluexml.xforms.controller.alfresco.AlfrescoController.java

/**
 * Sets initial values for workflow fields from BlueXML properties (stored
 * in the repository) of
 * the workflow instance.
 * 
 * @param wkFormName
 *            the name of the workflow form to display
 * @param doc
 *            the XForms instance
 * @param instanceId
 *            the workflow instance Id (previously provided by Alfresco)
 * @return false if a lethal exception occurred. True if the normal end of
 *         the function was
 *         reached, which does not imply anything about the setting of
 *         initial values.
 */
public boolean workflowPatchInstance(AlfrescoTransaction transaction, String wkFormName, Document doc,
        String instanceId) {
    if (logger.isDebugEnabled()) {
        logger.debug("Patching workflow instance with Id:'" + instanceId + "', form name: " + wkFormName);
    }
    QName qname;
    String namespaceURI = null; // to be set once
    Map<QName, Serializable> properties = null; // to be set once

    if (StringUtils.trimToNull(instanceId) == null) {
        if (logger.isDebugEnabled()) {
            logger.debug("  No patching performed: the instanceId is null");
        }
        return true;
    }
    if (instanceId.equals("null")) {
        if (logger.isDebugEnabled()) {
            logger.debug("  No patching performed, invalid instanceId with string 'null'");
        }
        return true;
    }
    Element root = doc.getDocumentElement();
    Element formElt = DOMUtil.getChild(root, wkFormName);
    List<Element> allFields = DOMUtil.getAllChildren(formElt);

    // we need to fail silently so that the form is displayed even in the event of errors
    for (Element field : allFields) {
        String fieldUniqueName = field.getTagName();
        Serializable fieldValue = null;
        String localName = getWorkflowFieldAlfrescoName(wkFormName, fieldUniqueName);
        if (localName != null) {
            // build the QName
            if (namespaceURI == null) {
                String processName = workflowExtractProcessNameFromFormName(wkFormName);
                namespaceURI = getWorkflowNamespaceURI(processName);
            }
            qname = QName.createQName(namespaceURI, localName);

            // read the QName value from the collected properties of the workflow instance
            if (properties == null) {
                properties = workflowCollectInstanceProperties(transaction, instanceId);
                if (properties == null) {
                    return false; // there's no point in continuing without the properties
                }
            }
            try {
                // set the initial value
                fieldValue = properties.get(qname);
                if (fieldValue != null) {
                    field.setTextContent(fieldValue.toString());
                }
            } catch (NullPointerException e) {
                // we'll get this when displaying workflow forms while the webscript is down
                return false;
            }
        }
    }
    return true;
}
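
The loop above pulls Serializable values out of a Map keyed by QName and guards against null before calling toString(). A framework-free sketch of that read-and-guard pattern, using String keys in place of QName (the keys and values are hypothetical):

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

public class PropertyPatchSketch {
    public static void main(String[] args) {
        // Stand-in for the Map<QName, Serializable> of workflow properties.
        Map<String, Serializable> properties = new HashMap<>();
        properties.put("bpm:description", "Review contract");
        properties.put("bpm:priority", 2);
        properties.put("bpm:dueDate", null);

        for (Map.Entry<String, Serializable> entry : properties.entrySet()) {
            Serializable fieldValue = entry.getValue();
            // Guard against null before calling toString(), as the original does.
            if (fieldValue != null) {
                System.out.println(entry.getKey() + " = " + fieldValue.toString());
            }
        }
    }
}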

From source file:org.nuxeo.ecm.core.TestSQLRepositoryAPI.java

@Test
public void testProxySchemas() throws Exception {
    DocumentModel folder = new DocumentModelImpl("/", "folder", "Folder");
    folder = session.createDocument(folder);
    DocumentModel doc = new DocumentModelImpl("/", "file", "File");
    doc = session.createDocument(doc);
    DocumentModel proxy = session.publishDocument(doc, folder);
    session.save();
    try {
        doc.setPropertyValue("info:info", "docinfo");
        doc = session.saveDocument(doc);
        session.save();
    } catch (PropertyNotFoundException e) {
        assertTrue(e.getMessage().contains("info:info"));
    }

    assertNull(proxy.getPropertyValue("info:info"));
    proxy.setPropertyValue("info:info", "proxyinfo");
    proxy = session.saveDocument(proxy);
    session.save();

    // new session
    reopenSession();
    DocumentModel root = session.getRootDocument();
    proxy = session.getDocument(proxy.getRef());
    assertEquals("proxyinfo", proxy.getPropertyValue("info:info"));

    // test a query
    String nxql;
    DocumentModelList list;
    nxql = "SELECT * FROM Document WHERE info:info = 'proxyinfo' AND ecm:isProxy = 1";
    list = session.query(nxql);
    assertEquals(1, list.size());
    nxql = "SELECT * FROM Document WHERE info:info = 'proxyinfo'";
    list = session.query(nxql);
    assertEquals(1, list.size());
    nxql = "SELECT * FROM Document WHERE info:info = 'proxyinfo' AND ecm:isProxy = 0";
    list = session.query(nxql);
    assertEquals(0, list.size());

    // queryAndFetch
    nxql = "SELECT ecm:uuid, info:info FROM File WHERE info:info IS NOT NULL";
    IterableQueryResult res = session.queryAndFetch(nxql, "NXQL");
    Map<Serializable, String> actual = new HashMap<>();
    for (Map<String, Serializable> map : res) {
        Serializable uuid = map.get("ecm:uuid");
        String info = (String) map.get("info:info");
        actual.put(uuid.toString(), info); // toString() for sequence ids
    }
    res.close();
    assertEquals(Collections.singletonMap(proxy.getId(), "proxyinfo"), actual);

    // test that the copy has the extra schema values
    session.copy(folder.getRef(), root.getRef(), "folderCopy");
    DocumentModel proxyCopy = session.getDocument(new PathRef("/folderCopy/file"));
    assertTrue(proxyCopy.isProxy());
    assertEquals("proxyinfo", proxyCopy.getPropertyValue("info:info"));
}
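
The inline comment explains the uuid.toString() call: depending on the storage backend, ecm:uuid may come back as a String or as a numeric sequence id, and toString() normalizes both to a single map-key type. A small sketch of that normalization (the id values are hypothetical):

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

public class IdNormalizationSketch {
    public static void main(String[] args) {
        // Ids may arrive as a String (UUID backends) or a Long (sequence backends).
        Serializable uuidAsString = "3f2504e0-4f89-11d3-9a0c-0305e82c3301";
        Serializable uuidAsSequence = 12345L;

        // toString() gives one uniform key type regardless of the backend.
        Map<String, String> actual = new HashMap<>();
        actual.put(uuidAsString.toString(), "proxyinfo");
        actual.put(uuidAsSequence.toString(), "otherinfo");
        System.out.println(actual);
    }
}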

From source file:com.linkedin.pinot.query.plan.PlanMakerTest.java

@Test
public void testInnerSegmentPlanMakerForAggregationGroupByNoFilter() {
    BrokerRequest brokerRequest = getAggregationGroupByNoFilterBrokerRequest();
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    PlanNode rootPlanNode = instancePlanMaker.makeInnerSegmentPlan(_indexSegment, brokerRequest);
    rootPlanNode.showTree("");
    IntermediateResultsBlock resultBlock = (IntermediateResultsBlock) rootPlanNode.run().nextBlock();
    LOGGER.debug("RunningTime: {}", resultBlock.getTimeUsedMs());
    LOGGER.debug("NumDocsScanned: {}", resultBlock.getNumDocsScanned());
    LOGGER.debug("TotalDocs: {}", resultBlock.getTotalRawDocs());
    List<Map<String, Serializable>> combinedGroupByResult = resultBlock.getAggregationGroupByOperatorResult();

    Map<String, Serializable> singleGroupByResult = combinedGroupByResult.get(COUNT_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(200001, ((Number) resultList).longValue());
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(200000, ((Number) resultList).longValue());
        }
    }

    singleGroupByResult = combinedGroupByResult.get(SUM_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            double expectedSumValue = ((Double.parseDouble(keyString) + 2000000 + Double.parseDouble(keyString))
                    * 200001) / 2;
            assertEquals(expectedSumValue, ((Double) resultList).doubleValue());
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            double expectedSumValue = (((Double.parseDouble(keyString) + 2000000) - 10)
                    + Double.parseDouble(keyString)) * 100000;
            assertEquals(expectedSumValue, ((Double) resultList).doubleValue());
        }
    }

    singleGroupByResult = combinedGroupByResult.get(MAX_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(2000000 + Double.parseDouble(keyString), ((Double) resultList).doubleValue());
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals((2000000 - 10) + Double.parseDouble(keyString), ((Double) resultList).doubleValue());
        }
    }

    singleGroupByResult = combinedGroupByResult.get(MIN_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(Double.parseDouble(keyString), ((Double) resultList).doubleValue());
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(Double.parseDouble(keyString), ((Double) resultList).doubleValue());
        }
    }

    singleGroupByResult = combinedGroupByResult.get(AVG_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);

            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            double expectedAvgValue = ((Double.parseDouble(keyString) + 2000000 + Double.parseDouble(keyString))
                    * 200001) / 2 / 200001;
            assertEquals(expectedAvgValue, Double.parseDouble((resultList.toString())));
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            double expectedAvgValue = ((((Double.parseDouble(keyString) + 2000000) - 10)
                    + Double.parseDouble(keyString)) * 100000) / 200000;
            assertEquals(expectedAvgValue, Double.parseDouble((resultList.toString())));
        }
    }

    singleGroupByResult = combinedGroupByResult.get(DISTINCT_DIM0_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(1, ((IntOpenHashSet) resultList).size());
    }

    singleGroupByResult = combinedGroupByResult.get(DISTINCT_DIM1_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        assertEquals(10, ((IntOpenHashSet) resultList).size());
    }
}
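
The assertions read numbers out of Serializable results in two ways: a direct cast when the concrete type is known ((Number) or (Double)), and Double.parseDouble(result.toString()) when only a string form is guaranteed, as in the AVG case. A minimal sketch of both (the values are hypothetical):

import java.io.Serializable;

public class NumericResultSketch {
    public static void main(String[] args) {
        Serializable countResult = 200001L; // counts arrive as a boxed numeric type
        Serializable avgResult = "12.5";    // hypothetical result exposing only a string form

        // Cast when the runtime type is known to be numeric...
        long count = ((Number) countResult).longValue();
        // ...or round-trip through toString() when it is not.
        double avg = Double.parseDouble(avgResult.toString());

        System.out.println(count + " " + avg); // 200001 12.5
    }
}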

From source file:com.linkedin.pinot.query.plan.PlanMakerTest.java

@Test
public void testInterSegmentAggregationGroupByPlanMakerAndRun() {
    PlanMaker instancePlanMaker = new InstancePlanMakerImplV2();
    BrokerRequest brokerRequest = getAggregationGroupByNoFilterBrokerRequest();
    ExecutorService executorService = Executors.newCachedThreadPool(new NamedThreadFactory("test-plan-maker"));
    Plan globalPlan = instancePlanMaker.makeInterSegmentPlan(makeSegMgrList(_indexSegmentList), brokerRequest,
            executorService, 150000);
    globalPlan.print();
    globalPlan.execute();
    DataTable instanceResponse = globalPlan.getInstanceResponse();

    LOGGER.debug(instanceResponse.toString());
    List<DataTable> instanceResponseList = new ArrayList<DataTable>();
    instanceResponseList.add(instanceResponse);

    List<Map<String, Serializable>> combinedGroupByResult = AggregationGroupByOperatorService
            .transformDataTableToGroupByResult(instanceResponse);

    Map<String, Serializable> singleGroupByResult = combinedGroupByResult.get(COUNT_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(400020, ((Number) resultList).longValue());
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(400000, ((Number) resultList).longValue());
        }
    }

    singleGroupByResult = combinedGroupByResult.get(SUM_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            double expectedSumValue = (((Double.parseDouble(keyString) + 200000 + Double.parseDouble(keyString))
                    * 20001) / 2) * 20;
            assertEquals(expectedSumValue, ((Double) resultList).doubleValue());
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            double expectedSumValue = (((Double.parseDouble(keyString) + 200000) - 10)
                    + Double.parseDouble(keyString)) * 10000 * 20;

            assertEquals(expectedSumValue, ((Double) resultList).doubleValue());
        }
    }

    singleGroupByResult = combinedGroupByResult.get(MAX_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(200000 + Double.parseDouble(keyString), ((Double) resultList).doubleValue());
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals((200000 - 10) + Double.parseDouble(keyString), ((Double) resultList).doubleValue());
        }
    }

    singleGroupByResult = combinedGroupByResult.get(MIN_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(Double.parseDouble(keyString), ((Double) resultList).doubleValue());
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            assertEquals(Double.parseDouble(keyString), ((Double) resultList).doubleValue());
        }
    }

    singleGroupByResult = combinedGroupByResult.get(AVG_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        if (keyString.equals("0")) {
            Serializable resultList = singleGroupByResult.get(keyString);

            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            double expectedAvgValue = ((((Double.parseDouble(keyString) + 200000
                    + Double.parseDouble(keyString)) * 20001) / 2) * 20) / 400020;
            assertEquals(expectedAvgValue, Double.parseDouble((resultList.toString())));
        } else {
            Serializable resultList = singleGroupByResult.get(keyString);
            LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
            double expectedAvgValue = ((((Double.parseDouble(keyString) + 200000) - 10)
                    + Double.parseDouble(keyString)) * 10000 * 20) / 400000;
            assertEquals(expectedAvgValue, Double.parseDouble((resultList.toString())));
        }
    }

    singleGroupByResult = combinedGroupByResult.get(DISTINCT_DIM0_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        int expectedAvgValue = 1;
        assertEquals(expectedAvgValue, ((IntOpenHashSet) resultList).size());
    }

    singleGroupByResult = combinedGroupByResult.get(DISTINCT_DIM1_AGGREGATION_INDEX);
    for (String keyString : singleGroupByResult.keySet()) {
        Serializable resultList = singleGroupByResult.get(keyString);
        LOGGER.debug("grouped key: {}, value: {}", keyString, resultList);
        int expectedAvgValue = 10;
        assertEquals(expectedAvgValue, ((IntOpenHashSet) resultList).size());
    }

    BrokerReduceService reduceService = new BrokerReduceService();
    Map<ServerInstance, DataTable> instanceResponseMap = new HashMap<ServerInstance, DataTable>();
    instanceResponseMap.put(new ServerInstance("localhost:0000"), instanceResponse);
    BrokerResponseNative brokerResponse = reduceService.reduceOnDataTable(brokerRequest, instanceResponseMap);
    LOGGER.debug(new JSONArray(brokerResponse.getAggregationResults()).toString());
    LOGGER.debug("Time used: {}", brokerResponse.getTimeUsedMs());
}