Example usage for java.util Set clear

List of usage examples for java.util Set clear

Introduction

On this page you can find example usages of the java.util.Set.clear() method.

Prototype

void clear();

Document

Removes all of the elements from this set (optional operation).
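
For a quick illustration of the behavior described above, here is a minimal, self-contained sketch (the class name SetClearDemo and the variable names are illustrative only, not taken from any of the projects below). It shows that clear() empties the set in place and that the same instance can be refilled and reused afterwards:

import java.util.HashSet;
import java.util.Set;

public class SetClearDemo {
    public static void main(String[] args) {
        Set<String> uuids = new HashSet<>();
        uuids.add("a1");
        uuids.add("b2");
        System.out.println(uuids.size());    // 2

        // clear() removes every element but leaves the set object itself usable.
        uuids.clear();
        System.out.println(uuids.isEmpty()); // true

        // The same instance can be refilled afterwards, which is why several of
        // the examples below reuse one set across successive filter scenarios.
        uuids.add("c3");
        System.out.println(uuids.size());    // 1
    }
}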

Usage

From source file: org.zenoss.zep.index.impl.EventIndexDaoImplIT.java

@Test
public void testExcludeUuids() throws ZepException {
    EventSummary event1 = createEventWithSeverity(EventSeverity.SEVERITY_ERROR, EventStatus.STATUS_NEW);
    EventSummary event2 = createEventWithSeverity(EventSeverity.SEVERITY_ERROR, EventStatus.STATUS_NEW);
    EventSummary event3 = createEventWithSeverity(EventSeverity.SEVERITY_ERROR, EventStatus.STATUS_NEW);

    Set<String> include = new HashSet<String>();
    Set<String> exclude = new HashSet<String>();

    // No filters should return all three events
    Set<String> foundUuids = getUuidsFromResult(this.eventIndexDao.list(createUuidRequest(include, exclude)));
    assertEquals(3, foundUuids.size());
    assertTrue(foundUuids.contains(event1.getUuid()));
    assertTrue(foundUuids.contains(event2.getUuid()));
    assertTrue(foundUuids.contains(event3.getUuid()));

    // Test filter excluding all events
    exclude.add(event1.getUuid());
    exclude.add(event2.getUuid());
    exclude.add(event3.getUuid());
    foundUuids = getUuidsFromResult(this.eventIndexDao.list(createUuidRequest(include, exclude)));
    assertEquals(0, foundUuids.size());

    // Test filter including 2 events
    include.clear();
    exclude.clear();
    include.add(event1.getUuid());
    include.add(event3.getUuid());
    foundUuids = getUuidsFromResult(this.eventIndexDao.list(createUuidRequest(include, exclude)));
    assertEquals(2, foundUuids.size());
    assertTrue(foundUuids.contains(event1.getUuid()));
    assertTrue(foundUuids.contains(event3.getUuid()));

    // Test filter including all events of SEVERITY_ERROR but excluding a UUID
    EventFilter filter = EventFilter.newBuilder().addSeverity(EventSeverity.SEVERITY_ERROR).build();
    EventFilter exclusion = EventFilter.newBuilder().addUuid(event1.getUuid()).build();
    EventSummaryRequest req = EventSummaryRequest.newBuilder().setEventFilter(filter)
            .setExclusionFilter(exclusion).build();
    foundUuids = getUuidsFromResult(this.eventIndexDao.list(req));
    assertEquals(2, foundUuids.size());
    assertTrue(foundUuids.contains(event2.getUuid()));
    assertTrue(foundUuids.contains(event3.getUuid()));
}

From source file: org.apache.geode.management.internal.beans.DistributedSystemBridge.java

private String[] listStandAloneLocatorMembers() {
    String[] locatorMembers = ManagementConstants.NO_DATA_STRING;

    Set<DistributedMember> members = new HashSet<>();
    members.add(system.getDistributedMember());
    members.addAll(system.getAllOtherMembers());

    if (!members.isEmpty()) {
        Set<String> locatorMemberSet = new TreeSet<>();
        for (DistributedMember member : members) {
            if (DistributionManager.LOCATOR_DM_TYPE == ((InternalDistributedMember) member).getVmKind()) {
                String name = member.getName();
                name = StringUtils.isNotBlank(name) ? name : member.getId();
                locatorMemberSet.add(name);
            }
        }
        locatorMembers = locatorMemberSet.toArray(locatorMembers);
        members.clear();
        locatorMemberSet.clear();
    }

    return locatorMembers;
}

From source file: org.alfresco.repo.transfer.fsr.ManifestProcessorImpl.java

@Override
protected void endManifest() {
    String pathPrefix = fileTransferReceiver.getDefaultReceivingroot();

    if (isDebugEnabled) {
        log.debug("Initial pass through manifest is complete. Post-processing has started.");
    }

    //Process any existing files that need to be replaced with new versions
    //Copy the collection first so we can safely remove processed files as we go. This helps
    //us later if we ever need to clean up following an error.
    Collection<NodeContext> filesToReplace = new ArrayList<NodeContext>(existingFilesToReplace.values());
    for (NodeContext fileToReplace : filesToReplace) {
        if (switchFile(fileToReplace.nodeId, fileToReplace.newParentId, fileToReplace.tempName,
                fileToReplace.newContentUrl, pathPrefix)) {
            //Record this node in the list of temp files to be renamed
            tempFilesToRename.put(fileToReplace.nodeId, fileToReplace);
            existingFilesToReplace.remove(fileToReplace.nodeId);
        }
    }

    //Deal with any folders that need moving
    int folderCount;
    Set<String> processedFolders = new TreeSet<String>();
    while (!foldersToMove.isEmpty()) {
        folderCount = foldersToMove.size();
        if (isDebugEnabled) {
            log.debug("Folders that need to be moved: " + folderCount);
        }
        for (NodeContext folder : foldersToMove.values()) {
            FileTransferInfoEntity folderEntity = dbHelper.findFileTransferInfoByNodeRef(folder.nodeId);
            FileTransferInfoEntity parentEntity = dbHelper.findFileTransferInfoByNodeRef(folder.newParentId);
            if (moveFolder(folderEntity, parentEntity, folder.newName, pathPrefix)) {
                processedFolders.add(folder.nodeId);

                //Log the effect that this has had...
                if (folder.isNew) {
                    logCreated(folder.nodeId, folder.newParentId,
                            pathPrefix + folderEntity.getPath() + folderEntity.getContentName(), false);
                } else {
                    logMoved(folder.nodeId, pathPrefix + folder.currentParentPath + folder.currentName,
                            folder.newParentId,
                            pathPrefix + folderEntity.getPath() + folderEntity.getContentName());
                }
            }
        }
        for (String nodeId : processedFolders) {
            foldersToMove.remove(nodeId);
        }
        processedFolders.clear();
        if (folderCount == foldersToMove.size()) {
            //We have reached a point where we have failed to process any more folders
            log.error("We failed to move any folders successfully on that loop.");
            break;
        }
    }

    //If any folders need to be deleted then handle them now
    removeDeletedFolders(pathPrefix);

    //If we are dealing with a "sync" transfer then we now need to work out
    //if there are any implicit deletions required and process them if so.
    if (isSync) {
        if (isDebugEnabled) {
            log.debug("Sync-mode transfer: started checking received data for implicit deletes...");
        }
        //For each folder that we received in the transfer, check which children we have received in this transfer and
        //compare with the list of children that we currently have. If there are any existing children that
        //we didn't receive in this transfer then we assume that we must delete them...
        for (String parentId : receivedFolderIds) {
            Set<String> receivedChildren = parentChildMap.get(parentId);
            List<FileTransferInfoEntity> currentChildren = dbHelper
                    .findFileTransferInfoByParentNodeRef(parentId);
            for (FileTransferInfoEntity currentChild : currentChildren) {
                if (receivedChildren == null || !receivedChildren.remove(currentChild.getNodeRef())) {
                    if (isDebugEnabled) {
                        log.debug("Have not received data for existing node " + currentChild.getNodeRef() + " ("
                                + currentChild.getPath() + currentChild.getContentName() + ")");
                    }
                    deleteNode(currentChild, pathPrefix);
                }
            }
        }
        if (isDebugEnabled) {
            log.debug("Sync-mode transfer: finished checking received data for implicit deletes.");
        }
    }

    //Finally we need to run through all the new files with temporary names and rename them
    renameTempFiles(pathPrefix);

    log.info("Completed processing manifest file. It took " + (System.currentTimeMillis() - processStartTime)
            + "ms");
}

From source file: net.sf.infrared.web.action.PerfDataSummaryAction.java

public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request,
        HttpServletResponse response) throws Exception {

    ConfigureFormBean configBean = (ConfigureFormBean) form;

    HttpSession session = request.getSession();
    Set selectedApplications = new HashSet();
    boolean ascending = true;
    boolean ascendingAbs = true;

    PerfDataSummaryBean perfBean = new PerfDataSummaryBean();
    Map hierarchicalLayerTimes;
    Map absoluteLayerTimes;
    LayerTime[] hierarchicalLayerTimeArr = null;
    LayerTime[] absoluteLayerTimeArr = null;

    if (configBean.isLiveData()) {
        session.setAttribute("isInActiveMode", "true");
        Set applicationNames = DataFetchUtil.getApplicationNames();
        PerformanceDataSnapshot currentPerfData = (PerformanceDataSnapshot) session.getAttribute("perfData");
        if (request.getParameterValues("applicationName") != null) {
            String[] parameterValues = request.getParameterValues("applicationName");
            for (int i = 0; i < parameterValues.length; i++) {
                selectedApplications.add(parameterValues[i]);
            }
        } else if (currentPerfData != null) {
            selectedApplications = currentPerfData.getApplicationNames();
        } else if (applicationNames.size() > 0) {
            selectedApplications.add(applicationNames.iterator().next());
        }

        Set instances = DataFetchUtil.getInstanceNames(selectedApplications);
        Set selectedInstances = new HashSet();
        if (request.getParameter("instanceName") != null) {
            String[] parameterValues = request.getParameterValues("instanceName");
            for (int i = 0; i < parameterValues.length; i++) {
                selectedInstances.add(parameterValues[i]);
            }
        } else if (currentPerfData != null) {
            selectedInstances = currentPerfData.getInstanceNames();
        } else if (instances.size() > 0) {
            selectedInstances.add(instances.iterator().next());
        }

        if ("true".equals(request.getParameter("reset"))) {
            selectedApplications.clear();
            selectedInstances.clear();
            applicationNames.clear();
            instances.clear();
            session.setAttribute("perfData", DataFetchUtil.reset());
        }

        // -- creating an application-name to instance-name map --//
        String map[][] = new String[applicationNames.size()][];
        getAppNameToInstNameMapping(applicationNames, map);
        request.setAttribute("map", map);

        PerformanceDataSnapshot perfData = DataFetchUtil.getPerfData(selectedApplications, selectedInstances);

        perfBean.setApplicationName(applicationNames);
        perfBean.setInstanceName(instances);
        perfBean.setSelectedApplications(selectedApplications);
        perfBean.setSelectedInstances(selectedInstances);

        session.setAttribute("perfData", perfData);

        hierarchicalLayerTimes = getHierarchicalLayerTimesMap(perfData);
        absoluteLayerTimes = getAbsoluteLayerTimesMap(perfData);
        String sortBy = getSortBy(request.getParameter("sortBy"));
        ascending = getSortDir(request.getParameter("sortDir"));

        String sortByAbs = getSortBy(request.getParameter("sortByAbs"));
        ascendingAbs = getSortDir(request.getParameter("sortDirAbs"));

        hierarchicalLayerTimeArr = new LayerTime[hierarchicalLayerTimes.size()];
        absoluteLayerTimeArr = new LayerTime[absoluteLayerTimes.size()];

        int count = 0;
        for (Iterator itr = hierarchicalLayerTimes.values().iterator(); itr.hasNext();) {
            hierarchicalLayerTimeArr[count] = (LayerTime) itr.next();
            count++;
        }
        count = 0;
        for (Iterator itr = absoluteLayerTimes.values().iterator(); itr.hasNext();) {
            absoluteLayerTimeArr[count] = (LayerTime) itr.next();
            count++;
        }

        ViewUtil.sort(hierarchicalLayerTimeArr, sortBy, ascending);
        ViewUtil.sort(absoluteLayerTimeArr, sortByAbs, ascendingAbs);
    } else {
        String start = configBean.getStartDate();
        String end = configBean.getEndDate() + " 23:59:59";
        session.setAttribute("isInActiveMode", "false");
        PerformanceDataSnapshot perfData = (PerformanceDataSnapshot) session.getAttribute("perfData");
        if (perfData == null) {
            perfData = new PerformanceDataSnapshot();
        }
        SimpleDateFormat formatter = new SimpleDateFormat("dd/MM/yyyy");
        SimpleDateFormat endFormatter = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
        Date startDate = formatter.parse(start);
        Date endDate = endFormatter.parse(end);
        Set applicationNames = perfData.getApplicationNames();
        Set instanceNames = perfData.getInstanceNames();

        perfData = DataFetchUtil.getDataFromDB(applicationNames, instanceNames, startDate, endDate);

        session.setAttribute("perfData", perfData);
        perfBean.setApplicationName(applicationNames);
        perfBean.setInstanceName(instanceNames);
        perfBean.setSelectedApplications(applicationNames);
        perfBean.setSelectedInstances(instanceNames);

        hierarchicalLayerTimes = getHierarchicalLayerTimesMap(perfData);
        absoluteLayerTimes = getAbsoluteLayerTimesMap(perfData);

        String sortBy = getSortBy(request.getParameter("sortBy"));
        ascending = getSortDir(request.getParameter("sortDir"));

        String sortByAbs = getSortBy(request.getParameter("sortByAbs"));
        ascendingAbs = getSortDir(request.getParameter("sortDirAbs"));
        hierarchicalLayerTimeArr = new LayerTime[hierarchicalLayerTimes.size()];
        absoluteLayerTimeArr = new LayerTime[absoluteLayerTimes.size()];
        int count = 0;
        for (Iterator itr = hierarchicalLayerTimes.values().iterator(); itr.hasNext();) {
            hierarchicalLayerTimeArr[count] = (LayerTime) itr.next();
            count++;
        }
        count = 0;
        for (Iterator itr = absoluteLayerTimes.values().iterator(); itr.hasNext();) {
            absoluteLayerTimeArr[count] = (LayerTime) itr.next();
            count++;
        }

        ViewUtil.sort(hierarchicalLayerTimeArr, sortBy, ascending);
        ViewUtil.sort(absoluteLayerTimeArr, sortByAbs, ascendingAbs);
    }

    ArrayList hierarchicalLayerTimeBean = new ArrayList();

    for (int i = 0; i < hierarchicalLayerTimeArr.length; i++) {
        LayerTimeBean layerBean = new LayerTimeBean();
        BeanUtils.copyProperties(layerBean, hierarchicalLayerTimeArr[i]);
        hierarchicalLayerTimeBean.add(layerBean);
    }

    ArrayList absoluteLayerTimeBean = new ArrayList();
    for (int i = 0; i < absoluteLayerTimeArr.length; i++) {
        LayerTimeBean layerBean = new LayerTimeBean();
        BeanUtils.copyProperties(layerBean, absoluteLayerTimeArr[i]);
        absoluteLayerTimeBean.add(layerBean);
    }

    perfBean.setLayerTimes(hierarchicalLayerTimeBean);
    perfBean.setAbsoluteLayerTimes(absoluteLayerTimeBean);
    request.setAttribute("perfBean", perfBean);
    return (mapping.findForward("continue"));
}

From source file: net.sf.jclal.sampling.supervised.Resample.java

/**
 * creates the subsample with replacement
 *
 * @param dataSet The dataset to extract a percent of instances
 * @param sampleSize the size to generate
 * @param actualClasses The actual classes
 * @param classIndices The indexes of the classes
 */
public void createSubsampleWithReplacement(WekaDataset dataSet, int sampleSize, int actualClasses,
        int[] classIndices) {

    int originalSize = dataSet.getNumInstances();

    Set<Integer> indexes = new HashSet<Integer>();

    Instances labeledInstances = new Instances(dataSet.getDataset(), sampleSize);

    for (int i = 0; i < sampleSize; i++) {

        int index = 0;

        if (getRandgen().uniform(0, 1) < biasToUniformClass) {

            // Pick a random class (of those classes that actually appear)
            int cIndex = getRandgen().choose(0, actualClasses);

            for (int j = 0, k = 0; j < classIndices.length - 1; j++) {
                if ((classIndices[j] != classIndices[j + 1]) && (k++ >= cIndex)) {
                    // Pick a random instance of the designated class
                    index = classIndices[j] + getRandgen().choose(0, classIndices[j + 1] - classIndices[j]);
                    break;
                }
            }
        } else {
            index = getRandgen().choose(0, originalSize);
        }

        labeledInstances.add((Instance) dataSet.instance(index).copy());
        indexes.add(index);
    }

    setLabeledData(new WekaDataset(labeledInstances));

    ArrayList<Container> indexesArray = new ArrayList<Container>();

    for (Integer i : indexes) {
        indexesArray.add(new Container(i, i));
    }

    //The array is ordered in descendent order
    OrderUtils.mergeSort(indexesArray, true);

    //Copy the entire dataset into unlabeled set
    Instances unlabeledInstances = new Instances(dataSet.getDataset());

    //remove the instances that have been selected previously
    for (Container pair : indexesArray) {
        unlabeledInstances.remove(Integer.parseInt(pair.getValue().toString()));
    }

    setUnlabeledData(new WekaDataset(unlabeledInstances));

    //clean up
    labeledInstances.clear();
    unlabeledInstances.clear();
    indexes.clear();
    indexesArray.clear();

    labeledInstances = null;
    unlabeledInstances = null;
    indexes = null;
    indexesArray = null;

}

From source file: com.appdynamics.analytics.processor.event.ElasticSearchEventService.java

private void extractFailedDocIds(BulkResponse bulkResponse, Set<String> movedButNotDeleted) {
    movedButNotDeleted.clear();
    for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
        if (itemResponse.isFailed()) {
            movedButNotDeleted.add(itemResponse.getId());
        }
    }
}

From source file: gov.nih.nci.cabig.ctms.acegi.acls.dao.impl.BasicAclLookupStrategy.java

public Map readAclsById(ObjectIdentity[] objects, Sid[] sids) {

    Assert.isTrue(batchSize >= 1, "BatchSize must be >= 1");
    Assert.notEmpty(objects, "Objects to lookup required");

    // Map<ObjectIdentity,Acl>
    Map result = new HashMap(); // contains FULLY loaded Acl objects

    Set currentBatchToLoad = new HashSet(); // contains ObjectIdentitys

    for (int i = 0; i < objects.length; i++) {
        // Check we don't already have this ACL in the results
        if (result.containsKey(objects[i])) {
            continue; // already in results, so move to next element
        }

        // Check cache for the present ACL entry
        Acl acl = aclCache.getFromCache(objects[i]);

        // Ensure any cached element supports all the requested SIDs
        // (they should always, as our base impl doesn't filter on SID)
        if (acl != null) {
            if (acl.isSidLoaded(sids)) {
                result.put(acl.getObjectIdentity(), acl);

                continue; // now in results, so move to next element
            } else {
                throw new IllegalStateException(
                        "Error: SID-filtered element detected when implementation does not perform SID filtering - have you added something to the cache manually?");
            }
        }

        // To get this far, we have no choice but to retrieve it from DB
        // (although we don't do it until we get a batch of them to load)
        currentBatchToLoad.add(objects[i]);

        // Is it time to load from DB the currentBatchToLoad?
        if ((currentBatchToLoad.size() == this.batchSize) || ((i + 1) == objects.length)) {
            Map loadedBatch = lookupObjectIdentities(
                    (ObjectIdentity[]) currentBatchToLoad.toArray(new ObjectIdentity[] {}), sids);

            // Add loaded batch (all elements 100% initialized) to results
            result.putAll(loadedBatch);

            // Add the loaded batch to the cache
            Iterator loadedAclIterator = loadedBatch.values().iterator();

            while (loadedAclIterator.hasNext()) {
                aclCache.putInCache((AclImpl) loadedAclIterator.next());
            }

            currentBatchToLoad.clear();
        }
    }

    // Now we're done, check every requested object identity was found
    // (throw NotFoundException if needed)
    for (int i = 0; i < objects.length; i++) {
        if (!result.containsKey(objects[i])) {
            throw new NotFoundException(
                    "Unable to find ACL information for object identity '" + objects[i].toString() + "'");
        }
    }

    return result;

}

From source file: org.apache.ambari.server.controller.AmbariManagementControllerImplTest.java

private void testRunSmokeTestFlag(Map<String, String> mapRequestProps, AmbariManagementController amc,
        Set<ServiceRequest> serviceRequests) throws AmbariException {
    RequestStatusResponse response;

    //Stopping HDFS service
    serviceRequests.clear();
    serviceRequests.add(new ServiceRequest("c1", "HDFS", null, "INSTALLED"));
    response = amc.updateServices(serviceRequests, mapRequestProps, false, false);

    //Starting HDFS service. No run_smoke_test flag is set, smoke
    // test(HDFS_SERVICE_CHECK) won't run
    boolean runSmokeTest = false;
    serviceRequests.clear();
    serviceRequests.add(new ServiceRequest("c1", "HDFS", null, "STARTED"));
    response = amc.updateServices(serviceRequests, mapRequestProps, runSmokeTest, false);

    List<ShortTaskStatus> taskStatuses = response.getTasks();
    boolean smokeTestRequired = false;
    for (ShortTaskStatus shortTaskStatus : taskStatuses) {
        if (shortTaskStatus.getRole().equals(Role.HDFS_SERVICE_CHECK.toString())) {
            smokeTestRequired = true;
        }
    }
    assertFalse(smokeTestRequired);

    //Stopping HDFS service
    serviceRequests.clear();
    serviceRequests.add(new ServiceRequest("c1", "HDFS", null, "INSTALLED"));
    response = amc.updateServices(serviceRequests, mapRequestProps, false, false);

    //Starting HDFS service again.
    //run_smoke_test flag is set, smoke test will be run
    runSmokeTest = true;
    serviceRequests.clear();
    serviceRequests.add(new ServiceRequest("c1", "HDFS", null, "STARTED"));
    response = amc.updateServices(serviceRequests, mapRequestProps, runSmokeTest, false);

    taskStatuses = response.getTasks();
    smokeTestRequired = false;
    for (ShortTaskStatus shortTaskStatus : taskStatuses) {
        if (shortTaskStatus.getRole().equals(Role.HDFS_SERVICE_CHECK.toString())) {
            smokeTestRequired = true;
        }
    }
    assertTrue(smokeTestRequired);
}

From source file: org.apache.sentry.api.service.thrift.SentryPolicyStoreProcessor.java

@Override
public TListSentryRolesResponse list_sentry_roles_by_group(TListSentryRolesRequest request) throws TException {
    final Timer.Context timerContext = sentryMetrics.listRolesByGroupTimer.time();
    TListSentryRolesResponse response = new TListSentryRolesResponse();
    TSentryResponseStatus status;
    Set<TSentryRole> roleSet = new HashSet<TSentryRole>();
    String subject = request.getRequestorUserName();
    boolean checkAllGroups = false;
    try {
        validateClientVersion(request.getProtocol_version());
        Set<String> groups = getRequestorGroups(subject);
        // Don't check admin permissions for listing requestor's own roles
        if (AccessConstants.ALL.equalsIgnoreCase(request.getGroupName())) {
            checkAllGroups = true;
        } else {
            boolean admin = inAdminGroups(groups);
            //Only admin users can list all roles in the system ( groupname = null)
            //Non admin users are only allowed to list only groups which they belong to
            if (!admin && (request.getGroupName() == null || !groups.contains(request.getGroupName()))) {
                throw new SentryAccessDeniedException("Access denied to " + subject);
            } else {
                groups.clear();
                groups.add(request.getGroupName());
            }
        }
        roleSet = sentryStore.getTSentryRolesByGroupName(groups, checkAllGroups);
        response.setRoles(roleSet);
        response.setStatus(Status.OK());
    } catch (SentryNoSuchObjectException e) {
        response.setRoles(roleSet);
        String msg = "Request: " + request + " couldn't be completed, message: " + e.getMessage();
        LOGGER.error(msg, e);
        response.setStatus(Status.NoSuchObject(msg, e));
    } catch (SentryAccessDeniedException e) {
        LOGGER.error(e.getMessage(), e);
        response.setStatus(Status.AccessDenied(e.getMessage(), e));
    } catch (SentryGroupNotFoundException e) {
        LOGGER.error(e.getMessage(), e);
        response.setStatus(Status.AccessDenied(e.getMessage(), e));
    } catch (SentryThriftAPIMismatchException e) {
        LOGGER.error(e.getMessage(), e);
        response.setStatus(Status.THRIFT_VERSION_MISMATCH(e.getMessage(), e));
    } catch (Exception e) {
        String msg = "Unknown error for request: " + request + ", message: " + e.getMessage();
        LOGGER.error(msg, e);
        response.setStatus(Status.RuntimeError(msg, e));
    } finally {
        timerContext.stop();
    }
    return response;
}