List of usage examples for java.util Set containsAll
boolean containsAll(Collection<?> c);
From source file:org.jahia.ajax.gwt.helper.PublicationHelper.java
/**
 * Builds the full publication (or unpublication) info list for the given nodes and languages.
 *
 * <p>In publication mode ({@code checkForUnpublication == false}) the publication infos are
 * fetched once per language, cleared of internal/already-published references, converted to
 * GWT beans, and only entries that are publishable (or mandatory-language-unpublishable) AND
 * have either a workflow definition or the publish-without-workflow permission are kept.
 * In unpublication mode the infos are fetched once for all languages and entries with status
 * {@code PUBLISHED} are kept instead.</p>
 *
 * @param uuids                 UUIDs of the nodes to inspect
 * @param languages             languages to compute the infos for
 * @param currentUserSession    JCR session of the current user (its workspace is the source)
 * @param allSubTree            whether to include the whole subtree of each node
 * @param checkForUnpublication {@code true} to compute unpublication infos instead of publication
 * @return the collected publication infos, one entry per (language, uuid) pair, in insertion order
 * @throws GWTJahiaServiceException if the underlying repository access fails
 */
public List<GWTJahiaPublicationInfo> getFullPublicationInfos(List<String> uuids, Set<String> languages,
        JCRSessionWrapper currentUserSession, boolean allSubTree, boolean checkForUnpublication)
        throws GWTJahiaServiceException {
    try {
        if (!checkForUnpublication) {
            // Keyed by "language/uuid"; LinkedHashMap preserves discovery order for the result list.
            LinkedHashMap<String, GWTJahiaPublicationInfo> res = new LinkedHashMap<String, GWTJahiaPublicationInfo>();
            for (String language : languages) {
                List<PublicationInfo> infos = publicationService.getPublicationInfos(uuids,
                        Collections.singleton(language), true, true, allSubTree,
                        currentUserSession.getWorkspace().getName(), Constants.LIVE_WORKSPACE);
                for (PublicationInfo info : infos) {
                    info.clearInternalAndPublishedReferences(uuids);
                }
                final List<GWTJahiaPublicationInfo> infoList = convert(infos, currentUserSession, language,
                        "publish");
                // Consecutive entries of the same workflow group share the title of the group's
                // first entry (suffixed with the language display name).
                String lastGroup = null;
                String lastTitle = null;
                Locale l = new Locale(language);
                for (GWTJahiaPublicationInfo info : infoList) {
                    if (((info.isPublishable()
                            || info.getStatus() == GWTJahiaPublicationInfo.MANDATORY_LANGUAGE_UNPUBLISHABLE)
                            && (info.getWorkflowDefinition() != null
                                    || info.isAllowedToPublishWithoutWorkflow()))) {
                        res.put(language + "/" + info.getUuid(), info);
                        if (lastGroup == null || !info.getWorkflowGroup().equals(lastGroup)) {
                            lastGroup = info.getWorkflowGroup();
                            lastTitle = info.getTitle() + " ( " + l.getDisplayName(l) + " )";
                        }
                        info.setWorkflowTitle(lastTitle);
                    }
                }
            }
            return new ArrayList<GWTJahiaPublicationInfo>(res.values());
        } else {
            // Unpublication: fetch infos once (all languages), then convert per language.
            List<PublicationInfo> infos = publicationService.getPublicationInfos(uuids, null, false, true,
                    allSubTree, currentUserSession.getWorkspace().getName(), Constants.LIVE_WORKSPACE);
            LinkedHashMap<String, GWTJahiaPublicationInfo> res = new LinkedHashMap<String, GWTJahiaPublicationInfo>();
            for (String language : languages) {
                final List<GWTJahiaPublicationInfo> infoList = convert(infos, currentUserSession, language,
                        "unpublish");
                String lastGroup = null;
                String lastTitle = null;
                Locale l = new Locale(language);
                for (GWTJahiaPublicationInfo info : infoList) {
                    // Only currently published entries with a workflow (or the bypass permission) qualify.
                    if ((info.getStatus() == GWTJahiaPublicationInfo.PUBLISHED
                            && (info.getWorkflowDefinition() != null
                                    || info.isAllowedToPublishWithoutWorkflow()))) {
                        res.put(language + "/" + info.getUuid(), info);
                        if (lastGroup == null || !info.getWorkflowGroup().equals(lastGroup)) {
                            lastGroup = info.getWorkflowGroup();
                            lastTitle = info.getTitle() + " ( " + l.getDisplayName(l) + " )";
                        }
                        info.setWorkflowTitle(lastTitle);
                    }
                }
            }
            // If any node is published in a language that was NOT requested, restrict the result
            // set via keepOnlyTranslation (presumably to avoid unpublishing shared, non-translation
            // content — TODO confirm against keepOnlyTranslation's implementation).
            for (PublicationInfo info : infos) {
                Set<String> publishedLanguages = info.getAllPublishedLanguages();
                if (!languages.containsAll(publishedLanguages)) {
                    keepOnlyTranslation(res);
                }
            }
            return new ArrayList<GWTJahiaPublicationInfo>(res.values());
        }
    } catch (RepositoryException e) {
        logger.error("repository exception", e);
        throw new GWTJahiaServiceException(
                "Cannot get publication status for nodes " + uuids + ". Cause: " + e.getLocalizedMessage(), e);
    }
}
From source file:org.mitre.openid.connect.service.impl.TestMITREidDataService_1_2.java
@Test public void testExportGrants() throws IOException, ParseException { Date creationDate1 = formatter.parse("2014-09-10T22:49:44.090+0000", Locale.ENGLISH); Date accessDate1 = formatter.parse("2014-09-10T23:49:44.090+0000", Locale.ENGLISH); OAuth2AccessTokenEntity mockToken1 = mock(OAuth2AccessTokenEntity.class); when(mockToken1.getId()).thenReturn(1L); ApprovedSite site1 = new ApprovedSite(); site1.setId(1L);/*from w w w. j a va 2s . c o m*/ site1.setClientId("foo"); site1.setCreationDate(creationDate1); site1.setAccessDate(accessDate1); site1.setUserId("user1"); site1.setAllowedScopes(ImmutableSet.of("openid", "phone")); site1.setApprovedAccessTokens(ImmutableSet.of(mockToken1)); Date creationDate2 = formatter.parse("2014-09-11T18:49:44.090+0000", Locale.ENGLISH); Date accessDate2 = formatter.parse("2014-09-11T20:49:44.090+0000", Locale.ENGLISH); Date timeoutDate2 = formatter.parse("2014-10-01T20:49:44.090+0000", Locale.ENGLISH); ApprovedSite site2 = new ApprovedSite(); site2.setId(2L); site2.setClientId("bar"); site2.setCreationDate(creationDate2); site2.setAccessDate(accessDate2); site2.setUserId("user2"); site2.setAllowedScopes(ImmutableSet.of("openid", "offline_access", "email", "profile")); site2.setTimeoutDate(timeoutDate2); Set<ApprovedSite> allApprovedSites = ImmutableSet.of(site1, site2); Mockito.when(clientRepository.getAllClients()).thenReturn(new HashSet<ClientDetailsEntity>()); Mockito.when(approvedSiteRepository.getAll()).thenReturn(allApprovedSites); Mockito.when(blSiteRepository.getAll()).thenReturn(new HashSet<BlacklistedSite>()); Mockito.when(wlSiteRepository.getAll()).thenReturn(new HashSet<WhitelistedSite>()); Mockito.when(authHolderRepository.getAll()).thenReturn(new ArrayList<AuthenticationHolderEntity>()); Mockito.when(tokenRepository.getAllAccessTokens()).thenReturn(new HashSet<OAuth2AccessTokenEntity>()); Mockito.when(tokenRepository.getAllRefreshTokens()).thenReturn(new HashSet<OAuth2RefreshTokenEntity>()); 
Mockito.when(sysScopeRepository.getAll()).thenReturn(new HashSet<SystemScope>()); // do the data export StringWriter stringWriter = new StringWriter(); JsonWriter writer = new JsonWriter(stringWriter); writer.beginObject(); dataService.exportData(writer); writer.endObject(); writer.close(); // parse the output as a JSON object for testing JsonElement elem = new JsonParser().parse(stringWriter.toString()); JsonObject root = elem.getAsJsonObject(); // make sure the root is there assertThat(root.has(MITREidDataService.MITREID_CONNECT_1_2), is(true)); JsonObject config = root.get(MITREidDataService.MITREID_CONNECT_1_2).getAsJsonObject(); // make sure all the root elements are there assertThat(config.has(MITREidDataService.CLIENTS), is(true)); assertThat(config.has(MITREidDataService.GRANTS), is(true)); assertThat(config.has(MITREidDataService.WHITELISTEDSITES), is(true)); assertThat(config.has(MITREidDataService.BLACKLISTEDSITES), is(true)); assertThat(config.has(MITREidDataService.REFRESHTOKENS), is(true)); assertThat(config.has(MITREidDataService.ACCESSTOKENS), is(true)); assertThat(config.has(MITREidDataService.SYSTEMSCOPES), is(true)); assertThat(config.has(MITREidDataService.AUTHENTICATIONHOLDERS), is(true)); // make sure the root elements are all arrays assertThat(config.get(MITREidDataService.CLIENTS).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.GRANTS).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.WHITELISTEDSITES).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.BLACKLISTEDSITES).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.REFRESHTOKENS).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.ACCESSTOKENS).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.SYSTEMSCOPES).isJsonArray(), is(true)); assertThat(config.get(MITREidDataService.AUTHENTICATIONHOLDERS).isJsonArray(), is(true)); // check our scope list (this test) JsonArray sites = 
config.get(MITREidDataService.GRANTS).getAsJsonArray(); assertThat(sites.size(), is(2)); // check for both of our sites in turn Set<ApprovedSite> checked = new HashSet<>(); for (JsonElement e : sites) { assertThat(e.isJsonObject(), is(true)); JsonObject site = e.getAsJsonObject(); ApprovedSite compare = null; if (site.get("id").getAsLong() == site1.getId().longValue()) { compare = site1; } else if (site.get("id").getAsLong() == site2.getId().longValue()) { compare = site2; } if (compare == null) { fail("Could not find matching whitelisted site id: " + site.get("id").getAsString()); } else { assertThat(site.get("clientId").getAsString(), equalTo(compare.getClientId())); assertThat(site.get("creationDate").getAsString(), equalTo(formatter.print(compare.getCreationDate(), Locale.ENGLISH))); assertThat(site.get("accessDate").getAsString(), equalTo(formatter.print(compare.getAccessDate(), Locale.ENGLISH))); if (site.get("timeoutDate").isJsonNull()) { assertNull(compare.getTimeoutDate()); } else { assertThat(site.get("timeoutDate").getAsString(), equalTo(formatter.print(compare.getTimeoutDate(), Locale.ENGLISH))); } assertThat(site.get("userId").getAsString(), equalTo(compare.getUserId())); assertThat(jsonArrayToStringSet(site.getAsJsonArray("allowedScopes")), equalTo(compare.getAllowedScopes())); if (site.get("approvedAccessTokens").isJsonNull() || site.getAsJsonArray("approvedAccessTokens") == null) { assertTrue(compare.getApprovedAccessTokens() == null || compare.getApprovedAccessTokens().isEmpty()); } else { assertNotNull(compare.getApprovedAccessTokens()); Set<String> tokenIds = new HashSet<>(); for (OAuth2AccessTokenEntity entity : compare.getApprovedAccessTokens()) { tokenIds.add(entity.getId().toString()); } assertThat(jsonArrayToStringSet(site.getAsJsonArray("approvedAccessTokens")), equalTo(tokenIds)); } checked.add(compare); } } // make sure all of our clients were found assertThat(checked.containsAll(allApprovedSites), is(true)); }
From source file:org.photovault.imginfo.PhotoInfo.java
/**
 * Find the image instance that is preferred for use in a particular situation. This method
 * seeks an image that has at least a given resolution, has certain operations already
 * applied, and is available (a file copy can be found).
 *
 * @param requiredOpers Set of operations that must be applied in the returned image. Note
 *        that operations not specified for this photo are ignored, so even if this set is
 *        non-empty the method may return the original image.
 * @param allowedOpers  Set of operations that may be applied to the returned image
 * @param minWidth      Minimum width of the returned image in pixels
 * @param minHeight     Minimum height of the returned image in pixels
 * @param maxWidth      Maximum width of the returned image in pixels
 * @param maxHeight     Maximum height of the returned image in pixels
 * @return Image that best matches the given criteria, or {@code null} if no such image
 *         exists or none is available.
 */
public ImageDescriptorBase getPreferredImage(Set<ImageOperations> requiredOpers,
        Set<ImageOperations> allowedOpers, int minWidth, int minHeight, int maxWidth, int maxHeight) {
    ImageDescriptorBase preferred = null;
    EnumSet<ImageOperations> appliedPreferred = null;
    // We are not interested in operations that are not specified for this photo, so drop
    // them from the required set (defensive copy first — the caller's set is not mutated).
    EnumSet<ImageOperations> specifiedOpers = getAppliedOperations();
    requiredOpers = EnumSet.copyOf(requiredOpers);
    requiredOpers.removeAll(EnumSet.complementOf(specifiedOpers));
    /* Would the original be OK? It qualifies when nothing is required, it fits the maximum
       bounds, and an available file copy exists. */
    if (requiredOpers.size() == 0 && original.getWidth() <= maxWidth && original.getHeight() <= maxHeight
            && original.getFile().findAvailableCopy() != null) {
        preferred = original;
        appliedPreferred = EnumSet.noneOf(ImageOperations.class);
    }
    // Calculate minimum & maximum scaling of resolution compared to original.
    // NOTE(review): minScale is derived from widths but maxScale from heights — confirm
    // this asymmetry is intentional.
    double minScale = ((double) minWidth) / ((double) original.getWidth());
    double maxScale = ((double) maxHeight) / ((double) original.getHeight());
    if (allowedOpers.contains(ImageOperations.CROP)) {
        // When cropping is allowed, adjust the min/max box to the cropped aspect ratio and
        // recompute the scale bounds relative to the cropped size.
        Dimension croppedSize = getCroppedSize();
        double aspectRatio = croppedSize.getWidth() / croppedSize.getHeight();
        double miw = minWidth;
        double mih = minHeight;
        double maw = maxWidth;
        double mah = maxHeight;
        if (mih == 0.0 || (miw / mih) > aspectRatio) {
            mih = miw / aspectRatio;
        }
        if (mih > 0.0 && (miw / mih) < aspectRatio) {
            miw = mih * aspectRatio;
        }
        if (maw / mah > aspectRatio) {
            maw = mah * aspectRatio;
        }
        if (maw / mah < aspectRatio) {
            mah = maw / aspectRatio;
        }
        miw = Math.floor(miw);
        mih = Math.floor(mih);
        maw = Math.ceil(maw);
        mah = Math.ceil(mah);
        minScale = ((double) miw) / ((double) croppedSize.getWidth());
        maxScale = ((double) maw) / ((double) croppedSize.getWidth());
    }
    // Check the copies: a copy qualifies when its scale is within bounds, a file is
    // available, its applied operations cover the required ones without exceeding the
    // allowed ones, and it is consistent with the photo's current settings.
    Set<CopyImageDescriptor> copies = original.getCopies();
    for (CopyImageDescriptor copy : copies) {
        double scale = ((double) copy.getWidth()) / ((double) original.getWidth());
        if (copy.getAppliedOperations().contains(ImageOperations.CROP)) {
            // Cropped copies are measured against the cropped size instead of the original.
            scale = ((double) copy.getWidth()) / ((double) getCroppedSize().getWidth());
        }
        if (scale >= minScale && scale <= maxScale && copy.getFile().findAvailableCopy() != null) {
            EnumSet<ImageOperations> applied = copy.getAppliedOperations();
            if (applied.containsAll(requiredOpers) && allowedOpers.containsAll(applied)
                    && isConsistentWithCurrentSettings(copy)) {
                // This is a potential one; prefer a copy whose applied-set is not already
                // covered by the current candidate's applied-set.
                if (preferred == null || !appliedPreferred.containsAll(applied)) {
                    preferred = copy;
                    appliedPreferred = applied;
                }
            }
        }
    }
    return preferred;
}
From source file:com.datatorrent.stram.StreamingContainerManager.java
/**
 * Consolidates the per-window end-window statistics collected from operators.
 *
 * <p>First trims the stats map down to {@code maxWindowsBehindForStats} entries (dropping the
 * oldest, incomplete windows), then walks the remaining windows in ascending order: complete
 * windows (stats from every current operator) are aggregated and removed, stale windows are
 * discarded, and processing stops at the first window that is still awaiting stats. Windows
 * that reference operators no longer in the plan (e.g. after a repartition) are discarded.</p>
 */
private void calculateEndWindowStats() {
    if (!endWindowStatsOperatorMap.isEmpty()) {
        Set<Integer> allCurrentOperators = plan.getAllOperators().keySet();
        // Bound the map size: evict the oldest windows until we are within the limit.
        if (endWindowStatsOperatorMap.size() > this.vars.maxWindowsBehindForStats) {
            LOG.warn("Some operators are behind for more than {} windows! Trimming the end window stats map",
                    this.vars.maxWindowsBehindForStats);
            while (endWindowStatsOperatorMap.size() > this.vars.maxWindowsBehindForStats) {
                LOG.debug(
                        "Removing incomplete end window stats for window id {}. Collected operator set: {}. Complete set: {}",
                        endWindowStatsOperatorMap.firstKey(),
                        endWindowStatsOperatorMap.get(endWindowStatsOperatorMap.firstKey()).keySet(),
                        allCurrentOperators);
                endWindowStatsOperatorMap.remove(endWindowStatsOperatorMap.firstKey());
            }
        }
        //logicalMetrics.clear();
        int numOperators = allCurrentOperators.size();
        Long windowId = endWindowStatsOperatorMap.firstKey();
        // Walk windows in ascending key order; higherKey(removedKey) still advances correctly
        // after a remove, since it only depends on key ordering.
        while (windowId != null) {
            Map<Integer, EndWindowStats> endWindowStatsMap = endWindowStatsOperatorMap.get(windowId);
            Set<Integer> endWindowStatsOperators = endWindowStatsMap.keySet();
            aggregateMetrics(windowId, endWindowStatsMap);
            criticalPathInfo = findCriticalPath();
            if (allCurrentOperators.containsAll(endWindowStatsOperators)) {
                if (endWindowStatsMap.size() < numOperators) {
                    // Incomplete window: discard if older than the last complete one, otherwise
                    // stop and wait for the remaining operators to report.
                    if (windowId < completeEndWindowStatsWindowId) {
                        LOG.debug("Disregarding stale end window stats for window {}", windowId);
                        endWindowStatsOperatorMap.remove(windowId);
                    } else {
                        break;
                    }
                } else {
                    // Complete window: consume it and record it as the current complete window.
                    endWindowStatsOperatorMap.remove(windowId);
                    currentEndWindowStatsWindowId = windowId;
                }
            } else {
                // the old stats contains operators that do not exist any more
                // this is probably right after a partition happens.
                LOG.debug("Stats for non-existent operators detected. Disregarding end window stats for window {}",
                        windowId);
                endWindowStatsOperatorMap.remove(windowId);
            }
            windowId = endWindowStatsOperatorMap.higherKey(windowId);
        }
    }
}
From source file:org.apache.hadoop.hive.ql.parse.sql.HiveASTGenerator.java
/**
 * Check if this is a correlated filter, i.e. whether the filter subtree refers to any table
 * alias that is not among the source tables of the enclosing subquery.
 *
 * @param op             top node of this filter subtree
 * @param qInfo          belonging qInfo
 * @param selectKeyThisQ select key of this subquery
 * @return {@code true} when the filter references at least one table alias from outside
 *         this subquery's own source tables (correlated), {@code false} otherwise
 */
private boolean isCorrelatedFilter(SqlASTNode op, QueryInfo qInfo, SqlASTNode selectKeyThisQ) {
    LOG.debug("Enclosing Select Key: " + selectKeyThisQ.toStringTree());
    Set<String> thisQTbls = qInfo.getSrcTblAliasForSelectKey(selectKeyThisQ);
    Set<String> referedTbls = new HashSet<String>();
    SqlXlateUtil.getReferredTblAlias(op, referedTbls);
    LOG.debug("Referred Tables : " + referedTbls);
    LOG.debug("Src Tables: " + thisQTbls);
    // Correlated iff some referred alias is NOT covered by this query's own source tables
    // (replaces the former `if (containsAll) return false; else return true;` anti-pattern).
    return !thisQTbls.containsAll(referedTbls);
}
From source file:org.ballerinalang.bre.bvm.BVM.java
/**
 * Checks field-level equivalency of two record types: every field declared on the LHS record
 * must be satisfied by an assignable field on the RHS record.
 *
 * @param lhsType         the target record type
 * @param rhsType         the source record type being checked against the LHS
 * @param unresolvedTypes type pairs already under resolution (guards recursive assignability)
 * @return {@code true} if the RHS record's fields are equivalent to the LHS record's fields
 */
private static boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType,
                                             List<TypePair> unresolvedTypes) {
    Map<String, BField> rhsFields = rhsType.getFields();
    Set<String> lhsFieldNames = lhsType.getFields().keySet();
    for (BField lhsField : lhsType.getFields().values()) {
        BField rhsField = rhsFields.get(lhsField.fieldName);
        // If the LHS field is a required one, there has to be a corresponding required field in the RHS record.
        if (!Flags.isFlagOn(lhsField.flags, Flags.OPTIONAL)
                && (rhsField == null || Flags.isFlagOn(rhsField.flags, Flags.OPTIONAL))) {
            return false;
        }
        // Even for an optional LHS field, a missing or non-assignable RHS counterpart fails
        // the check. NOTE(review): this means an optional LHS field absent on the RHS also
        // fails — confirm that is the intended semantics.
        if (rhsField == null || !isAssignable(rhsField.fieldType, lhsField.fieldType, unresolvedTypes)) {
            return false;
        }
    }
    if (lhsType.sealed) {
        // A sealed LHS record admits no extra fields: every RHS field name must be known to the LHS.
        return lhsFieldNames.containsAll(rhsFields.keySet());
    }
    // Open LHS record: each extra RHS field must be assignable to the LHS rest-field type.
    return rhsFields.values().stream().filter(field -> !lhsFieldNames.contains(field.fieldName))
            .allMatch(field -> isAssignable(field.fieldType, lhsType.restFieldType, unresolvedTypes));
}
From source file:de.uni_koblenz.jgralab.utilities.rsa.Rsa2Tg.java
/**
 * Collects the GraphElementClasses that may nest the given EdgeClass.
 *
 * <p>A depth-first search is started from each VertexClass incident to {@code containedEC},
 * marking candidate nesting elements and the edges on the paths to them. An EdgeClass
 * candidate qualifies if it nests at least one incident VertexClass (and is not
 * {@code containedEC} itself); a VertexClass candidate must have been reached from ALL
 * incident VertexClasses. Finally, candidates that are only reachable through another
 * candidate's paths are pruned.</p>
 *
 * @param containedEC the EdgeClass whose potential nesting elements are sought
 * @return the surviving nesting candidates, in discovery order
 */
private List<GraphElementClass> getAllNestingElements(EdgeClass containedEC) {
    List<GraphElementClass> nestedGECs = new LinkedList<GraphElementClass>();
    Set<VertexClass> incidentVertexClasses = new HashSet<VertexClass>();
    // Marker: for each graph element, the set of nesting GraphElementClasses found for it.
    LocalGenericGraphMarker<Set<GraphElementClass>> nestedIncidentVertexClasses = new LocalGenericGraphMarker<Set<GraphElementClass>>(
            sg);
    // Marker: for each graph element, the edges lying on some path to it.
    LocalGenericGraphMarker<Set<Edge>> edgesInPath = new LocalGenericGraphMarker<Set<Edge>>(sg);
    // depth first search started at each incident VertexClass (each distinct one only once)
    for (IncidenceClass ic : getAllIncidenceClasses(containedEC)) {
        VertexClass vc = getConnectedVertexClass(ic);
        if (!incidentVertexClasses.contains(vc)) {
            markAllNestingGraphElementClasses(nestedIncidentVertexClasses, vc, edgesInPath);
            incidentVertexClasses.add(vc);
        }
    }
    // all edges are candidates
    for (GraphElement<?, ?, ?, ?> ge : nestedIncidentVertexClasses.getMarkedElements()) {
        if (EdgeClass.class.isInstance(ge)) {
            // ge must nest one incident VertexClass of containedEC
            if (ge != containedEC) {
                nestedGECs.add((EdgeClass) ge);
            }
        } else {
            // ge must nest all incident VertexClasses of containedEC: it qualifies only if it
            // was marked from every distinct incident VertexClass.
            if (nestedIncidentVertexClasses.getMark(ge).size() == incidentVertexClasses.size()) {
                nestedGECs.add((VertexClass) ge);
            }
        }
    }
    // Prune candidates that are subsumed by another candidate's paths.
    Set<GraphElementClass> toDelete = new HashSet<GraphElementClass>();
    for (int i = 0; i < nestedGECs.size(); i++) {
        GraphElementClass currentI = nestedGECs.get(i);
        Set<Edge> edgesOfI = edgesInPath.getMark(currentI);
        for (int j = 0; j < nestedGECs.size(); j++) {
            if (j == i) {
                continue;
            }
            GraphElementClass currentJ = nestedGECs.get(j);
            Set<Edge> edgesOfJ = edgesInPath.getMark(currentJ);
            if (edgesOfJ.containsAll(edgesOfI) && !existsAPath(incidentVertexClasses, currentJ, edgesOfI)) {
                /*
                 * edgesOfJ is the set of all edges on a path to GraphElementClass j; edgesOfI
                 * is the set of all edges on a path to GraphElementClass i. If edgesOfI is a
                 * subset of edgesOfJ and j could not be reached via a path which does not
                 * contain edges of the set edgesOfI, then j can be deleted.
                 */
                toDelete.add(currentJ);
            }
        }
    }
    nestedGECs.removeAll(toDelete);
    return nestedGECs;
}
From source file:com.ugam.collage.plus.service.people_count.impl.PeopleAccountingServiceImpl.java
/** * @param empcntClientProjectDataList// ww w . j a v a2 s. co m * @param empClientProjectTeamStructList * @param employee * @param month * @param year * @param costCentre * @param countType * @param allignedTimeZero * @param assistedTimeZero * @param apportionedTimeZero * @param allignedTimeOne * @param totalTimeOne */ private void getSingleProjectDetail(List<EmpcntClientProjectData> empOpenCntClientProjectDataList, List<EmpcntClientProjectData> empCloseCntClientProjectDataList, List<EmpClientProjectTeamStruct> empClientProjectTeamStructList, EmployeeMaster employee, TabMonth month, TabYear year, CostCentre costCentre, CountClassification countType, BigDecimal allignedTimeZero, BigDecimal assistedTimeZero, BigDecimal apportionedTimeZero, BigDecimal allignedTimeOne, BigDecimal totalTimeOne, Integer countTypeId, Map<String, EmployeePcTagsTeamStruct> employeePcTagsTeamStructMap) { /* * Also assign to assisted if project detail present in both assigned * and unassigned list * * Note : Only in unassigned project . 
do the remaining count as per * revenue to apportion * * If not present in revenue table then go to zero project details */ logger.debug("<====getSingleProjectDetail START====>"); Integer employeeId = employee.getEmployeeId(); Integer yearId = year.getYearId(); Integer monthId = month.getMonthId(); String costCentreId = costCentre.getCostCentreId(); BigDecimal deviderHour = new BigDecimal(Constants.TOTAL_WORKING_HOURS); logger.debug("getSingleProjectDetail parameter===>" + employeeId + "::" + yearId + "::" + monthId + "::" + costCentreId + "::" + deviderHour); // Get project details Map<Integer, EmpClientProjectTeamStruct> employeeProjectIds = new HashMap<Integer, EmpClientProjectTeamStruct>(); Map<Integer, EmpClientProjectTeamStruct> validEmployeeProjectIds = new HashMap<Integer, EmpClientProjectTeamStruct>(); for (EmpClientProjectTeamStruct empClientProjectTeamStructThree : empClientProjectTeamStructList) { employeeProjectIds.put(empClientProjectTeamStructThree.getProjectMaster().getProjectId(), empClientProjectTeamStructThree); } validEmployeeProjectIds.putAll(employeeProjectIds); // logger.debug("validEmployeeProjectIds 1:size===>" + // validEmployeeProjectIds.size()); // check in revenue table for (Integer key : employeeProjectIds.keySet()) { EmpClientProjectTeamStruct mapValues = employeeProjectIds.get(key); List<CollageProjectRevenue> listValues = collageProjectRevenueDao .findByYearIdMonthIdProjectIdCostCentreId(mapValues.getTabYear().getYearId(), mapValues.getTabMonth().getMonthId(), mapValues.getProjectMaster().getProjectId(), costCentre.getCostCentreId()); if (listValues.isEmpty()) { validEmployeeProjectIds.remove(key); } } // logger.debug("validEmployeeProjectIds 2:size===>" + // validEmployeeProjectIds.size()); if (validEmployeeProjectIds.isEmpty()) { getZeroProjectsDetail(yearId, monthId, costCentreId, empOpenCntClientProjectDataList, empCloseCntClientProjectDataList, employee, month, year, costCentre, countType, allignedTimeZero, assistedTimeZero, 
totalTimeOne, totalTimeOne, countTypeId, employeePcTagsTeamStructMap); } // Get list of project from execution data for that employee List<Integer> projectIdList = executionDataDao.findByPersonYearMonthCostCentre(employeeId, yearId, monthId, costCentreId); // logger.debug("execution data projects===>" + projectIdList.size()); if (projectIdList.isEmpty()) { // logger.debug("Contain InValid projects :(Assign count one)===>"); for (Integer projectId : validEmployeeProjectIds.keySet()) { EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId); // logger.debug("978: Contain InValid projects :(Assign count one)===>"+1); EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee, mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year, costCentre, allignedTimeOne, assistedTimeZero, apportionedTimeZero, allignedTimeOne); if (countTypeId == 1) { empOpenCntClientProjectDataList.add(empcntClientProjectData); } else if (countTypeId == 2) { empCloseCntClientProjectDataList.add(empcntClientProjectData); } } } else { // logger.debug("Else Contain Valid projects===>"); Integer validEmployeeProjectCount = validEmployeeProjectIds.size(); // Get valid projects list=>project is both revenue data and // execution data Set<Integer> validAllProjects = new HashSet<Integer>(); for (Integer projectId : projectIdList) { List<CollageProjectRevenue> listValues = collageProjectRevenueDao .findByYearIdMonthIdProjectIdCostCentreId(yearId, monthId, projectId, costCentreId); if (!listValues.isEmpty()) { validAllProjects.add(projectId); } } Integer validAllProjectCount = validAllProjects.size(); // logger.debug("validAllProjects :size===>" + // validAllProjects.size()); // Total hour worked by an Employee List<BigDecimal> toatalHours = executionDataDao.findByPersonIdYearIdMonthIdCostCentreId(employeeId, yearId, monthId, costCentreId); BigDecimal toatlTime = toatalHours.get(0); // logger.debug("ToatalHours===>" + toatlTime); 
// Separate assigned projects from execution data projects Map<Integer, BigDecimal> assignedProjects = new HashMap<Integer, BigDecimal>(); Map<Integer, BigDecimal> unAssignedProjects = new HashMap<Integer, BigDecimal>(); List<Object[]> allProjectTimeList = executionDataDao .findByEmployeeIdYearIdMonthIdCostCentreId(employeeId, yearId, monthId, costCentreId); for (Object[] result : allProjectTimeList) { Integer projectId = (Integer) result[0]; BigDecimal hour = (BigDecimal) result[1]; Integer companyId = (Integer) result[2]; if (validEmployeeProjectIds.containsKey(projectId) && validAllProjects.contains(projectId)) { // logger.debug("UnAssignedProjects===>" + // projectId+"::"+hour+"::"+companyId); assignedProjects.put(projectId, hour); } if (!validEmployeeProjectIds.containsKey(projectId) && validAllProjects.contains(projectId)) { // logger.debug("assignedProjects===>" + // projectId+"::"+hour+"::"+companyId); unAssignedProjects.put(projectId, hour); } } if (validEmployeeProjectCount == 1 && validAllProjectCount == 1 && validAllProjects.containsAll(validEmployeeProjectIds.keySet()) && unAssignedProjects.isEmpty()) { // logger.debug("validEmployeeProjectCount==validAllProjectCount :(Only in assigned projects)"); for (Integer key : assignedProjects.keySet()) { // Get time spent on each project by employee id Integer projectId = key; EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId); // logger.debug("1034 :validEmployeeProjectCount==validAllProjectCount :(Only in assigned projects)===>1"); EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee, mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year, costCentre, allignedTimeOne, assistedTimeZero, apportionedTimeZero, allignedTimeOne); if (countTypeId == 1) { empOpenCntClientProjectDataList.add(empcntClientProjectData); } if (countTypeId == 2) { empCloseCntClientProjectDataList.add(empcntClientProjectData); } } } else if 
(!assignedProjects.isEmpty() && !unAssignedProjects.isEmpty()) { // logger.debug("1047 : Both in assigned and unassigned projects===>"); if (toatlTime.compareTo(new BigDecimal(Constants.TOTAL_WORKING_HOURS)) >= 0) { // logger.debug("Worked hours===> >=168"); for (Integer key : assignedProjects.keySet()) { // Get time spent on each project by employee id Integer projectId = key; BigDecimal timeByProject = assignedProjects.get(key); EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId); BigDecimal workedHours = timeByProject.divide(toatlTime, 2, RoundingMode.HALF_EVEN); workedHours = workedHours.setScale(2, RoundingMode.CEILING); // logger.debug("1056 :assigned:(Both in assigned and unassigned projects===>"+workedHours); EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee, mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year, costCentre, workedHours, assistedTimeZero, apportionedTimeZero, workedHours); if (countTypeId == 1) { empOpenCntClientProjectDataList.add(empcntClientProjectData); } if (countTypeId == 2) { empCloseCntClientProjectDataList.add(empcntClientProjectData); } } for (Integer key : unAssignedProjects.keySet()) { // Get time spent on each project by employee id Integer projectId = key; BigDecimal timeByProject = unAssignedProjects.get(key); EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId); BigDecimal workedHours = timeByProject.divide(toatlTime, 2, RoundingMode.HALF_EVEN); workedHours = workedHours.setScale(2, RoundingMode.CEILING); // logger.debug("1073 :unassigned :(Both in assigned and unassigned projects===>"+workedHours); EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee, mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year, costCentre, apportionedTimeZero, workedHours, apportionedTimeZero, workedHours); if (countTypeId == 1) { 
empOpenCntClientProjectDataList.add(empcntClientProjectData); } if (countTypeId == 2) { empCloseCntClientProjectDataList.add(empcntClientProjectData); } } } else { // logger.debug("Worked hours===> <168"); BigDecimal totalUnAssingnedHours = BigDecimal.ZERO; BigDecimal assingnedHours = BigDecimal.ZERO; for (Integer key : unAssignedProjects.keySet()) { // Get time spent on each project by employee id BigDecimal timeByProject = unAssignedProjects.get(key); BigDecimal workedHours = timeByProject.divide(deviderHour, 2, RoundingMode.HALF_EVEN); totalUnAssingnedHours = totalUnAssingnedHours.add(workedHours); workedHours = workedHours.setScale(2, RoundingMode.CEILING); // Assign to assisted count for unAssignedProjects Integer projectId = key; List<ProjectMaster> projectList = projectMasterDao.findByProjectId(projectId); ProjectMaster projectMaster = projectList.get(0); CompanyMaster companyMaster = projectMaster.getCompanyMaster(); // logger.debug("769: Assisted hours (Both in assigned and unassigned projects) 2===>"+workedHours); EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee, companyMaster, countType, month, projectMaster, year, costCentre, apportionedTimeZero, workedHours, apportionedTimeZero, workedHours); if (countTypeId == 1) { empOpenCntClientProjectDataList.add(empcntClientProjectData); } if (countTypeId == 2) { empCloseCntClientProjectDataList.add(empcntClientProjectData); } } totalUnAssingnedHours = BigDecimal.ONE.subtract(totalUnAssingnedHours); // logger.debug("totalUnAssingnedHours===> "+totalUnAssingnedHours); for (Map.Entry<Integer, BigDecimal> entry : assignedProjects.entrySet()) { assingnedHours = assingnedHours.add(entry.getValue()); } // logger.debug("assingnedHours===> "+assingnedHours); for (Integer key : assignedProjects.keySet()) { Integer projectId = key; BigDecimal timeByProject = assignedProjects.get(key); // logger.debug("1119 :projectId : timeByProject===> "+projectId+" : "+timeByProject); 
EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId); BigDecimal averageWorkedHours = timeByProject.divide(assingnedHours, 2, RoundingMode.HALF_EVEN); // logger.debug("1121 :assingnedHours : totalUnAssingnedHours===> "+assingnedHours+" : "+totalUnAssingnedHours); BigDecimal actualWorkedHours = averageWorkedHours.multiply(totalUnAssingnedHours); actualWorkedHours = actualWorkedHours.setScale(2, RoundingMode.CEILING); // logger.debug("1124 :averageWorkedHours : actualWorkedHours===> "+averageWorkedHours+" : "+actualWorkedHours); EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee, mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year, costCentre, actualWorkedHours, assistedTimeZero, apportionedTimeZero, actualWorkedHours); if (countTypeId == 1) { empOpenCntClientProjectDataList.add(empcntClientProjectData); } if (countTypeId == 2) { empCloseCntClientProjectDataList.add(empcntClientProjectData); } } } } else if (assignedProjects.isEmpty() && !unAssignedProjects.isEmpty()) { // logger.debug("In unassigned projects only===>"); if (toatlTime.compareTo(new BigDecimal(Constants.TOTAL_WORKING_HOURS)) >= 0) { // logger.debug("Worked hours===> >=168"); for (Integer key : unAssignedProjects.keySet()) { // Get time spent on each project by employee id Integer projectId = key; BigDecimal timeByProject = unAssignedProjects.get(key); EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId); BigDecimal workedHours = timeByProject.divide(toatlTime, 2, RoundingMode.HALF_EVEN); workedHours = workedHours.setScale(2, RoundingMode.CEILING); // logger.debug("1148 :In unassigned projects only===>"+workedHours); EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee, mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year, costCentre, apportionedTimeZero, workedHours, apportionedTimeZero, workedHours); if (countTypeId == 1) 
{ empOpenCntClientProjectDataList.add(empcntClientProjectData); } if (countTypeId == 2) { empCloseCntClientProjectDataList.add(empcntClientProjectData); } } } else { // logger.debug("Worked hours===> <168"); BigDecimal totalUnAssingnedHours = BigDecimal.ZERO; BigDecimal assingnedHours = BigDecimal.ZERO; for (Integer key : unAssignedProjects.keySet()) { // Get time spent on each project by employee id BigDecimal timeByProject = unAssignedProjects.get(key); BigDecimal workedHours = timeByProject.divide(deviderHour, 2, RoundingMode.HALF_EVEN); workedHours = workedHours.setScale(2, RoundingMode.CEILING); totalUnAssingnedHours = totalUnAssingnedHours.add(workedHours); // Assign to assisted count for unAssignedProjects Integer projectId = key; List<ProjectMaster> projectList = projectMasterDao.findByProjectId(projectId); ProjectMaster projectMaster = projectList.get(0); CompanyMaster companyMaster = projectMaster.getCompanyMaster(); // logger.debug("1173: Assisted hours (In unassigned projects) 2===>"+workedHours); EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee, companyMaster, countType, month, projectMaster, year, costCentre, apportionedTimeZero, workedHours, apportionedTimeZero, workedHours); if (countTypeId == 1) { empOpenCntClientProjectDataList.add(empcntClientProjectData); } if (countTypeId == 2) { empCloseCntClientProjectDataList.add(empcntClientProjectData); } } logger.debug("1209 totalUnAssingnedHours===> " + totalUnAssingnedHours); BigDecimal remainProportion = BigDecimal.ONE.subtract(totalUnAssingnedHours); logger.debug("remainProportion===> " + remainProportion); getRevenueCountProportion(empOpenCntClientProjectDataList, empCloseCntClientProjectDataList, employee, month, year, costCentre, countType, remainProportion, unAssignedProjects, countTypeId, employeePcTagsTeamStructMap); } } } // logger.debug("<====getSingleProjectDetail END====>"); }
From source file:com.ugam.collage.plus.service.people_count.impl.PeopleAccountingServiceImpl.java
/**
 * Distributes one employee's head count across the multiple projects he/she is
 * mapped to for the given year/month/cost-centre, appending one
 * {@code EmpcntClientProjectData} row per project to either the "open" or the
 * "close" result list (selected by {@code countTypeId}: 1 = open, 2 = close).
 *
 * Outline (as implemented below):
 * 1. Keep only mapped projects that have a revenue row for the period
 *    ("valid" projects); if none remain, delegate to getZeroProjectsDetail.
 * 2. If the employee has no execution (timesheet) data, apportion the count
 *    over the valid projects by their share of the cost-centre revenue.
 * 3. Otherwise split booked hours into "assigned" projects (also mapped to the
 *    employee) and "unassigned" ones (booked but not mapped) and derive
 *    aligned/assisted counts from the hour ratios.
 *
 * @param empOpenCntClientProjectDataList  receives rows when countTypeId == 1
 * @param empCloseCntClientProjectDataList receives rows when countTypeId == 2
 * @param empClientProjectTeamStructList   employee-to-project mappings for the period
 * @param employee                         employee whose count is being split
 * @param month                            period month
 * @param year                             period year
 * @param costCentre                       cost centre being processed
 * @param countType                        classification attached to every created row
 * @param allignedTimeZero                 BigDecimal zero for the "aligned" slot
 * @param assistedTimeZero                 BigDecimal zero for the "assisted" slot
 * @param apportionedTimeZero              BigDecimal zero for the "apportioned" slot
 * @param allignedTimeOne                  BigDecimal one (unused in this method)
 * @param totalTimeOne                     BigDecimal one, forwarded to getZeroProjectsDetail
 * @param countTypeId                      1 = open count, 2 = close count
 * @param employeePcTagsTeamStructMap      forwarded to the zero/revenue-proportion helpers
 */
private void getMultipleProjectDetail(List<EmpcntClientProjectData> empOpenCntClientProjectDataList,
        List<EmpcntClientProjectData> empCloseCntClientProjectDataList,
        List<EmpClientProjectTeamStruct> empClientProjectTeamStructList, EmployeeMaster employee, TabMonth month,
        TabYear year, CostCentre costCentre, CountClassification countType, BigDecimal allignedTimeZero,
        BigDecimal assistedTimeZero, BigDecimal apportionedTimeZero, BigDecimal allignedTimeOne,
        BigDecimal totalTimeOne, Integer countTypeId,
        Map<String, EmployeePcTagsTeamStruct> employeePcTagsTeamStructMap) {
    logger.debug("<====getMultipleProjectDetail START====>");
    Integer employeeId = employee.getEmployeeId();
    Integer yearId = year.getYearId();
    Integer monthId = month.getMonthId();
    String costCentreId = costCentre.getCostCentreId();
    // Standard monthly working hours; used as the divisor in the
    // "worked less than TOTAL_WORKING_HOURS" branches below.
    BigDecimal deviderHour = new BigDecimal(Constants.TOTAL_WORKING_HOURS);
    logger.debug("getMultipleProjectDetail parameter===>" + employeeId + "::" + yearId + "::" + monthId + "::"
            + costCentreId + "::" + deviderHour);
    // Index the employee's project mappings by project id.
    Map<Integer, EmpClientProjectTeamStruct> employeeProjectIds = new HashMap<Integer, EmpClientProjectTeamStruct>();
    Map<Integer, EmpClientProjectTeamStruct> validEmployeeProjectIds = new HashMap<Integer, EmpClientProjectTeamStruct>();
    for (EmpClientProjectTeamStruct empClientProjectTeamStructThree : empClientProjectTeamStructList) {
        employeeProjectIds.put(empClientProjectTeamStructThree.getProjectMaster().getProjectId(),
                empClientProjectTeamStructThree);
    }
    validEmployeeProjectIds.putAll(employeeProjectIds);
    // Drop mapped projects that have no revenue row for this period and cost
    // centre: only revenue-backed projects stay "valid".
    for (Integer key : employeeProjectIds.keySet()) {
        EmpClientProjectTeamStruct mapValues = employeeProjectIds.get(key);
        List<CollageProjectRevenue> listValues = collageProjectRevenueDao
                .findByYearIdMonthIdProjectIdCostCentreId(mapValues.getTabYear().getYearId(),
                        mapValues.getTabMonth().getMonthId(), mapValues.getProjectMaster().getProjectId(),
                        costCentre.getCostCentreId());
        if (listValues.isEmpty()) {
            validEmployeeProjectIds.remove(key);
        }
    }
    // No revenue-backed project at all: hand the employee to the zero-count
    // path. NOTE(review): there is no return here, so execution continues into
    // the logic below (with an empty validEmployeeProjectIds map) — confirm
    // the fall-through is intended and cannot double-count.
    if (validEmployeeProjectIds.isEmpty()) {
        getZeroProjectsDetail(yearId, monthId, costCentreId, empOpenCntClientProjectDataList,
                empCloseCntClientProjectDataList, employee, month, year, costCentre, countType, allignedTimeZero,
                assistedTimeZero, totalTimeOne, totalTimeOne, countTypeId, employeePcTagsTeamStructMap);
    }
    // Projects the employee actually booked time against (execution data).
    List<Integer> projectIdList = executionDataDao.findByPersonYearMonthCostCentre(employeeId, yearId, monthId,
            costCentreId);
    if (projectIdList.isEmpty()) {
        // No timesheet data: apportion the count over the valid projects by
        // their share of the total cost-centre revenue for the period.
        Map<Integer, BigDecimal> projectRevenueMap = new HashMap<Integer, BigDecimal>();
        BigDecimal sumOfRevenue = BigDecimal.ZERO;
        List<Object[]> collageProjectRevenueList = collageProjectRevenueDao.findByCostCentreIdYearIdMonthId(
                costCentre.getCostCentreId(), year.getYearId(), month.getMonthId());
        for (Object[] collageProjectRevenue : collageProjectRevenueList) {
            Integer projectId = (Integer) collageProjectRevenue[0];
            BigDecimal revenue = (BigDecimal) collageProjectRevenue[1];
            projectRevenueMap.put(projectId, revenue);
        }
        for (Integer key : projectRevenueMap.keySet()) {
            sumOfRevenue = sumOfRevenue.add(projectRevenueMap.get(key));
        }
        logger.debug("sumOfRevenue===>" + sumOfRevenue);
        for (Integer projectId : validEmployeeProjectIds.keySet()) {
            EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId);
            // NOTE(review): revenue is null if this project has no entry in
            // projectRevenueMap; the divide below would then throw NPE.
            BigDecimal revenue = projectRevenueMap.get(projectId);
            logger.debug("revenue===>" + revenue);
            BigDecimal projectRevenueCount = revenue.divide(sumOfRevenue, 2, RoundingMode.HALF_EVEN);
            projectRevenueCount = projectRevenueCount.setScale(2, RoundingMode.CEILING);
            // Constructor slot order appears to be (aligned, assisted,
            // apportioned, total) — TODO confirm against EmpcntClientProjectData.
            EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee,
                    mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year, costCentre,
                    projectRevenueCount, BigDecimal.ZERO, BigDecimal.ZERO, projectRevenueCount);
            if (countTypeId == 1) {
                empOpenCntClientProjectDataList.add(empcntClientProjectData);
            }
            if (countTypeId == 2) {
                empCloseCntClientProjectDataList.add(empcntClientProjectData);
            }
        }
    } else {
        Integer validEmployeeProjectCount = validEmployeeProjectIds.size();
        // "Valid all" projects: booked in execution data AND having revenue.
        Set<Integer> validAllProjects = new HashSet<Integer>();
        for (Integer projectId : projectIdList) {
            List<CollageProjectRevenue> listValues = collageProjectRevenueDao
                    .findByYearIdMonthIdProjectIdCostCentreId(yearId, monthId, projectId, costCentreId);
            if (!listValues.isEmpty()) {
                validAllProjects.add(projectId);
            }
        }
        Integer validAllProjectCount = validAllProjects.size();
        // Total hours the employee worked in the period (DAO returns a
        // single-element list; element 0 is the total).
        List<BigDecimal> toatalHours = executionDataDao.findByPersonIdYearIdMonthIdCostCentreId(employeeId, yearId,
                monthId, costCentreId);
        BigDecimal toatlTime = toatalHours.get(0);
        // Split booked projects into "assigned" (also mapped to the employee)
        // and "unassigned" (booked but not mapped), each keyed to its hours.
        Map<Integer, BigDecimal> assignedProjects = new HashMap<Integer, BigDecimal>();
        Map<Integer, BigDecimal> unAssignedProjects = new HashMap<Integer, BigDecimal>();
        List<Object[]> allProjectTimeList = executionDataDao.findByEmployeeIdYearIdMonthIdCostCentreId(employeeId,
                yearId, monthId, costCentreId);
        for (Object[] result : allProjectTimeList) {
            Integer projectId = (Integer) result[0];
            BigDecimal hour = (BigDecimal) result[1];
            // Row element 2 is the company id; currently unused here.
            Integer companyId = (Integer) result[2];
            if (validEmployeeProjectIds.containsKey(projectId) && validAllProjects.contains(projectId)) {
                assignedProjects.put(projectId, hour);
            }
            if (!validEmployeeProjectIds.containsKey(projectId) && validAllProjects.contains(projectId)) {
                unAssignedProjects.put(projectId, hour);
            }
        }
        if (validEmployeeProjectCount == validAllProjectCount
                && validAllProjects.containsAll(validEmployeeProjectIds.keySet()) && unAssignedProjects.isEmpty()) {
            // Case 1: everything the employee booked is also mapped to him/her
            // — each project gets its hour share as "aligned" count.
            for (Integer key : assignedProjects.keySet()) {
                Integer projectId = key;
                BigDecimal timeByProject = assignedProjects.get(key);
                EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId);
                BigDecimal workedHours = timeByProject.divide(toatlTime, 2, RoundingMode.HALF_EVEN);
                workedHours = workedHours.setScale(2, RoundingMode.CEILING);
                EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee,
                        mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year,
                        costCentre, workedHours, assistedTimeZero, apportionedTimeZero, workedHours);
                if (countTypeId == 1) {
                    empOpenCntClientProjectDataList.add(empcntClientProjectData);
                }
                if (countTypeId == 2) {
                    empCloseCntClientProjectDataList.add(empcntClientProjectData);
                }
            }
        } else if (!assignedProjects.isEmpty() && !unAssignedProjects.isEmpty()) {
            // Case 2: hours on both mapped and unmapped projects.
            if (toatlTime.compareTo(new BigDecimal(Constants.TOTAL_WORKING_HOURS)) >= 0) {
                // Full month (or more) worked: ratios taken against actual total.
                for (Integer key : assignedProjects.keySet()) {
                    Integer projectId = key;
                    BigDecimal timeByProject = assignedProjects.get(key);
                    EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId);
                    BigDecimal workedHours = timeByProject.divide(toatlTime, 2, RoundingMode.HALF_EVEN);
                    workedHours = workedHours.setScale(2, RoundingMode.CEILING);
                    EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee,
                            mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year,
                            costCentre, workedHours, assistedTimeZero, apportionedTimeZero, workedHours);
                    if (countTypeId == 1) {
                        empOpenCntClientProjectDataList.add(empcntClientProjectData);
                    }
                    if (countTypeId == 2) {
                        empCloseCntClientProjectDataList.add(empcntClientProjectData);
                    }
                }
                for (Integer key : unAssignedProjects.keySet()) {
                    // Unmapped projects are credited as "assisted" count; the
                    // project/company must be looked up from the master table.
                    Integer projectId = key;
                    BigDecimal timeByProject = unAssignedProjects.get(key);
                    List<ProjectMaster> projectList = projectMasterDao.findByProjectId(projectId);
                    ProjectMaster projectMaster = projectList.get(0);
                    CompanyMaster companyMaster = projectMaster.getCompanyMaster();
                    BigDecimal workedHours = timeByProject.divide(toatlTime, 2, RoundingMode.HALF_EVEN);
                    workedHours = workedHours.setScale(2, RoundingMode.CEILING);
                    EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee,
                            companyMaster, countType, month, projectMaster, year, costCentre, apportionedTimeZero,
                            workedHours, apportionedTimeZero, workedHours);
                    if (countTypeId == 1) {
                        empOpenCntClientProjectDataList.add(empcntClientProjectData);
                    }
                    if (countTypeId == 2) {
                        empCloseCntClientProjectDataList.add(empcntClientProjectData);
                    }
                }
            } else {
                // Partial month: unmapped hours are measured against the
                // standard month (deviderHour); whatever fraction of 1.0 is
                // left is then spread over the mapped projects.
                BigDecimal totalUnAssingnedHours = BigDecimal.ZERO;
                BigDecimal assingnedHours = BigDecimal.ZERO;
                for (Integer key : unAssignedProjects.keySet()) {
                    Integer projectId = key;
                    BigDecimal timeByProject = unAssignedProjects.get(key);
                    BigDecimal workedHours = timeByProject.divide(deviderHour, 2, RoundingMode.HALF_EVEN);
                    workedHours = workedHours.setScale(2, RoundingMode.CEILING);
                    totalUnAssingnedHours = totalUnAssingnedHours.add(workedHours);
                    // Assign to assisted count for unAssignedProjects.
                    List<ProjectMaster> projectList = projectMasterDao.findByProjectId(projectId);
                    ProjectMaster projectMaster = projectList.get(0);
                    CompanyMaster companyMaster = projectMaster.getCompanyMaster();
                    EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee,
                            companyMaster, countType, month, projectMaster, year, costCentre, apportionedTimeZero,
                            workedHours, apportionedTimeZero, workedHours);
                    if (countTypeId == 1) {
                        empOpenCntClientProjectDataList.add(empcntClientProjectData);
                    }
                    if (countTypeId == 2) {
                        empCloseCntClientProjectDataList.add(empcntClientProjectData);
                    }
                }
                // Remaining fraction of one head count after the unassigned share.
                totalUnAssingnedHours = BigDecimal.ONE.subtract(totalUnAssingnedHours);
                for (Map.Entry<Integer, BigDecimal> entry : assignedProjects.entrySet()) {
                    assingnedHours = assingnedHours.add(entry.getValue());
                }
                for (Integer key : assignedProjects.keySet()) {
                    Integer projectId = key;
                    BigDecimal timeByProject = assignedProjects.get(key);
                    EmpClientProjectTeamStruct mapValues = validEmployeeProjectIds.get(projectId);
                    // Weight = this project's share of all assigned hours.
                    BigDecimal averageWorkedHours = timeByProject.divide(assingnedHours, 2,
                            RoundingMode.HALF_EVEN);
                    BigDecimal actualWorkedHours = averageWorkedHours.multiply(totalUnAssingnedHours);
                    actualWorkedHours = actualWorkedHours.setScale(2, RoundingMode.CEILING);
                    EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee,
                            mapValues.getCompanyMaster(), countType, month, mapValues.getProjectMaster(), year,
                            costCentre, actualWorkedHours, assistedTimeZero, apportionedTimeZero,
                            actualWorkedHours);
                    if (countTypeId == 1) {
                        empOpenCntClientProjectDataList.add(empcntClientProjectData);
                    }
                    if (countTypeId == 2) {
                        empCloseCntClientProjectDataList.add(empcntClientProjectData);
                    }
                }
            }
        } else if (assignedProjects.isEmpty() && !unAssignedProjects.isEmpty()) {
            // Case 3: hours only on unmapped projects — everything becomes
            // "assisted" count.
            if (toatlTime.compareTo(new BigDecimal(Constants.TOTAL_WORKING_HOURS)) >= 0) {
                for (Integer key : unAssignedProjects.keySet()) {
                    Integer projectId = key;
                    BigDecimal timeByProject = unAssignedProjects.get(key);
                    List<ProjectMaster> projectList = projectMasterDao.findByProjectId(projectId);
                    ProjectMaster projectMaster = projectList.get(0);
                    CompanyMaster companyMaster = projectMaster.getCompanyMaster();
                    BigDecimal workedHours = timeByProject.divide(toatlTime, 2, RoundingMode.HALF_EVEN);
                    workedHours = workedHours.setScale(2, RoundingMode.CEILING);
                    EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee,
                            companyMaster, countType, month, projectMaster, year, costCentre, apportionedTimeZero,
                            workedHours, apportionedTimeZero, workedHours);
                    if (countTypeId == 1) {
                        empOpenCntClientProjectDataList.add(empcntClientProjectData);
                    }
                    if (countTypeId == 2) {
                        empCloseCntClientProjectDataList.add(empcntClientProjectData);
                    }
                }
            } else {
                BigDecimal totalUnAssingnedHours = BigDecimal.ZERO;
                // NOTE(review): assingnedHours is never read in this branch.
                BigDecimal assingnedHours = BigDecimal.ZERO;
                for (Integer key : unAssignedProjects.keySet()) {
                    Integer projectId = key;
                    BigDecimal timeByProject = unAssignedProjects.get(key);
                    BigDecimal workedHours = timeByProject.divide(deviderHour, 2, RoundingMode.HALF_EVEN);
                    workedHours = workedHours.setScale(2, RoundingMode.CEILING);
                    totalUnAssingnedHours = totalUnAssingnedHours.add(workedHours);
                    // Assign to assisted count for unAssignedProjects.
                    List<ProjectMaster> projectList = projectMasterDao.findByProjectId(projectId);
                    ProjectMaster projectMaster = projectList.get(0);
                    CompanyMaster companyMaster = projectMaster.getCompanyMaster();
                    EmpcntClientProjectData empcntClientProjectData = new EmpcntClientProjectData(employee,
                            companyMaster, countType, month, projectMaster, year, costCentre, apportionedTimeZero,
                            workedHours, apportionedTimeZero, workedHours);
                    if (countTypeId == 1) {
                        empOpenCntClientProjectDataList.add(empcntClientProjectData);
                    }
                    if (countTypeId == 2) {
                        empCloseCntClientProjectDataList.add(empcntClientProjectData);
                    }
                }
                // If less than one full head count was consumed, distribute the
                // remainder by revenue proportion.
                if (totalUnAssingnedHours.compareTo(BigDecimal.ONE) == -1) {
                    BigDecimal remainProportion = BigDecimal.ONE.subtract(totalUnAssingnedHours);
                    getRevenueCountProportion(empOpenCntClientProjectDataList, empCloseCntClientProjectDataList,
                            employee, month, year, costCentre, countType, remainProportion, unAssignedProjects,
                            countTypeId, employeePcTagsTeamStructMap);
                }
            }
        }
    }
    // logger.debug("<====getMultipleProjectDetail END====>");
}
From source file:org.mindswap.swoop.renderer.entity.ConciseFormatEntityRenderer.java
public void HSTMUPS(Set mups, OWLOntology onto, List MUPS, List explStr, Set satPaths, Set currPath) { // key step - make a backup of onto OWLOntology backup = swoopModel.cloneOntology(onto); try {/*from w w w .j av a 2 s . co m*/ for (Iterator iter = mups.iterator(); iter.hasNext();) { // reset ontology OWLOntology copyOnt = swoopModel.cloneOntology(backup); OWLObject axiom = (OWLObject) iter.next(); currPath.add(axiom); // System.out.println(axiom); // **** remove axiom from copyOnt ***** if (axiom instanceof OWLDisjointClassesAxiom) { OWLDisjointClassesAxiom dis = (OWLDisjointClassesAxiom) axiom; Set disSet = dis.getDisjointClasses(); Set newDisSet = new HashSet(); for (Iterator iter2 = disSet.iterator(); iter2.hasNext();) { OWLDescription desc = (OWLDescription) iter2.next(); if (desc instanceof OWLClass) newDisSet.add(copyOnt.getClass(((OWLClass) desc).getURI())); else newDisSet.add(desc); } OWLDisjointClassesAxiom newDis = copyOnt.getOWLDataFactory() .getOWLDisjointClassesAxiom(newDisSet); RemoveClassAxiom r = new RemoveClassAxiom(copyOnt, (OWLClassAxiom) newDis, null); r.accept((ChangeVisitor) copyOnt); } else if (axiom instanceof OWLEquivalentClassesAxiom) { OWLEquivalentClassesAxiom equ = (OWLEquivalentClassesAxiom) axiom; Set equSet = equ.getEquivalentClasses(); Set newEquSet = new HashSet(); List equList = new ArrayList(); for (Iterator iter2 = equSet.iterator(); iter2.hasNext();) { OWLDescription desc = (OWLDescription) iter2.next(); if (desc instanceof OWLClass) { newEquSet.add(copyOnt.getClass(((OWLClass) desc).getURI())); equList.add(copyOnt.getClass(((OWLClass) desc).getURI())); } else { newEquSet.add(desc); equList.add(desc); } } OWLEquivalentClassesAxiom newEqu = copyOnt.getOWLDataFactory() .getOWLEquivalentClassesAxiom(newEquSet); RemoveClassAxiom r = new RemoveClassAxiom(copyOnt, (OWLClassAxiom) newEqu, null); r.accept((ChangeVisitor) copyOnt); if (equList.size() == 2) { OWLDescription desc1 = (OWLDescription) equList.get(0); OWLDescription 
desc2 = (OWLDescription) equList.get(0); if (desc1 instanceof OWLClass) { RemoveEquivalentClass re = new RemoveEquivalentClass(copyOnt, (OWLClass) desc1, desc2, null); re.accept((ChangeVisitor) copyOnt); } if (desc2 instanceof OWLClass) { RemoveEquivalentClass re = new RemoveEquivalentClass(copyOnt, (OWLClass) desc2, desc1, null); re.accept((ChangeVisitor) copyOnt); } } } else if (axiom instanceof OWLSubClassAxiom) { OWLSubClassAxiom subA = (OWLSubClassAxiom) axiom; OWLDescription sub = subA.getSubClass(); OWLDescription sup = subA.getSuperClass(); OWLDescription newSub = sub; if (sub instanceof OWLClass) newSub = copyOnt.getClass(((OWLClass) sub).getURI()); OWLDescription newSup = sup; if (sup instanceof OWLClass) newSup = copyOnt.getClass(((OWLClass) sup).getURI()); OWLSubClassAxiom newSubA = copyOnt.getOWLDataFactory().getOWLSubClassAxiom(newSub, newSup); OntologyChange r = new RemoveClassAxiom(copyOnt, (OWLClassAxiom) newSubA, null); r.accept((ChangeVisitor) copyOnt); if (newSub instanceof OWLClass) { r = new RemoveSuperClass(copyOnt, (OWLClass) newSub, newSup, null); r.accept((ChangeVisitor) copyOnt); } } else if (axiom instanceof OWLPropertyDomainAxiom) { OWLPropertyDomainAxiom opd = (OWLPropertyDomainAxiom) axiom; OWLProperty prop = opd.getProperty(); OWLProperty newProp = null; if (prop instanceof OWLDataProperty) newProp = copyOnt.getDataProperty(prop.getURI()); else newProp = copyOnt.getObjectProperty(prop.getURI()); OWLDescription desc = opd.getDomain(); OWLDescription newDesc = desc; if (desc instanceof OWLClass) newDesc = copyOnt.getClass(((OWLClass) desc).getURI()); RemoveDomain rd = new RemoveDomain(copyOnt, newProp, newDesc, null); rd.accept((ChangeVisitor) copyOnt); } else if (axiom instanceof OWLObjectPropertyRangeAxiom) { OWLObjectPropertyRangeAxiom opd = (OWLObjectPropertyRangeAxiom) axiom; OWLObjectProperty prop = opd.getProperty(); OWLObjectProperty newProp = copyOnt.getObjectProperty(prop.getURI()); OWLDescription desc = opd.getRange(); 
OWLDescription newDesc = desc; if (desc instanceof OWLClass) newDesc = copyOnt.getClass(((OWLClass) desc).getURI()); RemoveObjectPropertyRange ropr = new RemoveObjectPropertyRange(copyOnt, newProp, newDesc, null); ropr.accept((ChangeVisitor) copyOnt); } else if (axiom instanceof OWLObjectPropertyInstance) { OWLObjectPropertyInstance oop = (OWLObjectPropertyInstance) axiom; OWLIndividual sub = copyOnt.getIndividual(oop.getSubject().getURI()); OWLObjectProperty prop = copyOnt.getObjectProperty(oop.getProperty().getURI()); OWLIndividual obj = copyOnt.getIndividual(oop.getObject().getURI()); RemoveObjectPropertyInstance ropi = new RemoveObjectPropertyInstance(copyOnt, sub, prop, obj, null); ropi.accept((ChangeVisitor) copyOnt); } else if (axiom instanceof OWLSameIndividualsAxiom) { OWLSameIndividualsAxiom osi = (OWLSameIndividualsAxiom) axiom; Set newInd = new HashSet(); for (Iterator it = osi.getIndividuals().iterator(); it.hasNext();) { newInd.add(copyOnt.getIndividual(((OWLIndividual) it.next()).getURI())); } OWLSameIndividualsAxiom copyInd = copyOnt.getOWLDataFactory() .getOWLSameIndividualsAxiom(newInd); RemoveIndividualAxiom ria = new RemoveIndividualAxiom(copyOnt, copyInd, null); ria.accept((ChangeVisitor) copyOnt); } else if (axiom instanceof OWLDifferentIndividualsAxiom) { OWLDifferentIndividualsAxiom osi = (OWLDifferentIndividualsAxiom) axiom; Set newInd = new HashSet(); for (Iterator it = osi.getIndividuals().iterator(); it.hasNext();) { newInd.add(copyOnt.getIndividual(((OWLIndividual) it.next()).getURI())); } OWLDifferentIndividualsAxiom copyInd = copyOnt.getOWLDataFactory() .getOWLDifferentIndividualsAxiom(newInd); RemoveIndividualAxiom ria = new RemoveIndividualAxiom(copyOnt, copyInd, null); ria.accept((ChangeVisitor) copyOnt); } //TODO: more removal! 
// test if copyOnt has changed //FIXME: not working when individual obj prop assertions are actually removed // if (copyOnt.equals(onto)) { // System.out.println("Ontology hasn't changed after removing axiom "+axiom); // continue; // } // get class in copyOnt OWLClass cla = null; if (swoopModel.getSelectedEntity() != null) cla = copyOnt.getClass(swoopModel.getSelectedEntity().getURI()); // early path termination boolean earlyTermination = false; for (Iterator i = satPaths.iterator(); i.hasNext();) { Set satPath = (HashSet) i.next(); if (satPath.containsAll(currPath)) { System.out.println("EARLY PATH TERMINATION!"); earlyTermination = true; break; } } if (!earlyTermination) { // check if there is a new mups of class Set newMUPS = new HashSet(); String expl = ""; if (swoopModel.isUseTableau()) { // use tableau tracing List explList = this.getTableauSOS(copyOnt, cla); expl = explList.get(0).toString(); newMUPS = (HashSet) explList.get(1); } else { // use black box newMUPS = this.getBlackBoxSOS(copyOnt, cla); } if (!newMUPS.isEmpty()) { if (!MUPS.contains(newMUPS)) { // print explanation for new MUPS MUPS.add(newMUPS); explStr.add(expl); System.out.println("FOUND NEW MUPS - MUPS COUNT: " + MUPS.size()); // recurse! HSTMUPS(newMUPS, copyOnt, MUPS, explStr, satPaths, currPath); } } else { satPaths.add(new HashSet(currPath)); } } currPath.remove(axiom); } } catch (Exception ex) { ex.printStackTrace(); } }