List of usage examples for java.util.EnumMap EnumMap
public EnumMap(Map<K, ? extends V> m)
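Most of the examples below use the companion constructor EnumMap(Class<K> keyType); the constructor shown above instead creates an enum map by copying an existing map. A minimal, self-contained sketch of the copy constructor follows (the Status enum and the class name are illustrative, not taken from any of the projects listed below):

import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;

public class EnumMapCopyExample {

    enum Status { NEW, ACTIVE, CLOSED }

    public static void main(String[] args) {
        // Source map: an ordinary HashMap keyed by the enum.
        Map<Status, String> source = new HashMap<>();
        source.put(Status.NEW, "just created");
        source.put(Status.CLOSED, "archived");

        // Copy constructor: infers the key type from the source map's entries.
        // If the source is a plain (non-EnumMap) map, it must contain at least
        // one entry so the key type can be determined; otherwise an
        // IllegalArgumentException is thrown.
        EnumMap<Status, String> copy = new EnumMap<>(source);

        System.out.println(copy); // entries print in enum declaration order
    }
}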
From source file:org.lunarray.model.generation.vaadin.render.factories.form.vaadin.FormPropertyRenderStrategyFactoryImpl.java
/**
 * The default constructor.
 *
 * @param form
 *            The form. May not be null.
 */
public FormPropertyRenderStrategyFactoryImpl(final FormComponent<? super E, E> form) {
    Validate.notNull(form, "Form may not be null.");
    this.form = form;
    this.eventBus = new Bus();
    this.builders = new HashMap<OperationDescriptor<E>, OperationInvocationBuilder<E>>();
    this.defaultFactory = new TextOutputPropertyStrategy.Factory();
    this.factories = new EnumMap<RenderType, StrategyFactory>(RenderType.class);
    this.factories.put(RenderType.CHECKBOX, new CheckboxPropertyStrategy.Factory());
    this.factories.put(RenderType.DATE_PICKER, new DatePickerPropertyStrategy.DateFactory());
    this.factories.put(RenderType.DATE_TIME_PICKER, new DatePickerPropertyStrategy.DateFactory());
    this.factories.put(RenderType.TIME_PICKER, new DatePickerPropertyStrategy.DateTimeFactory());
    this.factories.put(RenderType.PICKLIST, new ListSelectPropertyStrategy.Factory());
    this.factories.put(RenderType.DROPDOWN, new MenuSelectPropertyStrategy.Factory());
    this.factories.put(RenderType.RADIO, new RadioSelectPropertyStrategy.Factory());
    this.factories.put(RenderType.SHUTTLE, new ShuttleSelectPropertyStrategy.Factory());
    this.factories.put(RenderType.TEXT, new TextFieldPropertyStrategy.Factory());
    this.factories.put(RenderType.TEXT_AREA, new TextAreaPropertyStrategy.Factory());
    this.factories.put(RenderType.RICH_TEXT, new RichTextAreaPropertyStrategy.Factory());
}
From source file:com.adobe.acs.commons.mcp.impl.processes.RefreshFolderTumbnails.java
private void record(String path, String action, String description) {
    EnumMap<ReportColumns, String> row = new EnumMap<ReportColumns, String>(ReportColumns.class);
    row.put(ReportColumns.PATH, path);
    row.put(ReportColumns.ACTION, action);
    row.put(ReportColumns.DESCRIPTION, description);
    reportData.add(row);
}
From source file:com.moviejukebox.plugin.FanartTvPlugin.java
/**
 * Scan and return the artwork type requested (or all if type is null)
 *
 * @param movie
 * @param artworkType Artwork type required (null is all)
 * @return
 */
public boolean scan(Movie movie, final FTArtworkType artworkType) {
    if (artworkType != null && !ARTWORK_TYPES.containsKey(artworkType)) {
        LOG.debug("{} not required", artworkType.toString().toLowerCase());
        return true;
    }

    ArtworkList ftArtwork;
    String requiredLanguage;
    Map<FTArtworkType, Integer> requiredArtworkTypes = new EnumMap<>(ARTWORK_TYPES);

    if (movie.isTVShow()) {
        int tvdbid = NumberUtils.toInt(movie.getId(TheTvDBPlugin.THETVDB_PLUGIN_ID), 0);
        // Remove the non-TV types
        for (FTArtworkType at : requiredArtworkTypes.keySet()) {
            if (at.getSourceType() != FTSourceType.TV) {
                requiredArtworkTypes.remove(at);
            }
        }
        // Get all the artwork to speed up any subsequent requests
        ftArtwork = getTvArtwork(tvdbid);
        requiredLanguage = LANG_TV;
    } else {
        int tmdbId = NumberUtils.toInt(movie.getId(TheMovieDbPlugin.TMDB_PLUGIN_ID), 0);
        // Remove the non-Movie types
        for (FTArtworkType at : requiredArtworkTypes.keySet()) {
            if (at.getSourceType() != FTSourceType.MOVIE) {
                requiredArtworkTypes.remove(at);
            }
        }
        // Get all the artwork to speed up any subsequent requests
        ftArtwork = getMovieArtwork(tmdbId, movie.getId(ImdbPlugin.IMDB_PLUGIN_ID));
        requiredLanguage = LANG_MOVIE;
    }

    if (ftArtwork.hasArtwork()) {
        LOG.debug("Found {} artwork items", ftArtwork.getArtwork().size());
        FTArtworkType ftType;
        for (Map.Entry<FTArtworkType, List<FTArtwork>> entry : ftArtwork.getArtwork().entrySet()) {
            LOG.trace("Found '{}' with {} items", entry.getKey(), entry.getValue().size());
            ftType = entry.getKey();
            if (requiredArtworkTypes.containsKey(ftType) && requiredArtworkTypes.get(ftType) > 0) {
                LOG.trace("Processing '{}' artwork, {} are requried", entry.getKey(), requiredArtworkTypes.get(ftType));
                int left = processArtworkToMovie(movie, ftType, requiredLanguage, requiredArtworkTypes.get(ftType), entry.getValue());
                // Update the required artwork counter
                requiredArtworkTypes.put(ftType, left); // remove the count from the requiredQuantity
            }
        }

        int requiredQuantity = 0;
        for (Map.Entry<FTArtworkType, Integer> entry : requiredArtworkTypes.entrySet()) {
            requiredQuantity += entry.getValue();
        }

        if (requiredQuantity > 0) {
            LOG.debug("Not all required artwork was found for '{}' - {}", movie.getBaseName(), requiredArtworkTypes.toString());
            return false;
        }

        LOG.debug("All required artwork was found for '{}'", movie.getBaseName());
        return true;
    }

    LOG.debug("No artwork found for {}", movie.getBaseName());
    return false;
}
From source file:org.apache.accumulo.core.util.shell.commands.ListIterCommand.java
@Override
public Options getOptions() {
    final Options o = new Options();

    nameOpt = new Option("n", "name", true, "iterator to list");
    nameOpt.setArgName("itername");

    allScopesOpt = new Option("all", "all-scopes", false, "list from all scopes");
    o.addOption(allScopesOpt);

    scopeOpts = new EnumMap<IteratorScope, Option>(IteratorScope.class);
    scopeOpts.put(IteratorScope.minc, new Option(IteratorScope.minc.name(), "minor-compaction", false, "list iterator for minor compaction scope"));
    scopeOpts.put(IteratorScope.majc, new Option(IteratorScope.majc.name(), "major-compaction", false, "list iterator for major compaction scope"));
    scopeOpts.put(IteratorScope.scan, new Option(IteratorScope.scan.name(), "scan-time", false, "list iterator for scan scope"));

    OptionGroup grp = new OptionGroup();
    grp.addOption(OptUtil.tableOpt("table to list the configured iterators on"));
    grp.addOption(OptUtil.namespaceOpt("namespace to list the configured iterators on"));

    o.addOptionGroup(grp);
    o.addOption(nameOpt);
    for (Option opt : scopeOpts.values()) {
        o.addOption(opt);
    }

    return o;
}
From source file:io.lavagna.web.api.CardController.java
@ExpectPermission(Permission.READ)
@RequestMapping(value = "/api/project/{projectShortName}/cards-by-milestone", method = RequestMethod.GET)
public Milestones findCardsByMilestone(@PathVariable("projectShortName") String projectShortName) {
    Project project = projectService.findByShortName(projectShortName);
    Map<Integer, Integer> milestoneToIndex = new HashMap<>();
    List<MilestoneInfo> milestones = new ArrayList<>();

    getMilestones(project.getId(), milestoneToIndex, milestones);

    for (MilestoneCount count : statisticsService.findCardsCountByMilestone(project.getId())) {
        MilestoneInfo md = milestones.get(milestoneToIndex.get(count.getMilestoneId()));
        md.getCardsCountByStatus().put(count.getColumnDefinition(), count.getCount());
    }

    Map<ColumnDefinition, Integer> statusColors = new EnumMap<>(ColumnDefinition.class);
    for (BoardColumnDefinition cd : projectService.findColumnDefinitionsByProjectId(project.getId())) {
        statusColors.put(cd.getValue(), cd.getColor());
    }

    return new Milestones(milestones, statusColors);
}
From source file:at.ac.tuwien.dsg.quelle.elasticityQuantification.engines.RequirementsMatchingEngine.java
/**
 * Tries to match as many requirements on a single ServiceUnit. If the unit
 * can have multiple Optional_Associations to Quality, Resource, each
 * possible association is inspected, and the SMALLEST (in terms of
 * quality/resources) that matches the requirements is chosen (intuitively
 * this should be the cheapest) in a greedy manner. Then, for all the
 * MANDATORY service unit associations, their concrete configurations are
 * chosen, and the number of requirements that they match also reported
 *
 * 1 Requirements group means that it should be instantiated on one cloud
 * offered ServiceUnit
 *
 * @param unitToMatch
 * @param requirements
 * @return
 */
public ServiceUnitOptions analyzeServiceUnitMatching(CloudOfferedService unitToMatch, Requirements requirements) {

    //hold matched requirements by type
    Map<Metric.MetricType, List<Requirement>> matchedRequirementsMap = new EnumMap<Metric.MetricType, List<Requirement>>(Metric.MetricType.class);
    matchedRequirementsMap.put(Metric.MetricType.RESOURCE, new ArrayList<Requirement>());
    matchedRequirementsMap.put(Metric.MetricType.COST, new ArrayList<Requirement>());
    matchedRequirementsMap.put(Metric.MetricType.QUALITY, new ArrayList<Requirement>());
    matchedRequirementsMap.put(Metric.MetricType.ELASTICITY, new ArrayList<Requirement>());

    ServiceUnitOptions serviceUnitOptions = new ServiceUnitOptions(unitToMatch);
    // serviceUnitOptions.setMatchedRequirementsByServiceUnit(matchedRequirementsMap);
    serviceUnitOptions.setOverallUnMatched(new ArrayList<Requirement>(requirements.getRequirements()));

    if (requirements == null || unitToMatch == null) {
        return serviceUnitOptions;
    }

    //1 split requirements by type: Cost, Quality, Resource, Elasticity
    Map<Metric.MetricType, List<Requirement>> requirementsMapByType = new EnumMap<Metric.MetricType, List<Requirement>>(Metric.MetricType.class);

    for (Requirement requirement : requirements.getRequirements()) {
        Metric.MetricType requirementType = requirement.getMetric().getType();
        if (requirementsMapByType.containsKey(requirementType)) {
            requirementsMapByType.get(requirementType).add(requirement);
        } else {
            List<Requirement> list = new ArrayList<Requirement>();
            list.add(requirement);
            requirementsMapByType.put(requirementType, list);
        }
    }

    //2 match requirements

    //2.1 match Resource requirements
    if (requirementsMapByType.containsKey(Metric.MetricType.RESOURCE)) {
        List<Requirement> matchedRequirements = matchedRequirementsMap.get(Metric.MetricType.RESOURCE);
        List<Requirement> resourceRequirements = requirementsMapByType.get(Metric.MetricType.RESOURCE);

        //2.1.1 match requirements on fixed resources
        for (Resource resource : unitToMatch.getResourceProperties()) {
            Map<Metric, MetricValue> resourceProperties = resource.getProperties();
            List<Requirement> requirementsMatchedForThisResource = matchRequirementsToProperties(resourceProperties, resourceRequirements);
            resourceRequirements.removeAll(requirementsMatchedForThisResource);
            matchedRequirements.addAll(requirementsMatchedForThisResource);
            serviceUnitOptions.addMatchedRequirements(requirementsMatchedForThisResource);
        }

        //the rest of the unmatched requirements are matched on the optional resources,
        //and the optional resource which matches the most requirements is selected
        //I need set cover here

        //2.1.2 match requirements on optional resources
        //first on mandatory reqs
        for (ElasticityCapability optionalResourceCapability : unitToMatch.getResourceAssociations()) {
            //get optional resources BECAUSE it is more tricky to get them
            //the unitToMatch.getOptionalResourceAssociations() ACTUAllY returns the target of the ElasticityCharacteristic,
            //which is a generic Entity: Example Computing.
            //The ResourceDAO returns example Computing x64, Computing x86
            // List<Resource> optionalResourcesOptions = ResourceDAO.geResourceOptionsForServiceUnitNode(unitToMatch.getId(), optionalResource.getId());

            //now match the remaining requirements on these options and sort them after how many req they fulfill
            Set<RequirementsMatchingReport<Resource>> matchingReports = matchOptionalResourceConfiguration(optionalResourceCapability.getMandatoryDependencies(), resourceRequirements);

            if (!matchingReports.isEmpty()) {
                serviceUnitOptions.addResourceOptions(optionalResourceCapability, matchingReports);
                //remove the requirements matched by the LARGEST match
                //get iterator next as the first in the matchingReports must be the BEST match (sorted in decreasing nr of matched policies)
                List<Requirement> matched = matchingReports.iterator().next().matchedRequirements.get(Metric.MetricType.RESOURCE);
                serviceUnitOptions.addMatchedRequirements(matched);
                resourceRequirements.removeAll(matched);
            }
        }

        for (ElasticityCapability optionalResourceCapability : unitToMatch.getResourceAssociations()) {
            //get optional resources BECAUSE it is more tricky to get them
            //the unitToMatch.getOptionalResourceAssociations() ACTUAllY returns the target of the ElasticityCharacteristic,
            //which is a generic Entity: Example Computing.
            //The ResourceDAO returns example Computing x64, Computing x86
            // List<Resource> optionalResourcesOptions = ResourceDAO.geResourceOptionsForServiceUnitNode(unitToMatch.getId(), optionalResource.getId());

            //now match the remaining requirements on these options and sort them after how many req they fulfill
            Set<RequirementsMatchingReport<Resource>> matchingReports = matchOptionalResourceConfiguration(optionalResourceCapability.getOptionalDependencies(), resourceRequirements);

            if (!matchingReports.isEmpty()) {
                serviceUnitOptions.addResourceOptions(optionalResourceCapability, matchingReports);
                //remove the requirements matched by the LARGEST match
                //get iterator next as the first in the matchingReports must be the BEST match (sorted in decreasing nr of matched policies)
                List<Requirement> matched = matchingReports.iterator().next().matchedRequirements.get(Metric.MetricType.RESOURCE);
                serviceUnitOptions.addMatchedRequirements(matched);
                resourceRequirements.removeAll(matched);
            }
        }
    }

    //2.2 match Quality requirements
    if (requirementsMapByType.containsKey(Metric.MetricType.QUALITY)) {
        List<Requirement> matchedRequirements = matchedRequirementsMap.get(Metric.MetricType.QUALITY);
        List<Requirement> qualityRequirements = requirementsMapByType.get(Metric.MetricType.QUALITY);

        //2.2.1 match requirements on fixed Quality
        for (Quality quality : unitToMatch.getQualityProperties()) {
            Map<Metric, MetricValue> properties = quality.getProperties();
            List<Requirement> requirementsMatchedForThisResource = matchRequirementsToProperties(properties, qualityRequirements);
            qualityRequirements.removeAll(requirementsMatchedForThisResource);
            matchedRequirements.addAll(requirementsMatchedForThisResource);
            serviceUnitOptions.addMatchedRequirements(requirementsMatchedForThisResource);
        }

        //the rest of the unmatched requirements are matched on the optional resources,
        //and the optional resource which matches the most requirements is selected
        //I need set cover here

        //2.2.2 match requirements on optional Quality
        //first on mandatory quality
        for (ElasticityCapability optionalQualityCapability : unitToMatch.getQualityAssociations()) {
            //now match the remaining requirements on these options and sort them after how many req they fulfill
            Set<RequirementsMatchingReport<Quality>> matchingReports = matchOptionalQualityConfiguration(optionalQualityCapability.getMandatoryDependencies(), qualityRequirements);

            if (!matchingReports.isEmpty()) {
                serviceUnitOptions.addQualityOptions(optionalQualityCapability, matchingReports);
                //remove the requirements matched by the LARGEST match
                List<Requirement> matched = matchingReports.iterator().next().matchedRequirements.get(Metric.MetricType.QUALITY);
                serviceUnitOptions.addMatchedRequirements(matched);
                qualityRequirements.removeAll(matched);
            }
        }

        for (ElasticityCapability optionalQualityCapability : unitToMatch.getQualityAssociations()) {
            //now match the remaining requirements on these options and sort them after how many req they fulfill
            Set<RequirementsMatchingReport<Quality>> matchingReports = matchOptionalQualityConfiguration(optionalQualityCapability.getOptionalDependencies(), qualityRequirements);

            if (!matchingReports.isEmpty()) {
                serviceUnitOptions.addQualityOptions(optionalQualityCapability, matchingReports);
                //remove the requirements matched by the LARGEST match
                List<Requirement> matched = matchingReports.iterator().next().matchedRequirements.get(Metric.MetricType.QUALITY);
                serviceUnitOptions.addMatchedRequirements(matched);
                qualityRequirements.removeAll(matched);
            }
        }
    }

    //2.3 Get all MANDATORY association ServiceUnits and check how much do they fill requirements.
    for (ElasticityCapability serviceUnitElasticityCapability : unitToMatch.getServiceUnitAssociations()) {
        Requirements r = new Requirements();
        r.setRequirements(serviceUnitOptions.getOverallUnMatched());
        for (Unit entity : serviceUnitElasticityCapability.getMandatoryDependencies()) {
            ServiceUnitOptions options = analyzeServiceUnitMatching((CloudOfferedService) entity, r);
            if (options != null) {
                serviceUnitOptions.addMandatoryServiceUnitRaport(options);
            }
        }
    }

    // 2.4 The requirements left unmatched are matched with OptionalServiceUnit Associations
    //thus, for example, if one requires a VM with X resources, and Y IOperformance,
    //it could get a VM which optionally could have EBS,
    if (!serviceUnitOptions.getOverallUnMatched().isEmpty()) {
        for (ElasticityCapability optionalServiceUnitElasticityCapability : unitToMatch.getServiceUnitAssociations()) {
            Requirements r = new Requirements();
            r.setRequirements(serviceUnitOptions.getOverallUnMatched());
            //go and analyze each optional target from the elasticity stuff. not good though. I need to select one or the other?
            //TODO: structure reports after ElasticityCapabilities. Example: for capability A we have Reports for units B,C,D
            for (Unit entity : optionalServiceUnitElasticityCapability.getOptionalDependencies()) {
                ServiceUnitOptions options = analyzeServiceUnitMatching((CloudOfferedService) entity, r);
                if (options != null) {
                    serviceUnitOptions.addOptionalServiceUnitRaport(options);
                }
            }
        }
    }

    //    //2.5 match Cost requirements (cost needs to also include the cost of the MANDATORY associations
    //    //TODO: to be implemented (Continue)
    //    // match to cost properties
    //    if (requirementsMapByType.containsKey(Metric.MetricType.COST)) {
    //        List<Requirement> matchedRequirements = matchedRequirementsMap.get(Metric.MetricType.COST);
    //        List<Requirement> costRequirements = requirementsMapByType.get(Metric.MetricType.COST);
    //
    //        List<CostFunction> costFunctions = unitToMatch.getCostFunctions();
    //
    //        for (CostFunction costFunction : unitToMatch.getCostFunctions()) {
    //            for (CostElement element : costFunction.getCostElements()) {
    //                Metric costMetric = element.getCostMetric();
    //                Map<MetricValue, Double> properties = element.getCostIntervalFunction();
    //
    //                List<Requirement> requirementsMatchedForThisResource = new ArrayList<Requirement>();
    //
    //                Iterator<Requirement> requirementsIterator = costRequirements.iterator();
    //                while (requirementsIterator.hasNext()) {
    //                    Requirement requirement = requirementsIterator.next();
    //
    //                    Metric requirementMetric = requirement.getMetric();
    //                    if (costMetric.equals(requirementMetric)) {
    //                        boolean respectsAllConditions = true;
    //
    //                        for (Condition condition : requirement.getConditions()) {
    //                            //consider we only evaluate cost, not interval
    //                            boolean conditionIsRespected = true;
    //                            for (Double d : properties.values()) {
    //                                MetricValue costValue = new MetricValue(d);
    //                                if (condition.isRespectedByValue(costValue)) {
    //                                    conditionIsRespected = true;
    //                                    break;
    //                                }
    //                            }
    //                            if (!conditionIsRespected) {
    //                                respectsAllConditions = false;
    //                                break;
    //                            }
    //                        }
    //                        if (respectsAllConditions) {
    //                            //add requirement to matched requirements list
    //                            requirementsMatchedForThisResource.add(requirement);
    //                            //remove requirement so it is not matched again in the next searches
    //                            requirementsIterator.remove();
    //                        }
    //                    } else {
    //                        continue;
    //                    }
    //                }
    //
    //                costRequirements.removeAll(requirementsMatchedForThisResource);
    //                matchedRequirements.addAll(requirementsMatchedForThisResource);
    //                serviceUnitOptions.addMatchedRequirements(requirementsMatchedForThisResource);
    //            }
    //        }
    //    }
    //
    //    //TODO: to process also cost to apply an utility in conjunction with another
    //    if (!serviceUnitOptions.getOverallUnMatched().isEmpty()) {
    //        //here I must check and apply it for quality, resource or ServiceUnit
    //        for (CostFunction costFunction : unitToMatch.getCostFunctions()) {
    //            for (Quality quality : costFunction.getAppliedInConjunctionWithQuality()) {
    //                Map<Metric, MetricValue> properties = quality.getProperties();
    //                List<Requirement> requirementsMatchedForThisResource = matchRequirementsToProperties(properties, qualityRequirements);
    //                qualityRequirements.removeAll(requirementsMatchedForThisResource);
    //                matchedRequirements.addAll(requirementsMatchedForThisResource);
    //                serviceUnitOptions.addMatchedRequirements(requirementsMatchedForThisResource);
    //            }
    //        }
    //    }

    //2.6 match Elasticity requirements (also need to consider the MANDATORY associations, which reduce the elasticity)
    //TODO: to be implemented

    return serviceUnitOptions;
}
From source file:org.janusgraph.graphdb.database.log.TransactionLogHeader.java
private DataOutput serializeHeader(Serializer serializer, int capacity, LogTxStatus status) {
    return serializeHeader(serializer, capacity, status, new EnumMap<LogTxMeta, Object>(LogTxMeta.class));
}
From source file:hu.unideb.inf.rdfizers.rpm.ModelBuilder.java
/**
 * Processes an RPM header structure (signature or header).
 */
private Map processHeaderStructure(HeaderStructureType type) throws IOException {
    if (bytesRead % 8 != 0) {
        // header structure must be aligned 8 byte boundary
        in.skipBytes(8 - (bytesRead % 8));
        bytesRead += 8 - (bytesRead % 8);
    }

    if (in.readUnsignedByte() != 0x8e || in.readUnsignedByte() != 0xad || in.readUnsignedByte() != 0xe8)
        throw new IOException("Invalid RPM header structure");
    in.skipBytes(5);
    bytesRead += 8;

    final int num = in.readInt();
    bytesRead += 4;
    logger.debug("Number of index entries: {}", num);

    final int size = in.readInt();
    bytesRead += 4;
    logger.debug("Store size: {}", size);

    // read index entries
    IndexEntry[] index = new IndexEntry[num];
    for (int i = 0; i < num; ++i) {
        index[i] = new IndexEntry(in.readInt(), in.readInt(), in.readInt(), in.readInt());
        bytesRead += 16;
    }

    // read store
    byte[] store = new byte[size];
    in.readFully(store);
    bytesRead += size;

    switch (type) {
    case SIGNATURE: {
        Map<SignatureTag, Object> map = new EnumMap<SignatureTag, Object>(SignatureTag.class);
        for (int i = 0; i < index.length; ++i) {
            SignatureTag tag = SignatureTag.get(index[i].tag);
            if (tag != null) {
                if (HeaderType.get(index[i].type) != tag.getType())
                    throw new IOException("Invalid RPM signature");
                Object value = getValueFromStore(index[i], store);
                map.put(tag, value);
            } else
                logger.warn("Skipping tag in signature: {}", index[i].tag);
        }
        return map;
    }
    case HEADER: {
        Map map = new EnumMap<HeaderTag, Object>(HeaderTag.class);
        for (int i = 0; i < index.length; ++i) {
            HeaderTag tag = HeaderTag.get(index[i].tag);
            if (tag != null) {
                if (HeaderType.get(index[i].type) != tag.getType()) {
                    throw new IOException("Invalid RPM header");
                }
                Object value = getValueFromStore(index[i], store);
                map.put(tag, value);
            } else
                logger.warn("Skipping tag in header: {}", index[i].tag);
        }
        return map;
    }
    }
    return null;
}
From source file:canreg.client.analysis.CasesByAgeGroupChartTableBuilder.java
@Override
public LinkedList<String> buildTable(String tableHeader, String reportFileName, int startYear, int endYear,
        Object[][] incidenceData, PopulationDataset[] populations, // can be null
        PopulationDataset[] standardPopulations, LinkedList<ConfigFields> configList, String[] engineParameters,
        FileTypes fileType) throws NotCompatibleDataException {

    // String footerString = java.util.ResourceBundle.getBundle("canreg/client/analysis/resources/AgeSpecificCasesPerHundredThousandTableBuilder").getString("TABLE BUILT ") + new Date() + java.util.ResourceBundle.getBundle("canreg/client/analysis/resources/AgeSpecificCasesPerHundredThousandTableBuilder").getString(" BY CANREG5.");

    LinkedList<String> generatedFiles = new LinkedList<String>();

    if (Arrays.asList(engineParameters).contains("barchart")) {
        chartType = ChartType.BAR;
    } else {
        chartType = ChartType.PIE;
    }
    if (Arrays.asList(engineParameters).contains("legend")) {
        legendOn = true;
    }
    if (Arrays.asList(engineParameters).contains("r")) {
        useR = true;
    }

    localSettings = CanRegClientApp.getApplication().getLocalSettings();
    rpath = localSettings.getProperty(LocalSettings.R_PATH);

    // does R exist?
    if (rpath == null || rpath.isEmpty() || !new File(rpath).exists()) {
        useR = false; // force false if R is not installed
    }

    icd10GroupDescriptions = ConfigFieldsReader.findConfig("ICD10_groups", configList);
    cancerGroupsLocal = EditorialTableTools.generateICD10Groups(icd10GroupDescriptions);

    // indexes
    keyGroupsMap = new EnumMap<KeyCancerGroupsEnum, Integer>(KeyCancerGroupsEnum.class);
    keyGroupsMap.put(KeyCancerGroupsEnum.allCancerGroupsIndex, EditorialTableTools.getICD10index("ALL", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.skinCancerGroupIndex, EditorialTableTools.getICD10index("C44", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.otherCancerGroupsIndex, EditorialTableTools.getICD10index("O&U", icd10GroupDescriptions));
    keyGroupsMap.put(KeyCancerGroupsEnum.allCancerGroupsButSkinIndex, EditorialTableTools.getICD10index("ALLbC44", icd10GroupDescriptions));

    skinCancerGroupIndex = keyGroupsMap.get(KeyCancerGroupsEnum.skinCancerGroupIndex);
    allCancerGroupsIndex = keyGroupsMap.get(KeyCancerGroupsEnum.allCancerGroupsIndex);
    allCancerGroupsButSkinIndex = keyGroupsMap.get(KeyCancerGroupsEnum.allCancerGroupsButSkinIndex);
    otherCancerGroupsIndex = keyGroupsMap.get(KeyCancerGroupsEnum.otherCancerGroupsIndex);

    numberOfCancerGroups = cancerGroupsLocal.length;

    int columnToCount = allCancerGroupsIndex;

    List<AgeGroup> ageGroups = new LinkedList<AgeGroup>();
    // TODO: Make these dynamic?
    ageGroups.add(new AgeGroup(0, 14));
    ageGroups.add(new AgeGroup(15, 29));
    ageGroups.add(new AgeGroup(30, 49));
    ageGroups.add(new AgeGroup(50, 69));
    ageGroups.add(new AgeGroup(70, null));

    double[] casesLine;

    if (incidenceData != null) {
        String sexString, icdString;
        String morphologyString;
        double casesArray[][][] = new double[numberOfSexes][ageGroups.size()][numberOfCancerGroups];
        double cum64Array[][][] = new double[numberOfSexes][ageGroups.size()][numberOfCancerGroups];
        double cum74Array[][][] = new double[numberOfSexes][ageGroups.size()][numberOfCancerGroups];
        double asrArray[][][] = new double[numberOfSexes][ageGroups.size()][numberOfCancerGroups];

        int sex, icdIndex, cases, age;

        List<Integer> dontCount = new LinkedList<Integer>();

        // all sites but skin?
        if (Arrays.asList(engineParameters).contains("noC44")) {
            dontCount.add(skinCancerGroupIndex);
            tableHeader += ", excluding C44";
            columnToCount = allCancerGroupsButSkinIndex;
        }

        for (Object[] dataLine : incidenceData) {
            // Set default
            icdIndex = -1;
            cases = 0;
            age = 0;

            // Extract data
            sexString = (String) dataLine[SEX_COLUMN];
            sex = Integer.parseInt(sexString.trim());

            // sex = 3 is unknown sex
            if (sex > 2) {
                sex = 3;
            } else {
                sex -= 1; // sex 1 male maps to column 0...
            }

            morphologyString = (String) dataLine[MORPHOLOGY_COLUMN];
            icdString = (String) dataLine[ICD10_COLUMN];

            icdIndex = Tools.assignICDGroupIndex(keyGroupsMap, icdString, morphologyString, cancerGroupsLocal);

            if (!dontCount.contains(icdIndex) && icdIndex != DONT_COUNT) {
                // Extract cases
                cases = (Integer) dataLine[CASES_COLUMN];
                age = (Integer) dataLine[AGE_COLUMN];
                for (int group = 0; group < ageGroups.size(); group++) {
                    if (ageGroups.get(group).fitsInAgeGroup(age)) {
                        if (sex <= numberOfSexes && icdIndex >= 0) {
                            casesArray[sex][group][icdIndex] += cases;
                        } else {
                            if (otherCancerGroupsIndex >= 0) {
                                casesArray[sex][group][otherCancerGroupsIndex] += cases;
                            }
                        }
                        if (allCancerGroupsIndex >= 0) {
                            casesArray[sex][group][allCancerGroupsIndex] += cases;
                        }
                        if (allCancerGroupsButSkinIndex >= 0 && skinCancerGroupIndex >= 0 && icdIndex != skinCancerGroupIndex) {
                            casesArray[sex][group][allCancerGroupsButSkinIndex] += cases;
                        }
                    }
                }
            } else {
                // System.out.println("Not counted: " + icdString + "/" + morphologyString);
            }
        }

        //if (populations != null && populations.length > 0) {
        //    // calculate pops
        //    for (PopulationDataset pop : populations) {
        //        for (AgeGroup ag : ageGroups) {
        //            try {
        //                addPopulationDataSetToAgeGroup(pop, ag);
        //            } catch (IncompatiblePopulationDataSetException ex) {
        //                Logger.getLogger(CasesByAgeGroupChartTableBuilder.class.getName()).log(Level.SEVERE, null, ex);
        //            }
        //        }
        //    }
        //}

        format = NumberFormat.getInstance();
        format.setMaximumFractionDigits(1);

        for (int sexNumber : new int[] { 0, 1 }) {
            String fileName = reportFileName + "-" + sexLabel[sexNumber] + "." + fileType.toString();
            File file = new File(fileName);

            List<CancerCasesCount> casesCounts = new LinkedList<CancerCasesCount>();
            Double total = 0.0;

            for (int group = 0; group < ageGroups.size(); group++) {
                CancerCasesCount thisElement = new CancerCasesCount(null, ageGroups.get(group).toString(), 0.0, group);
                casesLine = casesArray[sexNumber][group];
                thisElement.setCount(thisElement.getCount() + casesLine[columnToCount]);
                total += casesLine[columnToCount];
                casesCounts.add(thisElement);
            }

            if (useR && !fileType.equals(FileTypes.jchart) && !fileType.equals(FileTypes.csv)) {
                String header = tableHeader + ", \n" + TableBuilderInterface.sexLabel[sexNumber];
                generatedFiles.addAll(Tools.generateRChart(casesCounts, fileName, header, fileType, chartType, false, 0.0, rpath, false, "Age Group"));
            } else {
                Color color;
                if (sexNumber == 0) {
                    color = Color.BLUE;
                } else {
                    color = Color.RED;
                }
                String header = tableHeader + ", " + TableBuilderInterface.sexLabel[sexNumber];
                charts[sexNumber] = Tools.generateJChart(casesCounts, fileName, header, fileType, chartType, false, legendOn, 0.0, total, color, "Age Group");
                try {
                    generatedFiles.add(Tools.writeJChartToFile(charts[sexNumber], file, fileType));
                } catch (IOException ex) {
                    Logger.getLogger(TopNChartTableBuilder.class.getName()).log(Level.SEVERE, null, ex);
                } catch (DocumentException ex) {
                    Logger.getLogger(TopNChartTableBuilder.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }
    return generatedFiles;
}
From source file:com.tesora.dve.sql.schema.PETable.java
public PETable(SchemaContext pc, Name name, List<TableComponent<?>> fieldsAndKeys, DistributionVector dv,
        List<TableModifier> modifier, PEPersistentGroup defStorage, PEDatabase db, TableState theState) {
    super(pc, name, fieldsAndKeys, dv, defStorage, db, theState);
    loaded = true;
    this.pk = null;
    this.referring = new ListSet<SchemaCacheKey<PEAbstractTable<?>>>();
    this.keys = new ArrayList<PEKey>();
    this.modifiers = new TableModifiers(modifier);
    this.triggers = new EnumMap<TriggerEvent, PETableTriggerEventInfo>(TriggerEvent.class);
    // do keys & columns first so that database can propagate charset/collation
    initializeColumnsAndKeys(pc, fieldsAndKeys, db);
    setDatabase(pc, db, false);
    autoIncTrackerID = null;
    forceStorage(pc);
    setPersistent(pc, null, null);
    cached = null;
}