List of usage examples for java.util.LinkedHashMap.size()
int size();
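size() returns the number of key-value mappings in the map; LinkedHashMap inherits it from HashMap, so the call is a constant-time read of a stored counter. A minimal standalone sketch before the real-world examples below (note that re-putting an existing key replaces the value and leaves the size unchanged):

import java.util.LinkedHashMap;
import java.util.Map;

public class LinkedHashMapSizeDemo {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<String, Integer>();
        System.out.println(map.size()); // 0
        map.put("a", 1);
        map.put("b", 2);
        map.put("a", 3); // replaces the value for "a"; no new mapping is created
        System.out.println(map.size()); // 2
        map.remove("b");
        System.out.println(map.size()); // 1
    }
}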
From source file:com.itemanalysis.jmetrik.stats.irt.linking.CommonItemSummaryStatistics.java
public CommonItemSummaryStatistics(LinkedHashMap<String, ItemResponseModel> irmX,
        LinkedHashMap<String, ItemResponseModel> irmY) {
    this.irmX = irmX;
    this.irmY = irmY;
    varNamesA = new ArrayList<VariableName>();
    varNamesB = new ArrayList<VariableName>();
    // the two maps are assumed to hold the same common items, so either map's size works here
    numberOfCommonItems = irmX.size();
    createArrays();
}
From source file:com.hp.saas.agm.service.EntityService.java
private String queryToString(EntityQuery query) {
    ServerStrategy cust = restService.getServerStrategy();
    EntityQuery clone = cust.preProcess(query.clone());
    StringBuffer buf = new StringBuffer();
    buf.append("fields=");
    LinkedHashMap<String, Integer> columns = clone.getColumns();
    // size() pre-sizes the destination array for toArray
    buf.append(TextUtils.join(",", columns.keySet().toArray(new String[columns.size()])));
    buf.append("&query=");
    buf.append(EntityQuery.encode("{" + filterToString(clone, ApplicationManager.getMetadataService()) + "}"));
    buf.append("&order-by=");
    buf.append(EntityQuery.encode("{" + orderToString(clone) + "}"));
    if (query.getPageSize() != null) {
        buf.append("&page-size=");
        buf.append(query.getPageSize());
    }
    if (query.getStartIndex() != null) {
        buf.append("&start-index=");
        buf.append(query.getStartIndex());
    }
    return buf.toString();
}
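The columns.keySet().toArray(new String[columns.size()]) call above is a common idiom: size() pre-sizes the destination array so toArray can fill it directly instead of allocating a second one. The same idiom in isolation, as a minimal sketch:

import java.util.LinkedHashMap;

public class ToArrayIdiom {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> columns = new LinkedHashMap<String, Integer>();
        columns.put("id", 0);
        columns.put("name", 1);
        // size() pre-sizes the array; toArray fills it in the map's insertion order
        String[] fields = columns.keySet().toArray(new String[columns.size()]);
        System.out.println(String.join(",", fields)); // id,name
    }
}

On modern JVMs, toArray(new String[0]) is an equally correct (and often faster) alternative.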
From source file:au.org.ala.biocache.util.CollectionsCache.java
/**
 * Do the web services call. Uses RestTemplate.
 *
 * @param type
 * @return
 */
protected LinkedHashMap<String, String> getCodesMap(ResourceType type, List<String> guids) {
    LinkedHashMap<String, String> entityMap = null;
    logger.info("Updating code map with " + guids);
    try {
        // grab cached values (map) in case WS is not available (uses reflection)
        Field f = CollectionsCache.class.getDeclaredField(type.getType() + "s"); // field is plural form
        entityMap = (LinkedHashMap<String, String>) f.get(this);
        logger.debug("checking map size: " + entityMap.size());
    } catch (Exception ex) {
        logger.error("Java reflection error: " + ex.getMessage(), ex);
    }
    try {
        entityMap = new LinkedHashMap<String, String>(); // reset now we're inside the try
        final String jsonUri = registryUrl + "/" + type.getType() + ".json";
        logger.debug("Requesting: " + jsonUri);
        List<LinkedHashMap<String, String>> entities = restTemplate.getForObject(jsonUri, List.class);
        logger.debug("number of entities = " + entities.size());
        for (LinkedHashMap<String, String> je : entities) {
            if (addToCodeMap(je.get("uid"), guids)) {
                entityMap.put(je.get("uid"), je.get("name"));
            }
        }
    } catch (Exception ex) {
        // note: entityMap was already reset above, so a failure here returns an
        // empty (or partially filled) map rather than the cached values read via reflection
        logger.error("RestTemplate error: " + ex.getMessage(), ex);
    }
    return entityMap;
}
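One caveat in the example above: entityMap is reset to a new map before the web-service call, so if that call fails the method returns an empty map rather than the cached one. A minimal sketch of a fallback that keeps the cache intact, with hypothetical fetchEntities and readCachedMap helpers standing in for the RestTemplate call and the reflective read:

protected LinkedHashMap<String, String> getCodesMapWithFallback(ResourceType type, List<String> guids) {
    try {
        LinkedHashMap<String, String> fresh = new LinkedHashMap<String, String>();
        for (Map<String, String> je : fetchEntities(type)) { // hypothetical web-service call
            if (addToCodeMap(je.get("uid"), guids)) {
                fresh.put(je.get("uid"), je.get("name"));
            }
        }
        return fresh; // replace the cached map only after the call has succeeded
    } catch (Exception ex) {
        logger.error("RestTemplate error: " + ex.getMessage(), ex);
        return readCachedMap(type); // hypothetical reflective read, as in the original
    }
}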
From source file:com.itemanalysis.jmetrik.stats.itemanalysis.ItemAnalysisOutputFile.java
public File saveOutput(LinkedHashMap<VariableName, ClassicalItem> itemTreeMap, boolean allCategories,
        boolean addDIndex) throws IOException {
    this.outputfile = getUniqueFile(outputfile);
    int nItems = itemTreeMap.size(); // one output row per item
    int maxCategory = 0;
    ClassicalItem item = null;
    for (VariableName v : itemTreeMap.keySet()) {
        item = itemTreeMap.get(v);
        maxCategory = Math.max(maxCategory, item.numberOfCategories());
    }
    int totalColumns = 4 + 3 * maxCategory;

    LinkedHashMap<VariableName, VariableAttributes> variableAttributeMap =
            new LinkedHashMap<VariableName, VariableAttributes>();
    VariableAttributes nameAtt = new VariableAttributes(new VariableName("name"),
            new VariableLabel("Item Name"), DataType.STRING, 0);
    VariableAttributes difficultyAtt = new VariableAttributes(new VariableName("difficulty"),
            new VariableLabel("Item Difficulty"), DataType.DOUBLE, 1);
    VariableAttributes stdDevAtt = new VariableAttributes(new VariableName("stdev"),
            new VariableLabel("Item Standard Deviation"), DataType.DOUBLE, 2);
    VariableAttributes discrimAtt = new VariableAttributes(new VariableName("discrimination"),
            new VariableLabel("Item Discrimination"), DataType.DOUBLE, 3);
    variableAttributeMap.put(nameAtt.getName(), nameAtt);
    variableAttributeMap.put(difficultyAtt.getName(), difficultyAtt);
    variableAttributeMap.put(stdDevAtt.getName(), stdDevAtt);
    variableAttributeMap.put(discrimAtt.getName(), discrimAtt);

    VariableAttributes lower = null;
    VariableAttributes upper = null;
    VariableAttributes dIndex = null;
    if (addDIndex) {
        lower = new VariableAttributes(new VariableName("lower"),
                new VariableLabel("Difficulty for lower 27%"), DataType.DOUBLE, 4);
        upper = new VariableAttributes(new VariableName("upper"),
                new VariableLabel("Difficulty for upper 27%"), DataType.DOUBLE, 5);
        dIndex = new VariableAttributes(new VariableName("D_index"),
                new VariableLabel("Discrimination index"), DataType.DOUBLE, 6);
        variableAttributeMap.put(lower.getName(), lower);
        variableAttributeMap.put(upper.getName(), upper);
        variableAttributeMap.put(dIndex.getName(), dIndex);
    }

    VariableAttributes vPropAtt = null;
    VariableAttributes vSDAtt = null;
    VariableAttributes vCorAtt = null;
    int colNumber = 4;
    if (addDIndex) colNumber = 7;
    if (allCategories) {
        for (int k = 0; k < maxCategory; k++) {
            vPropAtt = new VariableAttributes(new VariableName("prop" + (k + 1)),
                    new VariableLabel("Proportion endorsing option " + (k + 1)), DataType.DOUBLE, colNumber++);
            vSDAtt = new VariableAttributes(new VariableName("stdev" + (k + 1)),
                    new VariableLabel("Std. Dev. for option " + (k + 1)), DataType.DOUBLE, colNumber++);
            vCorAtt = new VariableAttributes(new VariableName("cor" + (k + 1)),
                    new VariableLabel("Distractor-total correlation for option " + (k + 1)), DataType.DOUBLE,
                    colNumber++);
            variableAttributeMap.put(vPropAtt.getName(), vPropAtt);
            variableAttributeMap.put(vSDAtt.getName(), vSDAtt);
            variableAttributeMap.put(vCorAtt.getName(), vCorAtt);
        }
    }

    int n = 0;
    try (JmetrikFileWriter writer = new JmetrikFileWriter(outputfile, variableAttributeMap)) {
        writer.openConnection();
        writer.writeHeader(nItems);
        int index = 0;
        double df = 0, sd = 0, ds = 0, dL = 0, dU = 0, D = 0;
        for (VariableName v : itemTreeMap.keySet()) {
            index = 0;
            item = itemTreeMap.get(v);
            writer.writeValue(nameAtt.getName(), item.getName().toString());
            df = item.getDifficulty();
            if (Double.isNaN(df)) {
                writer.writeValue(difficultyAtt.getName(), "");
            } else {
                writer.writeValue(difficultyAtt.getName(), df);
            }
            index++;
            sd = item.getStdDev();
            if (Double.isNaN(sd)) {
                writer.writeValue(stdDevAtt.getName(), "");
            } else {
                writer.writeValue(stdDevAtt.getName(), sd);
            }
            index++;
            ds = item.getDiscrimination();
            if (Double.isNaN(ds)) {
                writer.writeValue(discrimAtt.getName(), "");
            } else {
                writer.writeValue(discrimAtt.getName(), ds);
            }
            index++;
            if (addDIndex) {
                dL = item.getDindexLower();
                if (Double.isNaN(dL)) {
                    writer.writeValue(lower.getName(), "");
                } else {
                    writer.writeValue(lower.getName(), dL);
                }
                index++;
                dU = item.getDindexUpper();
                if (Double.isNaN(dU)) {
                    writer.writeValue(upper.getName(), "");
                } else {
                    writer.writeValue(upper.getName(), dU);
                }
                index++;
                D = dU - dL;
                if (Double.isNaN(D)) {
                    writer.writeValue(dIndex.getName(), "");
                } else {
                    writer.writeValue(dIndex.getName(), D);
                }
                index++;
            }
            if (allCategories) {
                Object temp;
                Iterator<Object> iter = item.categoryIterator();
                int catIndex = 1;
                VariableName catProp = null;
                VariableName catStDev = null;
                VariableName catDisc = null;
                while (iter.hasNext()) {
                    temp = iter.next();
                    catProp = new VariableName("prop" + catIndex);
                    catStDev = new VariableName("stdev" + catIndex);
                    catDisc = new VariableName("cor" + catIndex);
                    vPropAtt = variableAttributeMap.get(catProp);
                    vSDAtt = variableAttributeMap.get(catStDev);
                    vCorAtt = variableAttributeMap.get(catDisc);
                    // category difficulty
                    df = item.getDifficultyAt(temp);
                    if (Double.isNaN(df)) {
                        writer.writeValue(vPropAtt.getName(), "");
                    } else {
                        writer.writeValue(vPropAtt.getName(), df);
                    }
                    index++;
                    // category sd
                    sd = item.getStdDevAt(temp);
                    if (Double.isNaN(sd)) {
                        writer.writeValue(vSDAtt.getName(), "");
                    } else {
                        writer.writeValue(vSDAtt.getName(), sd);
                    }
                    index++;
                    // category discrimination
                    ds = item.getDiscriminationAt(temp);
                    if (Double.isNaN(ds)) {
                        writer.writeValue(vCorAtt.getName(), "");
                    } else {
                        writer.writeValue(vCorAtt.getName(), ds);
                    }
                    index++;
                    catIndex++;
                } // end loop over categories

                // index should be equal to totalColumns
                // if not, add null values to remaining columns
                // while (index < totalColumns - 1) {
                //     writer.writeValue(index++, "");
                // }
            }
            writer.updateRow();
        } // end loop over items
    }
    return outputfile;
}
From source file:com.opengamma.analytics.financial.interestrate.capletstripping.CapletStrippingFunction.java
public CapletStrippingFunction(final List<CapFloor> caps, final YieldCurveBundle yieldCurves,
        final LinkedHashMap<String, double[]> knotPoints,
        final LinkedHashMap<String, Interpolator1D> interpolators,
        final LinkedHashMap<String, ParameterLimitsTransform> parameterTransforms,
        final LinkedHashMap<String, InterpolatedDoublesCurve> knownParameterTermSturctures) {
    Validate.notNull(caps, "caps null");
    Validate.notNull(knotPoints, "null node points");
    Validate.notNull(interpolators, "null interpolators");
    Validate.isTrue(knotPoints.size() == interpolators.size(), "size mismatch between nodes and interpolators");
    if (knownParameterTermSturctures == null) {
        Validate.isTrue(knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA), "alpha curve not found");
        Validate.isTrue(knotPoints.containsKey(BETA) && interpolators.containsKey(BETA), "beta curve not found");
        Validate.isTrue(knotPoints.containsKey(NU) && interpolators.containsKey(NU), "nu curve not found");
        Validate.isTrue(knotPoints.containsKey(RHO) && interpolators.containsKey(RHO), "rho curve not found");
    } else {
        // the xor (^) enforces that each parameter curve comes from exactly one source:
        // either the fitted knot points/interpolators or the known term structures
        Validate.isTrue((knotPoints.containsKey(ALPHA) && interpolators.containsKey(ALPHA))
                ^ knownParameterTermSturctures.containsKey(ALPHA), "alpha curve not found");
        Validate.isTrue((knotPoints.containsKey(BETA) && interpolators.containsKey(BETA))
                ^ knownParameterTermSturctures.containsKey(BETA), "beta curve not found");
        Validate.isTrue((knotPoints.containsKey(NU) && interpolators.containsKey(NU))
                ^ knownParameterTermSturctures.containsKey(NU), "nu curve not found");
        Validate.isTrue((knotPoints.containsKey(RHO) && interpolators.containsKey(RHO))
                ^ knownParameterTermSturctures.containsKey(RHO), "rho curve not found");
    }

    final LinkedHashMap<String, Interpolator1D> transInterpolators = new LinkedHashMap<String, Interpolator1D>();
    final Set<String> names = interpolators.keySet();
    for (final String name : names) {
        final Interpolator1D temp = new TransformedInterpolator1D(interpolators.get(name),
                parameterTransforms.get(name));
        transInterpolators.put(name, temp);
    }

    _curveBuilder = new InterpolatedCurveBuildingFunction(knotPoints, transInterpolators);

    // _parameterTransforms = parameterTransforms; // TODO all the checks for this

    _capPricers = new ArrayList<CapFloorPricer>(caps.size());
    for (final CapFloor cap : caps) {
        _capPricers.add(new CapFloorPricer(cap, yieldCurves));
    }

    _knownParameterTermStructures = knownParameterTermSturctures;
}
From source file:org.netflux.core.RecordMetadata.java
/**
 * <p>
 * Sets the list of field metadata that this record metadata will contain. This means that this metadata will describe a record that
 * may contain a list of fields, each of them described by the corresponding field metadata, and the fields will be arranged in the
 * order specified by the list.
 * </p>
 * <p>
 * Passing a <code>null</code> value or including a null field metadata in the list will cause the method to throw an exception. If
 * you want to have a record metadata describing a record that can't hold any field, either use the
 * {@linkplain RecordMetadata#RecordMetadata() default constructor} or pass an empty list to this method.
 * </p>
 * <p>
 * It isn't allowed to have two or more fields in the same record with the same name, so this method will check for duplicated names,
 * and throw an exception in that case.
 * </p>
 *
 * @param fieldMetadata The list of field metadata this record metadata will contain.
 * @throws NullPointerException if <code>fieldMetadata</code> is <code>null</code> or contains a <code>null</code> item.
 * @throws IllegalArgumentException if the list of field metadata contains duplicated field names.
 */
public void setFieldMetadata(List<FieldMetadata> fieldMetadata) {
    LinkedHashMap<String, Integer> fieldIndexes = new LinkedHashMap<String, Integer>();
    int index = 0;
    for (FieldMetadata currentFieldMetadata : fieldMetadata) {
        fieldIndexes.put(currentFieldMetadata.getName(), index);
        index++;
    }
    if (fieldMetadata.size() == fieldIndexes.size()) {
        this.fieldMetadata = new ArrayList<FieldMetadata>(fieldMetadata);
        this.fieldIndexes = fieldIndexes;
    } else {
        // There are duplicated field names
        String errorMessage = RecordMetadata.messages.getString("message.duplicated.names");
        if (RecordMetadata.log.isInfoEnabled()) {
            RecordMetadata.log
                    .info(RecordMetadata.messages.getString("exception.duplicated.names") + errorMessage);
        }
        throw new IllegalArgumentException(errorMessage);
    }
}
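The fieldMetadata.size() == fieldIndexes.size() comparison above is a compact duplicate check: since the map is keyed by field name, a duplicate name overwrites an earlier entry and the map ends up smaller than the source list. The same idiom in isolation, as a minimal sketch:

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

public class DuplicateNameCheck {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("id", "name", "id");
        LinkedHashMap<String, Integer> indexes = new LinkedHashMap<String, Integer>();
        for (int i = 0; i < names.size(); i++) {
            indexes.put(names.get(i), i); // a repeated key overwrites, so the map does not grow
        }
        boolean hasDuplicates = indexes.size() != names.size();
        System.out.println(hasDuplicates); // true (2 != 3)
    }
}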
From source file:eu.supersede.gr.utility.PointsLogic.java
private void computePoints() {
    List<HAHPGamePlayerPoint> gamesPlayersPoints = gamesPlayersPointsRepository.findAll();
    // cycle on every gamesPlayersPoints
    for (int i = 0; i < gamesPlayersPoints.size(); i++) {
        HAHPGame g = gamesRepository.findOne(gamesPlayersPoints.get(i).getGame().getGameId());
        // set currentPlayer that is used for other methods
        g.setCurrentPlayer(gamesPlayersPoints.get(i).getUser());
        List<HAHPCriteriasMatrixData> criteriasMatrixDataList = criteriaMatricesRepository.findByGame(g);
        // calculate the agreementIndex for every gamesPlayersPoints of a game and a specific user
        Map<String, Double> resultTotal = AHPRest.CalculateAHP(g.getCriterias(), g.getRequirements(),
                criteriasMatrixDataList, g.getRequirementsMatrixData());
        Map<String, Double> resultPersonal = AHPRest.CalculatePersonalAHP(
                gamesPlayersPoints.get(i).getUser().getUserId(), g.getCriterias(), g.getRequirements(),
                criteriasMatrixDataList, g.getRequirementsMatrixData());
        List<Requirement> gameRequirements = g.getRequirements();
        Double sum = 0.0;
        // assumes resultTotal holds one entry per game requirement, keyed by requirement id
        for (int j = 0; j < resultTotal.size(); j++) {
            Double requirementValueTotal = resultTotal
                    .get(gameRequirements.get(j).getRequirementId().toString());
            Double requirementValuePersonal = resultPersonal
                    .get(gameRequirements.get(j).getRequirementId().toString());
            sum = sum + (Math.abs(requirementValueTotal - requirementValuePersonal)
                    * (1.0 - requirementValueTotal));
        }
        Double agreementIndex = M - (M * sum);
        gamesPlayersPoints.get(i).setAgreementIndex(agreementIndex.longValue());

        // calculate the positionInVoting for every gamesPlayersPoints of a game and a specific user
        List<User> players = g.getPlayers();
        List<HAHPRequirementsMatrixData> lrmd = requirementsMatricesRepository.findByGame(g);
        Map<User, Float> gamePlayerVotes = new HashMap<>();
        for (User player : players) {
            Integer total = 0;
            Integer voted = 0;
            if (lrmd != null) {
                for (HAHPRequirementsMatrixData data : lrmd) {
                    for (HAHPPlayerMove pm : data.getPlayerMoves()) {
                        if (pm.getPlayer().getUserId().equals(player.getUserId())) {
                            total++;
                            if (pm.getPlayed() == true && pm.getValue() != null && !pm.getValue().equals(-1l)) {
                                voted++;
                            }
                        }
                    }
                }
            }
            gamePlayerVotes.put(player, total.equals(0) ? 0f : ((new Float(voted) / new Float(total)) * 100));
        }
        LinkedHashMap<User, Float> orderedList = sortHashMapByValues(gamePlayerVotes);
        List<User> indexes = new ArrayList<>(orderedList.keySet());
        Integer index = indexes.indexOf(gamesPlayersPoints.get(i).getUser());
        // orderedList.size() turns the list index into a rank counted from the top
        Double positionInVoting = (orderedList.size() - (new Double(index) + 1.0)) + 1.0;
        gamesPlayersPoints.get(i).setPositionInVoting(positionInVoting.longValue());

        // calculate the virtualPosition of a user based on his/her points in a particular game
        HAHPGamePlayerPoint gpp = gamesPlayersPointsRepository
                .findByUserAndGame(gamesPlayersPoints.get(i).getUser(), g);
        List<HAHPGamePlayerPoint> specificGamePlayersPoints = gamesPlayersPointsRepository.findByGame(g);
        Collections.sort(specificGamePlayersPoints, new CustomComparator());
        Long virtualPosition = specificGamePlayersPoints.indexOf(gpp) + 1l;
        gamesPlayersPoints.get(i).setVirtualPosition(virtualPosition);

        Long movesPoints = 0l;
        Long gameProgressPoints = 0l;
        Long positionInVotingPoints = 0l;
        Long gameStatusPoints = 0l;
        Long agreementIndexPoints = 0l;
        Long totalPoints = 0l;
        // set the movesPoints
        movesPoints = g.getMovesDone().longValue();
        // set gameProgressPoints
        gameProgressPoints = (long) Math.floor(g.getPlayerProgress() / 10);
        // set positionInVotingPoints
        if (positionInVoting == 1) {
            positionInVotingPoints = 5l;
        } else if (positionInVoting == 2) {
            positionInVotingPoints = 3l;
        } else if (positionInVoting == 3) {
            positionInVotingPoints = 2l;
        }
        // set gameStatusPoints
        if (g.getPlayerProgress() != 100) {
            gameStatusPoints = -20l;
        } else {
            gameStatusPoints = 0l;
        }
        // set agreementIndexPoints
        agreementIndexPoints = agreementIndex.longValue();
        totalPoints = movesPoints.longValue() + gameProgressPoints + positionInVotingPoints + gameStatusPoints
                + agreementIndexPoints;
        // set totalPoints to 0 if the totalPoints are negative
        if (totalPoints < 0) {
            totalPoints = 0l;
        }
        gamesPlayersPoints.get(i).setPoints(totalPoints);
        gamesPlayersPointsRepository.save(gamesPlayersPoints.get(i));
    }
    System.out.println("Finished computing votes");
}
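Both this example and the next rely on a sortHashMapByValues helper whose source is not shown. A plausible minimal sketch, assuming it sorts entries ascending by value; the point of returning a LinkedHashMap is that, unlike a plain HashMap, it preserves the sorted iteration order (the MinClosure example below suggests its own variant sorts descending, so the comparator direction is an assumption here):

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class MapSorting {
    public static <K, V extends Comparable<V>> LinkedHashMap<K, V> sortHashMapByValues(Map<K, V> map) {
        List<Map.Entry<K, V>> entries = new ArrayList<Map.Entry<K, V>>(map.entrySet());
        Collections.sort(entries, new Comparator<Map.Entry<K, V>>() {
            public int compare(Map.Entry<K, V> a, Map.Entry<K, V> b) {
                return a.getValue().compareTo(b.getValue());
            }
        });
        LinkedHashMap<K, V> sorted = new LinkedHashMap<K, V>();
        for (Map.Entry<K, V> e : entries) {
            sorted.put(e.getKey(), e.getValue()); // insertion order == sorted order
        }
        return sorted;
    }
}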
From source file:gr.iit.demokritos.cru.cps.ai.ComputationalCreativityMetrics.java
public double MinClosure(String phrase, String story) {
    double closure = 0.0;
    // in case MinClosure is not called by ComputeRar_Eff
    if (story.equalsIgnoreCase("")) {
        story = phrase;
    }
    // hashmap of the terms and their index
    HashMap<String, Double> termIndex = new HashMap<String, Double>();
    // take the top terms of the phrase by their stems tf
    // HashMap<ArrayList<String>, Double> termsTf = inf.TopTerms(story.toLowerCase(), true);
    for (String s : phrase.split(" ")) {
        termIndex.put(s, 1.0 * story.indexOf(s));
    }
    // sort the hashmap (descending) and traverse it reversely, to start from the first word in the phrase
    LinkedHashMap<String, Double> sorted = inf.sortHashMapByValues(termIndex);
    // size() positions the iterator past the last entry so previous() walks the map backwards
    ListIterator iter = new ArrayList(sorted.keySet()).listIterator(sorted.size());
    HashMap<String, Double> graph = new HashMap<String, Double>();
    // store the first word in the phrase, in order to be found in the first iteration
    graph.put(sorted.keySet().toArray()[sorted.keySet().size() - 1].toString(), 0.0);
    // for each word that comes next in the phrase
    while (iter.hasPrevious()) {
        String s = iter.previous().toString();
        // find the shortest distance from it to the root (first word)
        double min = 1.0;
        // looking through every word that has already defined its min distance to the root
        for (String k : graph.keySet()) {
            double dist = getDistance(s, k); // + graph.get(k);
            if (dist < min) {
                min = dist;
            }
        }
        graph.put(s, min);
        // keep the overall sum of weights of the edges
        closure += min;
    }
    return closure;
}
From source file:com.proofpoint.jmx.MBeanRepresentation.java
public MBeanRepresentation(MBeanServer mbeanServer, ObjectName objectName, ObjectMapper objectMapper)
        throws JMException {
    this.objectName = objectName;
    MBeanInfo mbeanInfo = mbeanServer.getMBeanInfo(objectName);
    className = mbeanInfo.getClassName();
    description = mbeanInfo.getDescription();
    descriptor = toMap(mbeanInfo.getDescriptor());

    //
    // Attributes
    //
    LinkedHashMap<String, MBeanAttributeInfo> attributeInfos = Maps.newLinkedHashMap();
    for (MBeanAttributeInfo attributeInfo : mbeanInfo.getAttributes()) {
        attributeInfos.put(attributeInfo.getName(), attributeInfo);
    }
    String[] attributeNames = attributeInfos.keySet().toArray(new String[attributeInfos.size()]);

    ImmutableList.Builder<AttributeRepresentation> attributes = ImmutableList.builder();
    for (Attribute attribute : mbeanServer.getAttributes(objectName, attributeNames).asList()) {
        String attributeName = attribute.getName();
        // use remove so we only include one value for each attribute
        MBeanAttributeInfo attributeInfo = attributeInfos.remove(attributeName);
        if (attributeInfo == null) {
            // unknown extra attribute, could have been added after MBeanInfo was fetched
            continue;
        }
        Object attributeValue = attribute.getValue();
        AttributeRepresentation attributeRepresentation = new AttributeRepresentation(attributeInfo,
                attributeValue, objectMapper);
        attributes.add(attributeRepresentation);
    }
    this.attributes = attributes.build();

    //
    // Operations
    //
    ImmutableList.Builder<OperationRepresentation> operations = ImmutableList.builder();
    for (MBeanOperationInfo operationInfo : mbeanInfo.getOperations()) {
        operations.add(new OperationRepresentation(operationInfo));
    }
    this.operations = operations.build();
}
From source file:org.lokra.seaweedfs.core.FileTemplate.java
/**
 * Save files by stream map.
 *
 * @param streamMap   Map of file name and file stream.
 * @param contentType File content type.
 * @return Files status.
 * @throws IOException if the HTTP connection fails or the server responds with an error message.
 */
public LinkedHashMap<String, FileHandleStatus> saveFilesByStreamMap(
        LinkedHashMap<String, InputStream> streamMap, ContentType contentType) throws IOException {
    // Assign file key; streamMap.size() tells the master how many file ids to reserve
    final AssignFileKeyParams params = new AssignFileKeyParams(assignFileKeyParams.getReplication(),
            streamMap.size(), assignFileKeyParams.getDataCenter(), assignFileKeyParams.getTtl(),
            assignFileKeyParams.getCollection());
    final AssignFileKeyResult assignFileKeyResult = masterWrapper.assignFileKey(params);
    String uploadUrl;
    if (usingPublicUrl) {
        uploadUrl = assignFileKeyResult.getPublicUrl();
    } else {
        uploadUrl = assignFileKeyResult.getUrl();
    }
    // Upload files; the first one uses the base fid, the rest get an "_<index>" suffix
    LinkedHashMap<String, FileHandleStatus> resultMap = new LinkedHashMap<String, FileHandleStatus>();
    int index = 0;
    for (String fileName : streamMap.keySet()) {
        if (index == 0) {
            resultMap.put(fileName, new FileHandleStatus(assignFileKeyResult.getFid(),
                    volumeWrapper.uploadFile(uploadUrl, assignFileKeyResult.getFid(), fileName,
                            streamMap.get(fileName), timeToLive, contentType)));
        } else {
            resultMap.put(fileName,
                    new FileHandleStatus(assignFileKeyResult.getFid() + "_" + String.valueOf(index),
                            volumeWrapper.uploadFile(uploadUrl,
                                    assignFileKeyResult.getFid() + "_" + String.valueOf(index), fileName,
                                    streamMap.get(fileName), timeToLive, contentType)));
        }
        index++;
    }
    return resultMap;
}
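A hedged usage sketch for the method above, assuming a configured FileTemplate instance named fileTemplate and that the ContentType parameter is Apache HttpClient's org.apache.http.entity.ContentType. Passing a LinkedHashMap matters because its iteration order decides which file gets the base fid and which get the "_<index>" suffixes:

// assumes `fileTemplate` is a configured org.lokra.seaweedfs.core.FileTemplate
LinkedHashMap<String, InputStream> streams = new LinkedHashMap<String, InputStream>();
streams.put("a.txt", new FileInputStream("a.txt")); // first key gets the base fid
streams.put("b.txt", new FileInputStream("b.txt")); // second key gets the "_1" suffix
LinkedHashMap<String, FileHandleStatus> statuses =
        fileTemplate.saveFilesByStreamMap(streams, ContentType.DEFAULT_TEXT);
System.out.println(statuses.size() == streams.size()); // one status per input stream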