List of usage examples for java.util.LinkedHashMap#containsKey
boolean containsKey(Object key);
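Before the source-file examples, here is a minimal standalone sketch of the basic contract (the map contents and class name are hypothetical, not taken from any example below): containsKey reports whether a mapping for the given key exists, independent of the mapped value and of the insertion order that LinkedHashMap preserves.

import java.util.LinkedHashMap;

public class ContainsKeyDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> counts = new LinkedHashMap<>();
        counts.put("alpha", 1);
        counts.put("beta", null); // a key mapped to null is still "contained"

        System.out.println(counts.containsKey("alpha")); // true
        System.out.println(counts.containsKey("beta"));  // true, even though the value is null
        System.out.println(counts.containsKey("gamma")); // false

        // Typical guard before inserting a default, as several examples below do:
        if (!counts.containsKey("gamma")) {
            counts.put("gamma", 0);
        }
    }
}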
From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java
/**
 * Divide the biomaterials up into chunks based on the experimental factor given, keeping everybody in order.
 *
 * @param ef
 * @param bms
 * @return ordered map of fv->bm where fv is of ef, or null if it couldn't be done properly.
 */
private static LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>> chunkOnFactorVO(
        ExperimentalFactor ef, List<BioMaterialValueObject> bms) {
    if (bms == null) {
        return null;
    }

    LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>> chunks =
            new LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>>();

    /*
     * Get the factor values in the order we have things right now
     */
    Collection<Long> factorValueIds = EntityUtils.getIds(ef.getFactorValues());
    for (BioMaterialValueObject bm : bms) {
        for (FactorValueValueObject fv : bm.getFactorValueObjects()) {
            if (!factorValueIds.contains(fv.getId())) {
                continue;
            }
            if (chunks.keySet().contains(fv)) {
                continue;
            }
            chunks.put(fv, new ArrayList<BioMaterialValueObject>());
        }
    }

    /*
     * What if bm doesn't have a value for the factorvalue. Need a dummy value.
     */
    FactorValueValueObject dummy = new FactorValueValueObject();
    dummy.setFactorId(ef.getId());
    dummy.setValue("");
    dummy.setId(-1L);
    chunks.put(dummy, new ArrayList<BioMaterialValueObject>());

    for (BioMaterialValueObject bm : bms) {
        boolean found = false;
        for (FactorValueValueObject fv : bm.getFactorValueObjects()) {
            if (factorValueIds.contains(fv.getId())) {
                found = true;
                assert chunks.containsKey(fv);
                chunks.get(fv).add(bm);
            }
        }
        if (!found) {
            if (log.isDebugEnabled())
                log.debug(bm + " has no value for factor=" + ef + "; using dummy value");
            chunks.get(dummy).add(bm);
        }
    }

    if (chunks.get(dummy).size() == 0) {
        if (log.isDebugEnabled())
            log.debug("removing dummy");
        chunks.remove(dummy);
    }

    log.debug(chunks.size() + " chunks for " + ef + ", from current chunk of size " + bms.size());

    /*
     * Sanity check
     */
    int total = 0;
    for (FactorValueValueObject fv : chunks.keySet()) {
        List<BioMaterialValueObject> chunk = chunks.get(fv);
        total += chunk.size();
    }
    assert total == bms.size() : "expected " + bms.size() + ", got " + total;

    return chunks;
}
From source file:amulet.resourceprofiler.ResourceProfiler.java
/**
 * Helper function for getting the AVG. EXECUTION TIME for a particular resource.
 *
 * @param resource
 * @param qmapp
 * @param deviceInfo
 * @param steadyStateInfo
 * @param api_energy_lookup
 * @return
 */
private double getTimeHelper(Resource resource, QMApp qmapp, DeviceInfo deviceInfo,
        SteadyStateInfo steadyStateInfo, LinkedHashMap<String, EnergyParam> api_energy_lookup,
        double[][] fill_rect_lookup, double[][] clear_rect_lookup) {
    // Compute the avg. time to execute a line of code (LoC).
    double AVG_LOC_TIME = deviceInfo.avgNumInstructionsPerLoC * deviceInfo.avgBasicInstructionTime;

    // Define an arbitrary time to give to un-recognized function calls/operations.
    double UNKNOWN_TIME = UNKNOWN_QUANTITY_SCALAR * AVG_LOC_TIME;

    // A time variable for keeping track of the time value to be returned to the caller.
    double time = resource.time;

    switch (resource.type) {
    case AMULET_API_FUNCTION_CALL:
        if (api_energy_lookup.containsKey(resource.name)) {
            // If the Amulet API call is recognized, then we just assign the real measurement value.
            EnergyParam energyparam = api_energy_lookup.get(resource.name);
            time = energyparam.avgTime;

            if (resource.name.contains("ClearRect")) {
                // MAGIC NUMBERS!!! This whole thing is a bit hacky for the special lookup functions.
                int w = Math.min(resource.getIntExtra("width"), 127);
                int h = Math.min(resource.getIntExtra("height"), 114);
                time = clear_rect_lookup[w][h];
            }
            if (resource.name.contains("FillRect")) {
                int w = Math.min(resource.getIntExtra("width"), 127);
                int h = Math.min(resource.getIntExtra("height"), 114);
                time = fill_rect_lookup[w][h];
            }
        } else {
            // If the Amulet API call is *not* recognized, then we just assign a fixed time.
            m_resourceProfilerWarnings.add(" + (!) LOOK-UP WARNING:: Time for Amulet API function '"
                    + resource.name + "' not found in api_energy_lookup table; assigning UNKNOWN_TIME="
                    + UNKNOWN_TIME + ".");
            time = UNKNOWN_TIME;
        }

        // If this call is nested within a loop, the time of this function call (really, the
        // resource record) needs to be multiplied by the number of times this call is actually made.
        if (resource.isContainedInLoop()) {
            // System.out.println("**** RESOURCE " + resource.name + " time was = " + time);
            time *= resource.getNumLoopIterations();
            // System.out.println("**** RESOURCE " + resource.name + " is now = " + time + " (original-time x " + resource.getNumLoopIterations() + ")");
        }
        break;

    case NON_AMULET_API_FUNCTION_CALL:
        if (qmapp.operationTimeMap.containsKey(resource.name)) {
            // This is a function defined within the QM application (i.e., an "operation" by QM's definition).
            time = qmapp.operationTimeMap.get(resource.name);
        } else {
            // If the Non-Amulet API call is *not* recognized, then we just assign a fixed cost.
            m_resourceProfilerWarnings.add(" + (!) LOOK-UP WARNING:: Time for Non-Amulet API function '"
                    + resource.name + "' not found; assigning UNKNOWN_TIME=" + UNKNOWN_TIME + ".");
            time = UNKNOWN_TIME;
        }

        // If this call is nested within a loop, the time of this function call (really, the
        // resource record) needs to be multiplied by the number of times this call is actually made.
        if (resource.isContainedInLoop()) {
            // System.out.println("**** RESOURCE " + resource.name + " time was = " + time);
            time *= resource.getNumLoopIterations();
            // System.out.println("**** RESOURCE " + resource.name + " is now = " + time + " (original-time x " + resource.getNumLoopIterations() + ")");
        }
        break;

    case COMPUTATION:
        if (resource.name.equals(ComputationType.BASIC_BLOCKS.text())) {
            // Get the number of lines of code.
            double nLinesOfCode = resource.getIntExtra(Resource.EXTRA_NUM_LINES_OF_CODE);
            time = nLinesOfCode * AVG_LOC_TIME;
        } else if (resource.name.equals(ComputationType.FOR_LOOP.text())) {
            try {
                // Get number of iterations in this loop.
                double nIterations = resource.getNumLoopIterations();
                // Get the number of lines of code.
                double nLinesOfCode = resource.getIntExtra(Resource.EXTRA_LOOP_NUM_STATEMENTS);
                time = (nLinesOfCode * AVG_LOC_TIME) * nIterations;
            } catch (Exception e) {
                System.err.println("**FAILED RESOURCE PARSING: skipping this resource");
                System.err.println("  RESOURCE: " + resource + "");
            }
        }
        break;

    case SENSOR_SUBSCRIPTION:
        if (resource.name.equalsIgnoreCase("ACCELEROMETER")) {
            time = 0.0; // no time here exactly -- compute.js scales the subscription to be in terms of "per week"
        } else if (resource.name.equalsIgnoreCase("HEARTRATE")) {
            time = 0.0; // no time here exactly -- compute.js scales the subscription to be in terms of "per week"
        }
        break;

    case MEMORY:
    case GLOBAL_MEMORY:
    case UNKNOWN:
    default:
    }

    return time;
}
From source file:org.cerberus.servlet.crud.testexecution.ReadTestCaseExecution.java
private List<TestCaseExecution> hashExecution(List<TestCaseExecution> testCaseExecutions,
        List<TestCaseExecutionInQueue> testCaseExecutionsInQueue) throws ParseException {
    LinkedHashMap<String, TestCaseExecution> testCaseExecutionsList = new LinkedHashMap();
    SimpleDateFormat formater = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");

    for (TestCaseExecution testCaseExecution : testCaseExecutions) {
        String key = testCaseExecution.getBrowser() + "_" + testCaseExecution.getCountry() + "_"
                + testCaseExecution.getEnvironment() + "_" + testCaseExecution.getTest() + "_"
                + testCaseExecution.getTestCase();
        testCaseExecutionsList.put(key, testCaseExecution);
    }

    for (TestCaseExecutionInQueue testCaseExecutionInQueue : testCaseExecutionsInQueue) {
        TestCaseExecution testCaseExecution = testCaseExecutionInQueueService
                .convertToTestCaseExecution(testCaseExecutionInQueue);
        String key = testCaseExecution.getBrowser() + "_" + testCaseExecution.getCountry() + "_"
                + testCaseExecution.getEnvironment() + "_" + testCaseExecution.getTest() + "_"
                + testCaseExecution.getTestCase();
        if ((testCaseExecutionsList.containsKey(key) && testCaseExecutionsList.get(key)
                .getStart() < testCaseExecutionInQueue.getRequestDate().getTime())
                || !testCaseExecutionsList.containsKey(key)) {
            testCaseExecutionsList.put(key, testCaseExecution);
        }
    }

    List<TestCaseExecution> result = new ArrayList<TestCaseExecution>(testCaseExecutionsList.values());
    return result;
}
From source file:amulet.resourceprofiler.ResourceProfiler.java
/**
 * Helper function for getting the AVG. ENERGY COST for a particular resource.
 *
 * @param resource
 * @param qmapp
 * @param deviceInfo
 * @param steadyStateInfo
 * @param api_energy_lookup
 * @return
 */
private double getCostHelper(Resource resource, QMApp qmapp, DeviceInfo deviceInfo,
        SteadyStateInfo steadyStateInfo, LinkedHashMap<String, EnergyParam> api_energy_lookup,
        double[][] fill_rect_lookup, double[][] clear_rect_lookup) {
    // Compute the avg. energy cost to execute a line of code (LoC).
    double ENERGY_COST_PER_LOC = deviceInfo.avgNumInstructionsPerLoC
            * (deviceInfo.avgBasicInstructionPower * deviceInfo.avgBasicInstructionTime);

    // Define an arbitrary cost to give to un-recognized function calls.
    double UNKNOWN_COST = UNKNOWN_QUANTITY_SCALAR * ENERGY_COST_PER_LOC;

    // A cost variable for keeping track of the cost value to be returned to the caller.
    double cost = resource.cost;

    switch (resource.type) {
    case AMULET_API_FUNCTION_CALL:
        if (api_energy_lookup.containsKey(resource.name)) {
            // If the Amulet API call is recognized, then we just assign the real measurement value.
            EnergyParam energyparam = api_energy_lookup.get(resource.name);

            // TODO: is there a better/more correct way to do this?!
            // If cost is zero, calculate actual cost with known energy values; otherwise, leave it alone.
            double scalar = cost;
            if (cost == 0.0) {
                scalar = 1.0;
            }
            cost = scalar * energyparam.avgPower * energyparam.avgTime;

            if (resource.name.contains("ClearRect")) {
                int w = Math.min(resource.getIntExtra("width"), 127);
                int h = Math.min(resource.getIntExtra("height"), 114);
                double time_cost = clear_rect_lookup[w][h];
                cost = scalar * energyparam.avgPower * time_cost;
                System.out.println("In clear " + cost);
            }
            if (resource.name.contains("FillRect")) {
                int w = Math.min(resource.getIntExtra("width"), 127);
                int h = Math.min(resource.getIntExtra("height"), 114);
                double time_cost = fill_rect_lookup[w][h];
                cost = scalar * energyparam.avgPower * time_cost;
            }
        } else {
            // If the Amulet API call is *not* recognized, then we just assign a fixed cost.
            m_resourceProfilerWarnings.add(" + (!) LOOK-UP WARNING:: Cost for Amulet API function '"
                    + resource.name + "' not found in api_energy_lookup table; assigning UNKNOWN_COST="
                    + UNKNOWN_COST + ".");
            cost = UNKNOWN_COST;
        }

        // If this call is nested within a loop, the cost of this function call needs to
        // be multiplied by the number of times this call is actually made.
        if (resource.isContainedInLoop()) {
            // System.out.println("**** RESOURCE " + resource.name + " cost was = " + cost);
            cost *= resource.getNumLoopIterations();
            // System.out.println("**** RESOURCE " + resource.name + " is now = " + cost + " (original-cost x " + resource.getNumLoopIterations() + ")");
        }
        break;

    case NON_AMULET_API_FUNCTION_CALL:
        if (qmapp.operationCostMap.containsKey(resource.name)) {
            // This is a function defined within the QM application (i.e., an "operation" by QM's definition).
            cost = qmapp.operationCostMap.get(resource.name) * qmapp.operationTimeMap.get(resource.name);
        } else {
            // If the Non-Amulet API call is *not* recognized, then we just assign a fixed cost.
            m_resourceProfilerWarnings.add(" + (!) LOOK-UP WARNING:: Cost for Non-Amulet API function '"
                    + resource.name + "' not found; assigning UNKNOWN_COST=" + UNKNOWN_COST + ".");
            cost = UNKNOWN_COST;
        }

        // TODO: scalar handling?!
        // If this call is nested within a loop, the cost of this function call needs to
        // be multiplied by the number of times this call is actually made.
        if (resource.isContainedInLoop()) {
            // System.out.println("**** RESOURCE " + resource.name + " cost was = " + cost);
            cost *= resource.getNumLoopIterations();
            // System.out.println("**** RESOURCE " + resource.name + " is now = " + cost + " (original-cost x " + resource.getNumLoopIterations() + ")");
        }
        break;

    case COMPUTATION:
        if (resource.name.equals(ComputationType.BASIC_BLOCKS.text())) {
            // Get the number of lines of code.
            double nLinesOfCode = resource.getIntExtra(Resource.EXTRA_NUM_LINES_OF_CODE);
            // Calculate cost.
            cost = nLinesOfCode * ENERGY_COST_PER_LOC;
        } else if (resource.name.equals(ComputationType.FOR_LOOP.text())) {
            // Get the number of lines of code.
            // If double for-loop, then skip.
            try {
                double nLinesOfCode = resource.getIntExtra(Resource.EXTRA_LOOP_NUM_STATEMENTS);
                // Get number of iterations in this loop.
                double nIterations = resource.getNumLoopIterations();
                // Calculate cost.
                cost = (ENERGY_COST_PER_LOC * nLinesOfCode) * nIterations;
            } catch (Exception e) {
                System.err.println("**FAILED RESOURCE PARSING: skipping this resource");
                System.err.println("  RESOURCE: " + resource + "");
            }
        }
        break;

    case SENSOR_SUBSCRIPTION:
        if (resource.name.equalsIgnoreCase("ACCELEROMETER")) {
            cost = steadyStateInfo.sensorAccelerometer;
        } else if (resource.name.equalsIgnoreCase("HEARTRATE")) {
            cost = steadyStateInfo.sensorHeartRate;
        }
        break;

    case MEMORY:
    case GLOBAL_MEMORY:
    case UNKNOWN:
    default:
    }

    return cost;
}
From source file:org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.java
@Override
protected LinkedHashMap<String, ZuulRoute> locateRoutes() {
    LinkedHashMap<String, ZuulRoute> routesMap = new LinkedHashMap<String, ZuulRoute>();
    routesMap.putAll(super.locateRoutes());
    if (this.discovery != null) {
        Map<String, ZuulRoute> staticServices = new LinkedHashMap<String, ZuulRoute>();
        for (ZuulRoute route : routesMap.values()) {
            String serviceId = route.getServiceId();
            if (serviceId == null) {
                serviceId = route.getId();
            }
            if (serviceId != null) {
                staticServices.put(serviceId, route);
            }
        }
        // Add routes for discovery services by default
        List<String> services = this.discovery.getServices();
        String[] ignored = this.properties.getIgnoredServices().toArray(new String[0]);
        for (String serviceId : services) {
            // Ignore specifically ignored services and those that were manually
            // configured
            String key = "/" + mapRouteToService(serviceId) + "/**";
            if (staticServices.containsKey(serviceId) && staticServices.get(serviceId).getUrl() == null) {
                // Explicitly configured with no URL, cannot be ignored
                // all static routes are already in routesMap
                // Update location using serviceId if location is null
                ZuulRoute staticRoute = staticServices.get(serviceId);
                if (!StringUtils.hasText(staticRoute.getLocation())) {
                    staticRoute.setLocation(serviceId);
                }
            }
            if (!PatternMatchUtils.simpleMatch(ignored, serviceId) && !routesMap.containsKey(key)) {
                // Not ignored
                routesMap.put(key, new ZuulRoute(key, serviceId));
            }
        }
    }
    if (routesMap.get(DEFAULT_ROUTE) != null) {
        ZuulRoute defaultRoute = routesMap.get(DEFAULT_ROUTE);
        // Move the defaultServiceId to the end
        routesMap.remove(DEFAULT_ROUTE);
        routesMap.put(DEFAULT_ROUTE, defaultRoute);
    }
    LinkedHashMap<String, ZuulRoute> values = new LinkedHashMap<>();
    for (Entry<String, ZuulRoute> entry : routesMap.entrySet()) {
        String path = entry.getKey();
        // Prepend with slash if not already present.
        if (!path.startsWith("/")) {
            path = "/" + path;
        }
        if (StringUtils.hasText(this.properties.getPrefix())) {
            path = this.properties.getPrefix() + path;
            if (!path.startsWith("/")) {
                path = "/" + path;
            }
        }
        values.put(path, entry.getValue());
    }
    return values;
}
From source file:org.spout.api.chat.ChatArguments.java
/**
 * Splits this ChatArguments instance into sections
 *
 * @param type How these arguments are to be split into sections
 * @return The split sections
 */
public List<ChatSection> toSections(SplitType type) {
    List<ChatSection> sections = new ArrayList<ChatSection>();
    StringBuilder currentWord = new StringBuilder();
    LinkedHashMap<Integer, List<ChatStyle>> map;
    switch (type) {
    case WORD:
        map = new LinkedHashMap<Integer, List<ChatStyle>>();
        int curIndex = 0;
        for (Object obj : getExpandedPlaceholders()) {
            if (obj instanceof ChatStyle) {
                ChatStyle style = (ChatStyle) obj;
                List<ChatStyle> list = map.get(curIndex);
                if (list == null) {
                    list = new ArrayList<ChatStyle>();
                    map.put(curIndex, list);
                }
                ChatSectionUtils.removeConflicting(list, style);
                list.add(style);
            } else {
                String val = String.valueOf(obj);
                for (int i = 0; i < val.length(); ++i) {
                    int codePoint = val.codePointAt(i);
                    if (Character.isWhitespace(codePoint)) {
                        sections.add(new ChatSectionImpl(type,
                                new LinkedHashMap<Integer, List<ChatStyle>>(map), currentWord.toString()));
                        curIndex = 0;
                        currentWord = new StringBuilder();
                        if (map.size() > 0) {
                            final List<ChatStyle> previousStyles = map.containsKey(-1)
                                    ? new ArrayList<ChatStyle>(map.get(-1))
                                    : new ArrayList<ChatStyle>();
                            for (Map.Entry<Integer, List<ChatStyle>> entry : map.entrySet()) {
                                if (entry.getKey() != -1) {
                                    for (ChatStyle style : entry.getValue()) {
                                        ChatSectionUtils.removeConflicting(previousStyles, style);
                                        previousStyles.add(style);
                                    }
                                }
                            }
                            map.clear();
                            map.put(-1, previousStyles);
                        }
                    } else {
                        currentWord.append(val.substring(i, i + 1));
                        curIndex++;
                    }
                }
            }
        }
        if (currentWord.length() > 0) {
            sections.add(new ChatSectionImpl(type, map, currentWord.toString()));
        }
        break;
    case STYLE_CHANGE:
        StringBuilder curSection = new StringBuilder();
        List<ChatStyle> activeStyles = new ArrayList<ChatStyle>(3);
        for (Object obj : getExpandedPlaceholders()) {
            if (obj instanceof ChatStyle) {
                ChatStyle style = (ChatStyle) obj;
                ChatSectionUtils.removeConflicting(activeStyles, style);
                activeStyles.add(style);
                map = new LinkedHashMap<Integer, List<ChatStyle>>();
                map.put(-1, new ArrayList<ChatStyle>(activeStyles));
                sections.add(new ChatSectionImpl(type, map, curSection.toString()));
                curSection = new StringBuilder();
            } else {
                curSection.append(obj);
            }
        }
        break;
    case ALL:
        return Collections.<ChatSection>singletonList(
                new ChatSectionImpl(getSplitType(), getActiveStyles(), getPlainString()));
    default:
        throw new IllegalArgumentException("Unknown SplitOption " + type + "!");
    }
    return sections;
}
From source file:OSFFM_ORC.FederationActionManager.java
/**
 *
 * @param tmpMapcred
 * @return
 */
private FednetsLink createCredMapwithoutDuplicate(
        HashMap<String, ArrayList<ArrayList<OpenstackInfoContainer>>> tmpMapcred, FednetsLink fe) {
    LinkedHashMap<String, OpenstackInfoContainer> cleanMap = new LinkedHashMap<>();
    Set<String> s = tmpMapcred.keySet();
    for (String st : s) {
        ArrayList<ArrayList<OpenstackInfoContainer>> tmpar = tmpMapcred.get(st);
        for (ArrayList<OpenstackInfoContainer> at : tmpar) {
            for (OpenstackInfoContainer oic : at) {
                if (!cleanMap.containsKey(oic.getIdCloud())) {
                    cleanMap.put(oic.getIdCloud(), oic);
                    fe.addDcInFednet(oic.getIdCloud());
                }
            }
        }
    }
    fe.setCloudId_To_OIC(cleanMap);
    return fe;
}
From source file:org.apache.jackrabbit.oak.plugins.segment.file.TarReader.java
/**
 * Scans through the tar file, looking for all segment entries.
 *
 * @throws IOException if the tar file could not be read
 */
private static void recoverEntries(File file, RandomAccessFile access, LinkedHashMap<UUID, byte[]> entries)
        throws IOException {
    byte[] header = new byte[BLOCK_SIZE];
    while (access.getFilePointer() + BLOCK_SIZE <= access.length()) {
        // read the tar header block
        access.readFully(header);

        // compute the header checksum
        int sum = 0;
        for (int i = 0; i < BLOCK_SIZE; i++) {
            sum += header[i] & 0xff;
        }

        // identify possible zero block
        if (sum == 0 && access.getFilePointer() + 2 * BLOCK_SIZE == access.length()) {
            return; // found the zero blocks at the end of the file
        }

        // replace the actual stored checksum with spaces for comparison
        for (int i = 148; i < 148 + 8; i++) {
            sum -= header[i] & 0xff;
            sum += ' ';
        }
        byte[] checkbytes = String.format("%06o\0 ", sum).getBytes(UTF_8);
        for (int i = 0; i < checkbytes.length; i++) {
            if (checkbytes[i] != header[148 + i]) {
                log.warn("Invalid entry checksum at offset {} in tar file {}, skipping...",
                        access.getFilePointer() - BLOCK_SIZE, file);
            }
        }

        // The header checksum passes, so read the entry name and size
        ByteBuffer buffer = ByteBuffer.wrap(header);
        String name = readString(buffer, 100);
        buffer.position(124);
        int size = readNumber(buffer, 12);
        if (access.getFilePointer() + size > access.length()) {
            // checksum was correct, so the size field should be accurate
            log.warn("Partial entry {} in tar file {}, ignoring...", name, file);
            return;
        }

        Matcher matcher = NAME_PATTERN.matcher(name);
        if (matcher.matches()) {
            UUID id = UUID.fromString(matcher.group(1));
            String checksum = matcher.group(3);
            if (checksum != null || !entries.containsKey(id)) {
                byte[] data = new byte[size];
                access.readFully(data);

                // skip possible padding to stay at block boundaries
                long position = access.getFilePointer();
                long remainder = position % BLOCK_SIZE;
                if (remainder != 0) {
                    access.seek(position + (BLOCK_SIZE - remainder));
                }

                if (checksum != null) {
                    CRC32 crc = new CRC32();
                    crc.update(data);
                    if (crc.getValue() != Long.parseLong(checksum, 16)) {
                        log.warn("Checksum mismatch in entry {} of tar file {}, skipping...", name, file);
                        continue;
                    }
                }

                entries.put(id, data);
            }
        } else if (!name.equals(file.getName() + ".idx")) {
            log.warn("Unexpected entry {} in tar file {}, skipping...", name, file);
            long position = access.getFilePointer() + size;
            long remainder = position % BLOCK_SIZE;
            if (remainder != 0) {
                position += BLOCK_SIZE - remainder;
            }
            access.seek(position);
        }
    }
}
From source file:org.finra.dm.service.impl.BusinessObjectFormatServiceImpl.java
/**
 * Validates a list of schema columns.
 *
 * @param schemaColumns the list of schema columns.
 * @param schemaEqualityValidationMap a map of schema column names to their schema column. This is used to check equality across all data schema columns as
 *            well as partition schema columns.
 */
private void validateSchemaColumns(List<SchemaColumn> schemaColumns,
        LinkedHashMap<String, SchemaColumn> schemaEqualityValidationMap) {
    // Validate schema columns if they are specified.
    if (!CollectionUtils.isEmpty(schemaColumns)) {
        // Create a schema column name map that we will use to check for duplicate
        // columns for the specified list of schema columns (i.e. data or partition).
        LinkedHashMap<String, SchemaColumn> schemaColumnNameValidationMap = new LinkedHashMap<>();

        // Loop through each schema column in the list.
        for (SchemaColumn schemaColumn : schemaColumns) {
            // Perform validation.
            Assert.hasText(schemaColumn.getName(), "A schema column name must be specified.");
            Assert.hasText(schemaColumn.getType(), "A schema column data type must be specified.");

            // Remove leading and trailing spaces.
            schemaColumn.setName(schemaColumn.getName().trim());
            schemaColumn.setType(schemaColumn.getType().trim());
            schemaColumn.setSize(schemaColumn.getSize() == null ? null : schemaColumn.getSize().trim());
            schemaColumn.setDefaultValue(
                    schemaColumn.getDefaultValue() == null ? null : schemaColumn.getDefaultValue().trim());

            // Ensure the column name isn't a duplicate within this list only by using a map.
            String lowercaseSchemaColumnName = schemaColumn.getName().toLowerCase();
            if (schemaColumnNameValidationMap.containsKey(lowercaseSchemaColumnName)) {
                throw new IllegalArgumentException(
                        String.format("Duplicate schema column name \"%s\" found.", schemaColumn.getName()));
            }
            schemaColumnNameValidationMap.put(lowercaseSchemaColumnName, schemaColumn);

            // Ensure a partition column and a data column are equal (i.e. contain the same configuration).
            SchemaColumn existingSchemaColumn = schemaEqualityValidationMap.get(lowercaseSchemaColumnName);
            if ((existingSchemaColumn != null) && !schemaColumn.equals(existingSchemaColumn)) {
                throw new IllegalArgumentException("Schema data and partition column configurations with name \""
                        + schemaColumn.getName()
                        + "\" have conflicting values. All column values are case sensitive and must be identical.");
            }
            schemaEqualityValidationMap.put(lowercaseSchemaColumnName, schemaColumn);
        }
    }
}
From source file:com.jsonstore.api.JSONStoreCollection.java
/**
 * @exclude
 */
private void addNonDuplicates(LinkedHashMap<Integer, JSONObject> resultHash, List<JSONObject> results)
        throws JSONStoreFindException {
    try {
        if (results != null) {
            // JSONObject is awful and apparently equals doesn't work, so we have to track dupes ourselves.
            for (JSONObject jso : results) {
                Integer id = jso.getInt(DatabaseConstants.FIELD_ID);
                if (!resultHash.containsKey(id)) {
                    resultHash.put(id, jso);
                }
            }
        }
    } catch (JSONException e) {
        String message = "Error when attempting to find a document. A JSONException occurred.";
        JSONStoreFindException jsException = new JSONStoreFindException(message, e);
        logger.logError(message, jsException);
        throw jsException;
    }
}