List of usage examples for java.util Map containsKey
boolean containsKey(Object key);
From source file:com.nuvolect.deepdive.webserver.OmniRest.java
/**
 * Dispatches a REST request to the matching OmniFile operation and returns the
 * JSON response as an InputStream.
 *
 * The command is taken from segment [2] of the "uri" parameter and must match a
 * CMD_ID enum constant; unknown commands record an error instead of throwing.
 * Every response wrapper carries "error", "cmd_id" and "delta_time" fields.
 *
 * @param ctx    Android context (currently unused in this method body)
 * @param params request parameters; recognized keys include "uri", "volume_id",
 *               "path", "data" and "encoded_path"
 * @return UTF-8 JSON response stream, or null if serialization fails
 */
public static InputStream process(Context ctx, Map<String, String> params) {
    long timeStart = System.currentTimeMillis();
    CMD_ID cmd_id = CMD_ID.NIL;
    String error = "";
    // Default to the user's volume/path; request parameters may override both.
    String volumeId = App.getUser().getDefaultVolumeId();
    if (params.containsKey("volume_id"))
        volumeId = params.get("volume_id");
    String path = "";
    if (params.containsKey("path"))
        path = params.get("path");
    try {
        // Command name is the third path segment, e.g. "/omni/<cmd>/...".
        String uri = params.get("uri");
        String segments[] = uri.split("/");
        cmd_id = CMD_ID.valueOf(segments[2]);
    } catch (IllegalArgumentException e) {
        // NOTE(review): message reads params.get("cmd") but the command was parsed
        // from "uri" above — confirm whether "cmd" is ever populated by callers.
        error = "Error, invalid command: " + params.get("cmd");
    }
    JSONObject wrapper = new JSONObject();
    String extra = ""; // NOTE(review): unused local — candidate for removal.
    try {
        switch (cmd_id) {
        case NIL:
            // Unknown/unparsed command: fall through to the error wrapper below.
            break;
        case decode_hash: {
            String result = decode_hash(params.get("data"));
            wrapper.put("result", result);
            break;
        }
        case delete: {
            JSONObject result = new JSONObject();
            OmniFile file = new OmniFile(params.get("encoded_path"));
            boolean delete_result = file.delete();
            result.put("result", delete_result);
            result.put("error", "");
            wrapper.put("result", result);
            break;
        }
        case encode_hash: {
            String result = encode_hash(params.get("data"));
            wrapper.put("result", result);
            break;
        }
        case get: {
            // NOTE(review): "get" is currently a no-op; the streaming implementation
            // below was disabled, presumably pending a servlet-response refactor.
            // InputStream is = null;
            // OmniFile of = new OmniFile( encodedPath);
            //
            // if( of.exists())
            // is = of.getFileInputStream();
            // else
            // is = IOUtils.getInputStreamFromString("File not found: " + encodedPath);
            //
            // try {
            // org.apache.commons.io.IOUtils.copy(is, response.getOutputStream());
            // response.flushBuffer();
            //
            // } catch (IOException e) {
            // LogUtil.logException(OmniRest.class, e);
            // }
            break;
        }
        case get_info: {
            String result = ProbeUtil.getInfo(params.get("encoded_path")).toString();
            wrapper.put("result", result);
            break;
        }
        case get_text: {
            JSONObject result = OmniUtil.getText(volumeId, path);
            wrapper.put("result", result.toString());
            break;
        }
        case list_files: {
            JSONObject result = new JSONObject();
            OmniFile om = new OmniFile(params.get("encoded_path"));
            // NOTE(review): listFiles() may return null for a non-directory —
            // confirm OmniFile's contract; a null here would NPE in the loop.
            OmniFile[] files = om.listFiles();
            JSONArray jsonArray = new JSONArray();
            for (OmniFile file : files) {
                JSONObject fileObj = FileObj.makeObj(file, "");
                jsonArray.put(fileObj);
            }
            result.put("result", jsonArray.toString());
            result.put("error", "");
            wrapper.put("result", result);
            break;
        }
        case mime: {
            String result = MimeUtil.getMime(params.get("data"));
            wrapper.put("result", result);
            break;
        }
        case mkdir: {
            JSONObject result = new JSONObject();
            OmniFile file = new OmniFile(params.get("encoded_path"));
            boolean mkdir_result = file.mkdirs();
            result.put("result", mkdir_result);
            result.put("error", "");
            wrapper.put("result", result);
            break;
        }
        case upload:
            // Upload is handled elsewhere; nothing to add to the wrapper.
            break;
        }
        if (!error.isEmpty())
            LogUtil.log(OmniRest.class, "Error: " + error);
        // Common response envelope: error text, echoed command, and elapsed time.
        wrapper.put("error", error);
        wrapper.put("cmd_id", cmd_id.toString());
        wrapper.put("delta_time", String.valueOf(System.currentTimeMillis() - timeStart) + " ms");
        return new ByteArrayInputStream(wrapper.toString().getBytes("UTF-8"));
    } catch (Exception e) {
        e.printStackTrace();
    }
    // Reached only when building/serializing the JSON wrapper failed.
    return null;
}
From source file:de.micromata.genome.jpa.Clauses.java
/**
 * Returns an HQL variable name derived from {@code column} that does not
 * collide with any key already present in {@code currentArgs}.
 *
 * If {@code column} itself is unused it is returned as-is; otherwise an
 * incrementing numeric suffix is appended ({@code column1}, {@code column2},
 * ...) until a free name is found.
 *
 * @param column      the column name to base the variable on
 * @param currentArgs the argument names already in use
 * @return a variable name not present as a key in {@code currentArgs}
 */
public static String getVariable(String column, Map<String, Object> currentArgs) {
    if (!currentArgs.containsKey(column)) {
        return column;
    }
    // Probe column1, column2, ... until an unused name is found. Terminates
    // because currentArgs is finite.
    int counter = 1;
    while (currentArgs.containsKey(column + counter)) {
        ++counter;
    }
    return column + counter;
}
From source file:de.qaware.chronix.solr.query.analysis.SolrDocumentBuilder.java
/** * Collects the given document and groups them using the join function result * * @param docs the found documents that should be grouped by the join function * @param joinFunction the join function * @return the grouped documents/*from w ww.ja va 2 s . c om*/ */ public static Map<String, List<SolrDocument>> collect(SolrDocumentList docs, Function<SolrDocument, String> joinFunction) { Map<String, List<SolrDocument>> collectedDocs = new HashMap<>(); for (SolrDocument doc : docs) { String key = joinFunction.apply(doc); if (!collectedDocs.containsKey(key)) { collectedDocs.put(key, new ArrayList<>()); } collectedDocs.get(key).add(doc); } return collectedDocs; }
From source file:gov.va.chir.tagline.dao.DatasetUtil.java
private static void processFeatures(final Map<String, Object> features, final Map<String, Integer> featureType, final Map<String, Set<String>> nominalFeatureMap) { for (String feature : features.keySet()) { final Object obj = features.get(feature); if (!featureType.containsKey(feature)) { // Determine feature type (simplistic determination) if (obj instanceof Integer || obj instanceof Double) { featureType.put(feature, Attribute.NUMERIC); } else { featureType.put(feature, Attribute.NOMINAL); nominalFeatureMap.put(feature, new TreeSet<String>()); }//from ww w . j ava2 s . c om } // Add distinct value for nominal attributes if (featureType.get(feature) == Attribute.NOMINAL) { nominalFeatureMap.get(feature).add(obj.toString()); } } }
From source file:io.github.moosbusch.lumpi.util.FormUtil.java
/**
 * Collects the property types of {@code type} whose names appear in the
 * exclusion map, but only when the exclusion entry names a supertype of
 * {@code type} rather than {@code type} itself.
 *
 * @param type               the bean class whose properties are inspected
 * @param excludedProperties property name to the classes it is excluded for;
 *                           may be null, in which case an empty map is returned
 * @return property name to property type for the matching properties
 */
public static Map<String, Class<?>> getPropertyTypesMap(final Class<?> type,
        Map<String, Set<Class<?>>> excludedProperties) {
    final Map<String, Class<?>> result = new HashMap<>();
    if (excludedProperties == null) {
        return result;
    }
    for (PropertyDescriptor descriptor : PropertyUtils.getPropertyDescriptors(type)) {
        Class<?> descriptorType = descriptor.getPropertyType();
        if (descriptorType == null) {
            // Indexed-only properties have no plain property type; skip them.
            continue;
        }
        String descriptorName = descriptor.getName();
        if (!excludedProperties.containsKey(descriptorName)) {
            continue;
        }
        Set<Class<?>> ignoredForTypes = excludedProperties.get(descriptorName);
        if (ignoredForTypes.contains(type)) {
            // Excluded directly for this exact class: do not report it.
            continue;
        }
        // Report the property only if the exclusion targets a supertype.
        for (Class<?> superType : LumpiUtil.getSuperTypes(type, false, true, true)) {
            if (ignoredForTypes.contains(superType)) {
                result.put(descriptorName, descriptorType);
                break;
            }
        }
    }
    return result;
}
From source file:Main.java
/**
 * Groups a collection of maps by the value each map holds under
 * {@code keyName}.
 *
 * Entries whose value for {@code keyName} is null are skipped. The raw
 * (pre-generics) signature is kept for backward compatibility with existing
 * callers.
 *
 * @param collection collection of Map instances; may be null or empty
 * @param keyName    the key whose value is used for grouping
 * @return map from group value to the List of maps sharing it; never null
 */
public static Map groupMap(Collection collection, String keyName) {
    Map map = new HashMap();
    if (collection == null || collection.isEmpty()) {
        return map;
    }
    for (Object element : collection) {
        Map eachMap = (Map) element;
        Object key = eachMap.get(keyName);
        if (key == null) {
            // No grouping value: leave this entry out entirely, as before.
            continue;
        }
        // Single lookup instead of containsKey + get.
        List groupList = (List) map.get(key);
        if (groupList == null) {
            groupList = new ArrayList();
            map.put(key, groupList);
        }
        groupList.add(eachMap);
    }
    return map;
}
From source file:edu.byu.nlp.data.app.AnnotationStream2Annotators.java
public static double[][][] aggregateAnnotatorParameterClusters(double[][][] annotatorParameters, int[] clusterAssignments) { // group clustered parameters Map<Integer, Set<double[][]>> clusterMap = Maps.newHashMap(); for (int i = 0; i < clusterAssignments.length; i++) { int clusterAssignment = clusterAssignments[i]; if (!clusterMap.containsKey(clusterAssignment)) { clusterMap.put(clusterAssignment, Sets.<double[][]>newIdentityHashSet()); }//from w ww . ja va 2 s .co m clusterMap.get(clusterAssignment).add(annotatorParameters[i]); } // aggregate clustered parameters List<double[][]> clusteredAnnotatorParameters = Lists.newArrayList(); for (Set<double[][]> cluster : clusterMap.values()) { double[][][] clusterTensor = cluster.toArray(new double[][][] {}); double[][] averagedConfusions = Matrices.sumOverFirst(clusterTensor); Matrices.divideToSelf(averagedConfusions, cluster.size()); clusteredAnnotatorParameters.add(averagedConfusions); } // re-assign confusions return clusteredAnnotatorParameters.toArray(new double[][][] {}); }
From source file:com.ms.commons.test.classloader.util.VelocityTemplateUtil.java
/**
 * Evaluates {@code text} as a Velocity template against {@code context} and
 * returns the rendered result.
 *
 * The map is exposed to Velocity through a read-only {@link Context} adapter;
 * when a {@code converter} is supplied, keys are translated in both directions
 * (template key to map key on lookup, map key to template key when listing).
 *
 * @param context   the values available to the template
 * @param text      the template source
 * @param converter optional key translator; null means keys are used verbatim
 * @return the merged template output
 * @throws RuntimeException wrapping any Velocity evaluation failure
 */
public static String mergeContent(final Map<Object, Object> context, String text, final KeyConverter converter) {
    StringReader sr = new StringReader(text);
    StringWriter sw = new StringWriter();
    Context c = new Context() {
        public boolean containsKey(Object key) {
            // isToPro=true: translate the template-side key to the map-side key.
            return context.containsKey(convertKey(key, true));
        }

        public Object get(String key) {
            return context.get(convertKey(key, true));
        }

        public Object[] getKeys() {
            // isToPro=false: translate map-side keys back to template-side keys.
            Object[] keys = context.keySet().toArray();
            Object[] cKeys = new Object[keys.length];
            for (int i = 0; i < keys.length; i++) {
                cKeys[i] = convertKey(keys[i], false);
            }
            return cKeys;
        }

        public Object put(String key, Object value) {
            // Read-only adapter: template writes are silently ignored.
            return null;
        }

        public Object remove(Object key) {
            // Read-only adapter: removals are silently ignored.
            return null;
        }

        protected Object convertKey(Object key, boolean isToPro) {
            if (converter == null) {
                return key;
            } else {
                return converter.convert(key, isToPro);
            }
        }
    };
    try {
        // The class simple name is only used as the log tag for this evaluation.
        VELOCITY_ENGINE.evaluate(c, sw, VelocityTemplateUtil.class.getSimpleName(), sr);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    return sw.toString();
}
From source file:Main.java
public static Object setObjectFileValue(Object obj, Map<String, String> data) throws Exception { Class<?> cls = obj.getClass(); Field[] fields = cls.getDeclaredFields(); for (Field field : fields) { Class<?> clsType = field.getType(); String name = field.getName(); String strSet = "set" + name.substring(0, 1).toUpperCase() + name.substring(1, name.length()); Method methodSet = cls.getDeclaredMethod(strSet, clsType); if (data.containsKey(name)) { Object objValue = typeConversion(clsType, data.get(name)); methodSet.invoke(obj, objValue); }//from w w w . j a va 2 s. co m } return obj; }
From source file:com.ntsync.shared.ContactGroup.java
/**
 * Creates and returns an instance of the ContactGroup from encrypted data.
 *
 * The row identity is resolved from either the server row id embedded in
 * {@code values} or, when that is absent or different, from {@code rowId}
 * parsed as the client-side raw id. The encrypted text payload is decoded and
 * parsed as JSON for the title and notes fields.
 *
 * @param rowId      server row id or client raw id as a string
 * @param values     field id to encrypted payload
 * @param privateKey key used to decrypt the text payload
 * @return the parsed group, or null when the payload is missing or unparsable
 * @throws InvalidKeyException when decryption fails due to a bad key
 */
public static ContactGroup valueOf(String rowId, Map<Byte, ByteBuffer> values, Key privateKey)
        throws InvalidKeyException {
    try {
        String sourceId = null;
        Long rawId = null;
        if (values.containsKey(GroupConstants.SERVERROW_ID)) {
            sourceId = readRawString(values.get(GroupConstants.SERVERROW_ID));
        }
        if (sourceId == null || !sourceId.equals(rowId)) {
            // If ServerContactId is different, then rowId is the clientId
            rawId = Long.parseLong(rowId);
        }
        // When sourceId is null, rawId was assigned above, so the unboxing
        // in rawId < 0 is safe here (short-circuit guards the other case).
        if (sourceId == null && rawId < 0) {
            throw new IllegalArgumentException("Missing RowId in data");
        }
        AEADBlockCipher cipher = CryptoHelper.getCipher();
        // Deletion is signaled purely by the presence of the DELETED field.
        final boolean deleted = values.containsKey(GroupConstants.DELETED);
        final String textData = CryptoHelper.decodeStringValue(GroupConstants.TEXTDATA, values, cipher,
                privateKey);
        if (textData == null && !deleted) {
            LOG.error("No textdata found for row with Id:" + rowId);
            return null;
        }
        String title = null;
        String notes = null;
        if (!isEmpty(textData)) {
            // Streaming JSON parse of the decrypted payload: a flat object with
            // TITLE and NOTES fields; anything else is logged and skipped.
            JsonFactory fac = new JsonFactory();
            JsonParser jp = fac.createParser(textData);
            jp.nextToken();
            while (jp.nextToken() != JsonToken.END_OBJECT) {
                String fieldname = jp.getCurrentName();
                // move to value, or START_OBJECT/START_ARRAY
                jp.nextToken();
                if (GroupConstants.TITLE.equals(fieldname)) {
                    title = jp.getValueAsString();
                } else if (GroupConstants.NOTES.equals(fieldname)) {
                    notes = jp.getValueAsString();
                } else {
                    LOG.error("Unrecognized field for row with Id:" + rowId + " Fieldname:" + fieldname);
                }
            }
            jp.close();
        }
        // Last-modified arrives as epoch millis in string form; absent means null.
        String modStr = readRawString(values.get(GroupConstants.MODIFIED));
        Date lastModified = null;
        if (!isEmpty(modStr)) {
            lastModified = new Date(Long.parseLong(modStr));
        }
        return new ContactGroup(rawId, sourceId, title, notes, deleted, lastModified, -1);
    } catch (InvalidCipherTextException ex) {
        throw new InvalidKeyException("Invalid key detected.", ex);
    } catch (final Exception ex) {
        // Best-effort parse: any other failure is logged and reported as null.
        LOG.info("Error parsing contactgroup data. Reason:" + ex.toString(), ex);
    }
    return null;
}