List of usage examples for java.util.ArrayList.isEmpty()
public boolean isEmpty()
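Returns true if this list contains no elements. Before the project examples below, here is a minimal self-contained sketch of the guard pattern every snippet on this page uses: check isEmpty() before reading from or iterating over the list. The class and variable names here are illustrative only, not taken from any of the projects below.

import java.util.ArrayList;
import java.util.List;

public class IsEmptyDemo {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();

        // A newly created list has no elements, so isEmpty() returns true.
        System.out.println(names.isEmpty());   // true

        names.add("alice");
        System.out.println(names.isEmpty());   // false

        // The guard pattern used throughout the examples below:
        // only touch the contents when the list is non-empty.
        if (!names.isEmpty()) {
            System.out.println("first entry: " + names.get(0));
        }
    }
}

isEmpty() is equivalent to size() == 0; the examples below prefer it because it states the intent directly.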
From source file:MSUmpire.FragmentLib.FragmentLibManager.java
public void ImportFragLibTopFrag(ArrayList<LCMSID> LCMSIDList, float Freq, int topNFrag) {
    FragmentSelection(LCMSIDList, Freq, topNFrag);
    for (LCMSID lcmsid : LCMSIDList) {
        for (PepIonID pepIonID : lcmsid.GetPepIonList().values()) {
            if (!PeptideFragmentLib.containsKey(pepIonID.GetKey())) {
                PepFragmentLib fraglib = new PepFragmentLib();
                fraglib.Sequence = pepIonID.Sequence;
                fraglib.ModificationString = pepIonID.GetModificationString();
                fraglib.Charge = pepIonID.Charge;
                fraglib.ModSequence = pepIonID.ModSequence;
                fraglib.PrecursorMz = pepIonID.NeutralPrecursorMz();
                fraglib.MS1Score = pepIonID.PeakClusterScore;
                fraglib.RetentionTime.add(pepIonID.PeakRT);
                if (pepIonID.MaxProbability > fraglib.MaxProbability) {
                    fraglib.MaxProbability = pepIonID.MaxProbability;
                }
                if (pepIonID.PeakClusterScore > fraglib.MS1Score) {
                    fraglib.MS1Score = pepIonID.PeakClusterScore;
                }
                PeptideFragmentLib.put(pepIonID.GetKey(), fraglib);
            }
            if (pepIonID.FragmentPeaks != null && !pepIonID.FragmentPeaks.isEmpty()) {
                //PeptideFragmentLib.get(pepIonID.GetKey()).AddFragments(pepIonID.FragmentPeaks);
                // Keep only the fragments that were selected as top fragments for this peptide ion.
                ArrayList<FragmentPeak> frags = new ArrayList<>();
                for (FragmentPeak fra : pepIonID.FragmentPeaks) {
                    if (fragselection.TopFrags.get(pepIonID.GetKey()).contains(fra.GetFragKey())) {
                        frags.add(fra);
                    }
                }
                if (!frags.isEmpty()) {
                    PeptideFragmentLib.get(pepIonID.GetKey()).AddFragments(frags);
                } else {
                    Logger.getRootLogger().warn("Skipped peptide ion: " + pepIonID.GetKey()
                            + " because it does not have enough matched fragments from file: "
                            + lcmsid.mzXMLFileName);
                }
            } else {
                Logger.getRootLogger().warn("Skipped peptide ion: " + pepIonID.GetKey()
                        + " because it does not have any matched fragment from file: "
                        + lcmsid.mzXMLFileName);
            }
        }
    }
    try {
        GenerateDecoyLib();
    } catch (MatrixLoaderException ex) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(ex));
    }
}
From source file:jp.or.openid.eiwg.scim.operation.Operation.java
/**
 * Deletes the user entry with the given id.
 *
 * @param context  servlet context holding the in-memory user list
 * @param request  the HTTP request
 * @param targetId id of the user to delete
 */
public boolean deleteUserInfo(ServletContext context, HttpServletRequest request, String targetId) {
    // Reset any previous error state.
    setError(0, null, null);

    // Look up the user with the matching id.
    LinkedHashMap<String, Object> targetInfo = null;
    @SuppressWarnings("unchecked")
    ArrayList<LinkedHashMap<String, Object>> users = (ArrayList<LinkedHashMap<String, Object>>) context
            .getAttribute("Users");
    Iterator<LinkedHashMap<String, Object>> usersIt = null;
    if (users != null && !users.isEmpty()) {
        usersIt = users.iterator();
        while (usersIt.hasNext()) {
            LinkedHashMap<String, Object> userInfo = usersIt.next();
            Object id = SCIMUtil.getAttribute(userInfo, "id");
            if (id != null && id instanceof String) {
                if (targetId.equals(id.toString())) {
                    targetInfo = userInfo;
                    break;
                }
            }
        }
    }
    if (targetInfo == null) {
        // No user with that id exists.
        setError(HttpServletResponse.SC_NOT_FOUND, null, MessageConstants.ERROR_NOT_FOUND);
        return false;
    }

    // Remove the matched user and store the updated list back in the context.
    usersIt.remove();
    context.setAttribute("Users", users);
    return true;
}
From source file:org.ala.repository.Validator.java
/**
 * Validates an RDF file (parsed into a list of String[]).
 *
 * @param lines
 * @throws IllegalArgumentException
 * @throws NoSuchFieldError
 * @throws Exception
 */
protected void validateRdfFile(List<String[]> lines)
        throws IllegalArgumentException, NoSuchFieldError, Exception {
    // initialise requiredRdfFields
    ArrayList<String> requiredRdfFields = new ArrayList<String>();
    requiredRdfFields.add(Field.SCI_NAME.name);

    for (String[] data : lines) {
        logger.debug("RDF entries (" + data.length + ") = " + StringUtils.join(data, "|"));
        Assert.isTrue(data.length == FileType.RDF.getFieldCount(),
                "RDF Entry not expected size of " + FileType.RDF.getFieldCount()
                        + ", got " + data.length + " - " + StringUtils.join(data, "|"));
        if (data[1].endsWith(Field.SCI_NAME.name)) {
            // Check hasScientificName
            requiredRdfFields.remove(Field.SCI_NAME.name);
            Assert.isTrue(data[2].length() > 0, Field.SCI_NAME.name + " is empty");
        }
    }

    if (!requiredRdfFields.isEmpty()) {
        throw new NoSuchFieldError("Required fields not found: "
                + StringUtils.join(requiredRdfFields, ", "));
    }
}
From source file:edu.isi.karma.er.helper.SPARQLGeneratorUtil.java
private String generate_sparql(TriplesMap node, String node_symbol, String graph) {
    ArrayList<Object> queue = new ArrayList<>();
    queue.add(node);
    StringBuffer query = new StringBuffer();
    this.var_count = 1;
    this.prefix_list = new HashMap<>();
    this.select_params = new StringBuffer();
    HashMap<TriplesMap, String> markedTriples = new HashMap<>();
    this.ParentMapingInfoList = new HashMap<>();
    HashMap<Predicate, String> predicateList = new HashMap<>();

    // Using a BFS approach, we traverse the tree from the root node and add triples/predicates to the queue.
    while (!queue.isEmpty()) {
        Object currentObj = queue.remove(0);

        // If this is a TriplesMap, add all its RefObjects to the queue.
        // For the predicates, add only the ones that satisfy the criteria of being <...hasValue>.
        if (currentObj instanceof TriplesMap) {
            String var = "x" + var_count;
            TriplesMap triple = (TriplesMap) currentObj;
            boolean foundHasValue = false;
            List<PredicateObjectMap> predicates = triple.getPredicateObjectMaps();

            for (PredicateObjectMap p_map : predicates) {
                // If there are TriplesMaps linked to the current TriplesMap,
                // we need to save the relation/linkage between them.
                if (p_map.getObject().hasRefObjectMap()) {
                    RefObjectMap objMap = p_map.getObject().getRefObjectMap();
                    queue.add(objMap.getParentTriplesMap());
                    logger.info(triple.getSubject().getId() + " ---> "
                            + objMap.getParentTriplesMap().getSubject().getId());

                    // Maintain a list of mapping properties between triples.
                    ParentMapingInfoList.put(objMap.getParentTriplesMap().getSubject().getId(),
                            new ParentMapingInfo(triple, p_map.getPredicate()));
                } else if (!foundHasValue) {
                    if (p_map.getPredicate().getTemplate().toString()
                            .equalsIgnoreCase("<http://www.opengis.net/gml/hasValue>")) {
                        queue.add(p_map.getPredicate());
                        predicateList.put(p_map.getPredicate(), var);
                        foundHasValue = true;
                    }
                }
            }

            // If this triple is marked to be included in the query,
            // we add it to the markedTriples list and add its class type
            // to the query string. E.g.
            //   PREFIX pref1: <.../.../Input>
            //   ?x2 a pref1:
            if (foundHasValue) {
                markedTriples.put(triple, var);
                String rdfsTypes = triple.getSubject().getRdfsType().get(0).toString();
                this.prefix_list.put(rdfsTypes, "pref" + var_count);
                query.append(" ?" + var + " a pref" + var_count + ": .");

                // If the parent of this triple is also marked for the query,
                // then we add the relation between the triples to the query.
                // TriplesMap parentTriple = parent.get(triple.getSubject().getId());
                ParentMapingInfo parentTriple = ParentMapingInfoList.get(triple.getSubject().getId());
                if (parentTriple != null && markedTriples.containsKey(parentTriple.parent)) {
                    String predicate = parentTriple.predicate.getTemplate().toString();
                    // PredicateObjectMap parentPredicate = getPredicateBetweenTriples(triple, parentTriple);
                    if (predicate != null) {
                        query.append(" ?" + markedTriples.get(parentTriple.parent) + " " + predicate
                                + " ?" + var + " . ");
                    } else {
                        logger.error("predicate is null from parent : "
                                + triple.getSubject().getRdfsType().toString());
                    }
                }
            }
            var_count++;
        }
        // If it is a Predicate object, create a variable for it in the query string.
        else if (currentObj instanceof Predicate) {
            Predicate predicate = (Predicate) currentObj;
            query.append(" ?" + predicateList.get(predicate) + " " + predicate.getTemplate()
                    + " ?z" + var_count + " . \n");
            select_params.append(" ?z" + var_count);
            var_count++;
        }
        // If this is a RefObjectMap, add the child triple to the queue.
        else if (currentObj instanceof RefObjectMap) {
            RefObjectMap refObj = (RefObjectMap) currentObj;
            TriplesMap t = refObj.getParentTriplesMap();
            queue.add(t);
        }
    }

    // Generate the query from the list of prefixes and the param lists.
    Iterator<String> itr = this.prefix_list.keySet().iterator();
    StringBuffer sQuery = new StringBuffer();
    while (itr.hasNext()) {
        String key = itr.next();
        sQuery.append(" PREFIX ").append(this.prefix_list.get(key)).append(": ").append(key);
    }
    if (graph == null || graph.isEmpty()) {
        sQuery.append(" select ").append(select_params).append(" where { ").append(query.toString())
                .append(" } ");
    } else {
        sQuery.append(" select ").append(select_params).append(" where { GRAPH <").append(graph)
                .append("> { ").append(query.toString()).append(" } }");
    }
    logger.info("Generated Query : " + sQuery);
    return sQuery.toString();
}
From source file:info.savestate.saveybot.JSONFileManipulator.java
public String markOfAll(int sentenceSize) {
    JSONArray json = getJSON();
    ArrayList<String> words = new ArrayList<>();
    // Clamp the sentence size to a sane range; pick 5-10 words at random if unspecified.
    if (sentenceSize <= 0)
        sentenceSize = ((int) (Math.random() * 6)) + 5;
    if (sentenceSize > 50)
        sentenceSize = 50;
    // Collect every word from every saved message.
    for (int i = 0; i < json.length(); i++) {
        JSONObject savestate = json.getJSONObject(i);
        String[] splitMessage = savestate.getString("message").split("\\s+");
        words.addAll(Arrays.asList(splitMessage));
    }
    if (words.isEmpty()) {
        return "lmao WTF this ERROR should never happen!!! (ZERO SAVESTATES ??? WTF)";
    }
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < sentenceSize; i++) {
        sb.append(words.get(rand.nextInt(words.size()))).append(' ');
    }
    return sb.toString().trim();
}
From source file:edu.uci.ics.jung.graph.OrderedKAryTree.java
/**
 * @see edu.uci.ics.jung.graph.Hypergraph#getIncidentEdges(java.lang.Object)
 */
public Collection<E> getIncidentEdges(V vertex) {
    if (!containsVertex(vertex))
        return null;
    ArrayList<E> edges = new ArrayList<E>(order + 1);
    VertexData v_data = vertex_data.get(vertex);
    if (v_data.parent_edge != null)
        edges.add(v_data.parent_edge);
    if (v_data.child_edges != null) {
        for (E edge : v_data.child_edges)
            if (edge != null)
                edges.add(edge);
    }
    if (edges.isEmpty())
        return Collections.emptySet();
    return Collections.unmodifiableCollection(edges);
}
From source file:com.github.pjungermann.config.specification.constraint.AbstractConstraint.java
@Nullable
protected ConfigError validateCollection(@NotNull final Config config, @NotNull final CollectionKey key) {
    final Object collectionObject = config.get(key.collectionKey);
    if (collectionObject == null && skipNullValues()) {
        return null;
    }
    if (!(collectionObject instanceof Collection)) {
        return new NoCollectionError(key, collectionObject);
    }
    final Collection collection = (Collection) config.get(key.collectionKey);
    if (collection.isEmpty()) {
        return null;
    }

    final Object[] array = collection.toArray();
    RangeInfo rangeInfo = key.entrySelection.subListBorders(array.length);
    int from = rangeInfo.from;
    int to = rangeInfo.to;

    // adjust the collection size
    // TODO: use strict mode to create errors here as well? could also be covered by specifying the size
    if (array.length - 1 < from) {
        // no entry to check
        return null;
    }
    if (array.length < to) {
        to = array.length;
    }

    final ArrayList<ConfigError> errors = new ArrayList<>();
    for (int i = from; i < to; i++) {
        ConfigError error;
        Object entry = array[i];
        if (key.propertyKey == null) {
            error = validateValue(config, entry);
        } else if (entry instanceof Config) {
            error = validate(config, key.propertyKey);
        } else if (entry instanceof Map) {
            error = validateValue(config, ((Map) entry).get(key.propertyKey));
        } else {
            error = validateObjectProperty(config, key, entry, key.propertyKey);
        }
        if (error != null) {
            errors.add(error);
        }
    }
    if (errors.isEmpty()) {
        return null;
    }
    return new MultiConfigError(key, errors);
}
From source file:com.esri.core.geometry.OperatorImportFromGeoJsonLocal.java
@Override
public MapOGCStructure executeOGC(int import_flags, String geoJsonString, ProgressTracker progress_tracker)
        throws JSONException {
    JSONObject geoJsonObject = new JSONObject(geoJsonString);
    ArrayList<OGCStructure> structureStack = new ArrayList<OGCStructure>(0);
    ArrayList<JSONObject> objectStack = new ArrayList<JSONObject>(0);
    AttributeStreamOfInt32 indices = new AttributeStreamOfInt32(0);
    AttributeStreamOfInt32 numGeometries = new AttributeStreamOfInt32(0);

    OGCStructure root = new OGCStructure();
    root.m_structures = new ArrayList<OGCStructure>(0);
    structureStack.add(root); // add dummy root
    objectStack.add(geoJsonObject);
    indices.add(0);
    numGeometries.add(1);

    while (!objectStack.isEmpty()) {
        if (indices.getLast() == numGeometries.getLast()) {
            structureStack.remove(structureStack.size() - 1);
            indices.removeLast();
            numGeometries.removeLast();
            continue;
        }

        OGCStructure lastStructure = structureStack.get(structureStack.size() - 1);
        JSONObject lastObject = objectStack.get(objectStack.size() - 1);
        objectStack.remove(objectStack.size() - 1);
        indices.write(indices.size() - 1, indices.getLast() + 1);

        String typeString = lastObject.getString("type");

        if (typeString.equalsIgnoreCase("GeometryCollection")) {
            OGCStructure next = new OGCStructure();
            next.m_type = 7;
            next.m_structures = new ArrayList<OGCStructure>(0);
            lastStructure.m_structures.add(next);
            structureStack.add(next);

            JSONArray geometries = getJSONArray(lastObject, "geometries");
            indices.add(0);
            numGeometries.add(geometries.length());
            for (int i = geometries.length() - 1; i >= 0; i--)
                objectStack.add(geometries.getJSONObject(i));
        } else {
            int ogcType;
            if (typeString.equalsIgnoreCase("Point"))
                ogcType = 1;
            else if (typeString.equalsIgnoreCase("LineString"))
                ogcType = 2;
            else if (typeString.equalsIgnoreCase("Polygon"))
                ogcType = 3;
            else if (typeString.equalsIgnoreCase("MultiPoint"))
                ogcType = 4;
            else if (typeString.equalsIgnoreCase("MultiLineString"))
                ogcType = 5;
            else if (typeString.equalsIgnoreCase("MultiPolygon"))
                ogcType = 6;
            else
                throw new UnsupportedOperationException();

            Geometry geometry = importGeometryFromGeoJson_(import_flags, Geometry.Type.Unknown, lastObject);

            OGCStructure leaf = new OGCStructure();
            leaf.m_type = ogcType;
            leaf.m_geometry = geometry;
            lastStructure.m_structures.add(leaf);
        }
    }

    MapOGCStructure mapOGCStructure = new MapOGCStructure();
    mapOGCStructure.m_ogcStructure = root;
    mapOGCStructure.m_spatialReference = importSpatialReferenceFromGeoJson_(geoJsonObject);
    return mapOGCStructure;
}
From source file:lux.solr.XQueryComponent.java
protected void evaluateQuery(ResponseBuilder rb, int start, int len) {
    String query = rb.getQueryString();
    SolrQueryRequest req = rb.req;
    SolrQueryResponse rsp = rb.rsp;
    if (StringUtils.isBlank(query)) {
        rsp.add("xpath-error", "query was blank");
        return;
    }
    SolrParams params = req.getParams();
    long timeAllowed = (long) params.getInt(CommonParams.TIME_ALLOWED, -1);
    XQueryExecutable expr;
    LuxSearcher searcher = new LuxSearcher(rb.req.getSearcher());
    DocWriter docWriter = new SolrDocWriter(this, rb.req.getCore());
    Compiler compiler = solrIndexConfig.getCompiler();

    SolrQueryContext context = new SolrQueryContext(this, req);
    if (rb.shards != null && rb.req.getParams().getBool("distrib", true)) {
        // This is a distributed request; pass in the ResponseBuilder so it will be
        // available to a subquery.
        context.setResponseBuilder(rb);
        // also capture the current set of shards
        shards = rb.shards;
        slices = rb.slices;
    }
    SolrSearchService searchService = new SolrSearchService(context, new LuxSearchQueryParser());
    Evaluator eval = new Evaluator(compiler, searcher, docWriter, searchService);
    // track which evaluator we are using in a threadlocal container
    evalHolder.set(eval);
    TransformErrorListener errorListener = eval.getErrorListener();
    try {
        URI baseURI = queryPath == null ? null : java.net.URI.create(queryPath);
        expr = compiler.compile(query, errorListener, baseURI, null);
    } catch (LuxException ex) {
        // ex.printStackTrace();
        String err = formatError(query, errorListener);
        if (StringUtils.isEmpty(err)) {
            err = ex.getMessage();
        }
        rsp.add("xpath-error", err);
        // don't close: this forces a commit()
        // evaluator.close();
        return;
    }
    // SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();
    NamedList<Object> xpathResults = new NamedList<Object>();
    long tstart = System.currentTimeMillis();
    int count = 0;
    bindRequestVariables(rb, req, expr, compiler, eval, context);
    Iterator<XdmItem> queryResults = eval.iterator(expr, context);
    String err = null;
    while (queryResults.hasNext()) {
        XdmItem xpathResult = queryResults.next();
        if (++count < start) {
            continue;
        }
        if (count == 1 && !xpathResult.isAtomicValue()) {
            net.sf.saxon.s9api.QName name = ((XdmNode) xpathResult).getNodeName();
            if (name != null && name.getNamespaceURI().equals(EXPATH_HTTP_NS)
                    && name.getLocalName().equals("response")) {
                err = handleEXPathResponse(req, rsp, xpathResults, xpathResult);
                if (queryResults.hasNext()) {
                    logger.warn("Ignoring results following http:response, which should be the sole item in its result");
                }
                break;
            }
        }
        err = safeAddResult(xpathResults, xpathResult);
        if (err != null) {
            xpathResult = null;
            break;
        }
        if ((len > 0 && xpathResults.size() >= len)
                || (timeAllowed > 0 && (System.currentTimeMillis() - tstart) > timeAllowed)) {
            break;
        }
    }
    ArrayList<TransformerException> errors = eval.getErrorListener().getErrors();
    if (!errors.isEmpty()) {
        err = formatError(query, errors, eval.getQueryStats());
        if (xpathResults.size() == 0) {
            xpathResults = null; // throw a 400 error; don't return partial results
        }
    }
    if (err != null) {
        rsp.add("xpath-error", err);
    }
    if (rb.getResults() == null) {
        // Create a dummy doc list if previous query processing didn't retrieve any docs.
        // In distributed operation there will be doc results, otherwise none.
        SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();
        result.setDocList(new DocSlice(0, 0, null, null, eval.getQueryStats().docCount, 0));
        rb.setResult(result);
        rsp.add("response", rb.getResults().docList);
    }
    if (xpathResults != null) {
        rsp.add("xpath-results", xpathResults);
        if (logger.isDebugEnabled()) {
            logger.debug("retrieved: " + eval.getDocReader().getCacheMisses() + " docs, "
                    + xpathResults.size() + " results, " + (System.currentTimeMillis() - tstart) + "ms");
        }
    } else {
        logger.warn("xquery evaluation error: " + eval.getDocReader().getCacheMisses() + " docs, "
                + "0 results, " + (System.currentTimeMillis() - tstart) + "ms");
    }
    if (err == null && context.isCommitPending()) {
        doCommit();
    }
}
From source file:jp.or.openid.eiwg.scim.operation.Operation.java
/**
 * Searches the user entries, optionally restricted to a single id and/or a filter expression.
 *
 * @param context    servlet context holding the in-memory user list
 * @param request    the HTTP request
 * @param targetId   id of a specific user to match, or null to search all users
 * @param attributes comma-separated list of attribute names to return
 * @param filter     SCIM filter expression
 * @param sortBy     sort attribute (not handled here)
 * @param sortOrder  sort order (not handled here)
 * @param startIndex 1-based start index for paging (not handled here)
 * @param count      maximum number of results per page (not handled here)
 */
public ArrayList<LinkedHashMap<String, Object>> searchUserInfo(ServletContext context,
        HttpServletRequest request, String targetId, String attributes, String filter, String sortBy,
        String sortOrder, String startIndex, String count) {
    ArrayList<LinkedHashMap<String, Object>> result = null;
    Set<String> returnAttributeNameSet = new HashSet<>();

    // Reset any previous error state.
    setError(0, null, null);

    // Validate the requested attribute names.
    if (attributes != null && !attributes.isEmpty()) {
        String[] tempList = attributes.split(",");
        for (int i = 0; i < tempList.length; i++) {
            String attributeName = tempList[i].trim();
            // Check that the attribute name exists in the schema.
            LinkedHashMap<String, Object> attributeSchema = SCIMUtil.getUserAttributeInfo(context,
                    attributeName, true);
            if (attributeSchema != null && !attributeSchema.isEmpty()) {
                returnAttributeNameSet.add(attributeName);
            } else {
                // Unknown attribute name: return a 400 error.
                String message = String.format(MessageConstants.ERROR_INVALID_ATTRIBUTES, attributeName);
                setError(HttpServletResponse.SC_BAD_REQUEST, null, message);
                return result;
            }
        }
    }

    // Sorting (sortBy / sortOrder) and pagination (startIndex / count)
    // are not handled in this implementation.

    result = new ArrayList<LinkedHashMap<String, Object>>();

    // Scan the user list.
    @SuppressWarnings("unchecked")
    ArrayList<LinkedHashMap<String, Object>> users = (ArrayList<LinkedHashMap<String, Object>>) context
            .getAttribute("Users");
    if (users != null && !users.isEmpty()) {
        Iterator<LinkedHashMap<String, Object>> usersIt = users.iterator();
        while (usersIt.hasNext()) {
            boolean isMatched = false;
            LinkedHashMap<String, Object> userInfo = usersIt.next();

            // If a target id was given, only consider the user with that id.
            if (targetId != null && !targetId.isEmpty()) {
                Object id = SCIMUtil.getAttribute(userInfo, "id");
                if (id != null && id instanceof String) {
                    if (targetId.equals(id.toString())) {
                        if (filter != null && !filter.isEmpty()) {
                            // Apply the filter expression.
                            boolean matched = false;
                            try {
                                matched = SCIMUtil.checkUserSimpleFilter(context, userInfo, filter);
                            } catch (SCIMUtilException e) {
                                result = null;
                                setError(e.getCode(), e.getType(), e.getMessage());
                                break;
                            }
                            if (matched) {
                                isMatched = true;
                            }
                        } else {
                            isMatched = true;
                        }
                    }
                }
            } else {
                if (filter != null && !filter.isEmpty()) {
                    // Apply the filter expression.
                    boolean matched = false;
                    try {
                        matched = SCIMUtil.checkUserSimpleFilter(context, userInfo, filter);
                    } catch (SCIMUtilException e) {
                        result = null;
                        setError(e.getCode(), e.getType(), e.getMessage());
                        break;
                    }
                    if (matched) {
                        isMatched = true;
                    }
                } else {
                    isMatched = true;
                }
            }

            if (isMatched) {
                // Copy the returnable attributes into the result entry.
                LinkedHashMap<String, Object> resultInfo = new LinkedHashMap<String, Object>();
                Iterator<String> attributeIt = userInfo.keySet().iterator();
                while (attributeIt.hasNext()) {
                    String attributeName = attributeIt.next();
                    // Skip attributes whose schema marks them as never returned.
                    LinkedHashMap<String, Object> attributeSchema = SCIMUtil.getUserAttributeInfo(context,
                            attributeName, true);
                    Object returned = attributeSchema.get("returned");
                    if (returned != null && returned.toString().equalsIgnoreCase("never")) {
                        continue;
                    }
                    Object attributeValue = userInfo.get(attributeName);
                    resultInfo.put(attributeName, attributeValue);
                }
                result.add(resultInfo);
            }
        }
    }
    return result;
}