List of usage examples for java.util Collection removeAll
boolean removeAll(Collection<?> c);
From source file:ubic.gemma.persistence.service.association.coexpression.CoexpressionDaoImpl.java
/** * Mark the genes as being tested for coexpression in the data set and persist the information in the database. This * is run at the tail end of coexpression analysis for the data set. * * @param ee the data set//from w ww .ja v a 2 s . c om * @param genesTested the genes */ private void updatedTestedIn(BioAssaySet ee, Collection<Gene> genesTested) { Session sess = this.getSessionFactory().getCurrentSession(); Query q = sess.createQuery("from GeneCoexpressionTestedIn where geneId in (:ids)"); Set<Long> seenGenes = new HashSet<>(); Collection<Long> geneids = EntityUtils.getIds(genesTested); BatchIterator<Long> bi = new BatchIterator<>(geneids, 512); for (; bi.hasNext();) { q.setParameterList("ids", bi.next()); List<GeneCoexpressionTestedIn> list = q.list(); int count = 0; for (GeneCoexpressionTestedIn gcti : list) { // int old = gcti.getNumIds(); // debug code gcti.addEntity(ee.getId()); sess.update(gcti); // gcti.setBytes( gcti.getBytes() ); assert gcti.isIncluded(ee.getId()); seenGenes.add(gcti.getGeneId()); if (++count % 256 == 0) { sess.flush(); sess.clear(); } } } if (!seenGenes.isEmpty()) { CoexpressionDaoImpl.log.info("Updated tested-in information for " + seenGenes.size() + " genes"); this.geneTestedInCache.clearCache(); // TODO do it just for the genes changed. } sess.flush(); sess.clear(); // discover genes which don't have an entry at all. geneids.removeAll(seenGenes); if (geneids.isEmpty()) { return; } CoexpressionDaoImpl.log.info("Adding tested-in information for " + geneids.size() + " genes"); int count = 0; for (Long id : geneids) { GeneCoexpressionTestedIn gcti = new GeneCoexpressionTestedIn(id); gcti.addEntity(ee.getId()); assert gcti.isIncluded(ee.getId()); assert gcti.getNumIds() == 1; sess.save(gcti); if (++count % 256 == 0) { sess.flush(); sess.clear(); } } }
From source file:org.sakaiproject.tool.assessment.facade.AssessmentGradingFacadeQueries.java
public void saveOrUpdateAll(Collection<ItemGradingData> c) { int retryCount = persistenceHelper.getRetryCount().intValue(); c.removeAll(Collections.singleton(null)); while (retryCount > 0) { try {// www . j a v a 2 s . c o m for (ItemGradingData itemGradingData : c) { getHibernateTemplate().saveOrUpdate(itemGradingData); } retryCount = 0; } catch (Exception e) { log.warn("problem inserting assessmentGrading: " + e.getMessage()); retryCount = persistenceHelper.retryDeadlock(e, retryCount); } } }
From source file:gda.data.scan.datawriter.NexusDataWriter.java
/** * this is run when processing the first ScanDataPoint * the file is in the root node/*from w w w .java 2 s . com*/ * we add all the one off metadata here */ protected Collection<Scannable> makeConfiguredScannablesAndMonitors( Collection<Scannable> scannablesAndMonitors) { Set<String> metadatascannablestowrite = new HashSet<String>(metadatascannables); for (Detector det : thisPoint.getDetectors()) { logger.info("found detector named: " + det.getName()); String detname = det.getName(); if (metadataScannablesPerDetector.containsKey(detname)) { HashSet<String> metasPerDet = metadataScannablesPerDetector.get(detname); if (metasPerDet != null && !metasPerDet.isEmpty()) { metadatascannablestowrite.addAll(metasPerDet); } } } try { file.opengroup(this.entryName, "NXentry"); Set<Scannable> wehavewritten = new HashSet<Scannable>(); for (Iterator<Scannable> iterator = scannablesAndMonitors.iterator(); iterator.hasNext();) { Scannable scannable = iterator.next(); String scannableName = scannable.getName(); if (weKnowTheLocationFor(scannableName)) { wehavewritten.add(scannable); Collection<String> prerequisites = locationmap.get(scannableName) .getPrerequisiteScannableNames(); if (prerequisites != null) metadatascannablestowrite.addAll(prerequisites); scannableID.addAll(locationmap.get(scannableName).makeScannable(file, scannable, getSDPositionFor(scannableName), generateDataDim(false, scanDimensions, null))); } } int oldsize; do { // add dependencies of metadata scannables oldsize = metadatascannablestowrite.size(); Set<String> aux = new HashSet<String>(); for (String s : metadatascannablestowrite) { if (weKnowTheLocationFor(s)) { Collection<String> prerequisites = locationmap.get(s).getPrerequisiteScannableNames(); if (prerequisites != null) aux.addAll(prerequisites); } } metadatascannablestowrite.addAll(aux); } while (metadatascannablestowrite.size() > oldsize); // remove the ones in the scan, as they are not metadata for (Scannable scannable : scannablesAndMonitors) { 
metadatascannablestowrite.remove(scannable.getName()); } // only use default writing for the ones we haven't written yet scannablesAndMonitors.removeAll(wehavewritten); makeMetadataScannables(metadatascannablestowrite); // Close NXentry file.closegroup(); } catch (NexusException e) { // FIXME NexusDataWriter should allow exceptions to be thrown logger.error("TODO put description of error here", e); } return scannablesAndMonitors; }
From source file:com.joliciel.jochre.graphics.SegmenterImpl.java
/** * Split a shape into 2 or more shapes, in the case where two letters have been mistakenly joined together. * @param shape the shape to split/*from www . j a v a 2s . co m*/ * @param sourceImage the source image containing this shape * @param maxBridgeWidth maximum width of a bridge between the two letters (measured vertically) * @param minLetterWeight minimum pixel count for a shape portion to be counted a separate letter * @param maxOverlap maximum vertical overlap (in pixels) between a right-hand and left-hand shape to be counted as separate letters * @return List of Shape, where the list is empty if no split was performed */ List<Shape> splitShape(Shape shape, SourceImage sourceImage, int maxBridgeWidth, int minLetterWeight, int maxOverlap) { LOG.debug("Trying to split shape: " + shape.toString()); LOG.debug("maxBridgeWidth " + maxBridgeWidth); LOG.debug("minLetterWeight " + minLetterWeight); LOG.debug("maxOverlap " + maxOverlap); Collection<BridgeCandidate> bridgeCandidates = ((ShapeInternal) shape).getBridgeCandidates(maxBridgeWidth); if (bridgeCandidates.size() > 0) { // (B) weight of right shape & weight of left shape > a certain threshold // (C) little overlap right boundary of left shape, left boundary of right shape // check if the right and left weight of each bridge candidate is sufficiently big LOG.debug("minLetterWeight: " + minLetterWeight); LOG.debug("maxOverlap: " + maxOverlap); LOG.debug("Eliminating candidates based on pixel count and overlap"); Set<BridgeCandidate> candidatesToEliminate = new HashSet<BridgeCandidate>(); for (BridgeCandidate candidate : bridgeCandidates) { LOG.debug("Bridge candidate: leftPixels = " + candidate.leftPixels + ", rightPixels = " + candidate.rightPixels); LOG.debug("leftShapeRightBoundary = " + candidate.leftShapeRightBoundary + ", rightShapeLeftBoundary = " + candidate.rightShapeLeftBoundary); boolean isBridge = true; if (candidate.rightPixels < minLetterWeight || candidate.leftPixels < minLetterWeight) isBridge 
= false; if (candidate.leftShapeRightBoundary - candidate.rightShapeLeftBoundary > maxOverlap) isBridge = false; if (!isBridge) candidatesToEliminate.add(candidate); } bridgeCandidates.removeAll(candidatesToEliminate); LOG.debug("Remaining bridge candidate size: " + bridgeCandidates.size()); } // have candidates List<Shape> shapes = new ArrayList<Shape>(); // apply any splits detected if (bridgeCandidates.size() > 0) { int[] startingPoint = shape.getStartingPoint(); int startX = startingPoint[0]; int startY = startingPoint[1]; for (BridgeCandidate bridge : bridgeCandidates) { bridge.leftGroup.touched = false; bridge.rightGroup.touched = false; } // perform split for (BridgeCandidate bridge : bridgeCandidates) { Shape leftShape = graphicsService.getDot(sourceImage, startX, startY); leftShape.setLeft(shape.getRight()); leftShape.setRight(shape.getLeft()); leftShape.setTop(shape.getBottom()); leftShape.setBottom(shape.getTop()); Shape rightShape = graphicsService.getDot(sourceImage, startX, startY); rightShape.setLeft(shape.getRight()); rightShape.setRight(shape.getLeft()); rightShape.setTop(shape.getBottom()); rightShape.setBottom(shape.getTop()); Stack<VerticalLineGroup> groupStack = new Stack<VerticalLineGroup>(); groupStack.push(bridge.leftGroup); while (!groupStack.isEmpty()) { VerticalLineGroup lineGroup = groupStack.pop(); if (lineGroup.touched) continue; lineGroup.touched = true; LOG.debug("Touching group, pixelCount: " + lineGroup.pixelCount + ", leftBoundary: " + lineGroup.leftBoundary + ", rightBoundary: " + lineGroup.rightBoundary); if (shape.getLeft() + lineGroup.leftBoundary < leftShape.getLeft()) leftShape.setLeft(shape.getLeft() + lineGroup.leftBoundary); if (shape.getLeft() + lineGroup.rightBoundary > leftShape.getRight()) leftShape.setRight(shape.getLeft() + lineGroup.rightBoundary); if (shape.getTop() + lineGroup.topBoundary < leftShape.getTop()) leftShape.setTop(shape.getTop() + lineGroup.topBoundary); if (shape.getTop() + lineGroup.bottomBoundary 
> leftShape.getBottom()) leftShape.setBottom(shape.getTop() + lineGroup.bottomBoundary); for (BridgeCandidate leftCandidate : lineGroup.leftCandidates) { if (!bridge.equals(leftCandidate) && !(bridgeCandidates.contains(leftCandidate))) { groupStack.push(leftCandidate.leftGroup); } } for (BridgeCandidate rightCandidate : lineGroup.rightCandidates) { if (!bridge.equals(rightCandidate) && !(bridgeCandidates.contains(rightCandidate))) { groupStack.push(rightCandidate.rightGroup); } } } // next left group groupStack.push(bridge.rightGroup); while (!groupStack.isEmpty()) { VerticalLineGroup lineGroup = groupStack.pop(); if (lineGroup.touched) continue; lineGroup.touched = true; LOG.debug("Touching group, pixelCount: " + lineGroup.pixelCount + ", leftBoundary: " + lineGroup.leftBoundary + ", rightBoundary: " + lineGroup.rightBoundary); if (shape.getLeft() + lineGroup.leftBoundary < rightShape.getLeft()) rightShape.setLeft(shape.getLeft() + lineGroup.leftBoundary); if (shape.getLeft() + lineGroup.rightBoundary > rightShape.getRight()) rightShape.setRight(shape.getLeft() + lineGroup.rightBoundary); if (shape.getTop() + lineGroup.topBoundary < rightShape.getTop()) rightShape.setTop(shape.getTop() + lineGroup.topBoundary); if (shape.getTop() + lineGroup.bottomBoundary > rightShape.getBottom()) rightShape.setBottom(shape.getTop() + lineGroup.bottomBoundary); for (BridgeCandidate leftCandidate : lineGroup.leftCandidates) { if (!bridge.equals(leftCandidate) && !(bridgeCandidates.contains(leftCandidate))) { groupStack.push(leftCandidate.leftGroup); } } for (BridgeCandidate rightCandidate : lineGroup.rightCandidates) { if (!bridge.equals(rightCandidate) && !(bridgeCandidates.contains(rightCandidate))) { groupStack.push(rightCandidate.rightGroup); } } } // next right group if (leftShape.getWidth() > 0) { LOG.debug("Adding left split: " + leftShape); shapes.add(leftShape); } if (rightShape.getWidth() > 0) { LOG.debug("Adding right split: " + rightShape); shapes.add(rightShape); } } 
// next bridge } // do we have any bridges? // TODO: we need to join split shapes back together when more than 1 split is applied // and the shape in the middle is too small on its own (< minPixelCount) return shapes; }
From source file:ubic.gemma.core.search.SearchServiceImpl.java
/**
 * Remove from {@code results} every SearchResult whose taxon does not match the taxon in the
 * settings.
 *
 * FIXME this comes too late in the process to be effective - for queries that may retrieve many results, we have to
 * filter as we go.
 *
 * @param excludeWithoutTaxon if true: If the SearchResults have no "getTaxon" method then the results will get
 *        filtered out Results with no taxon associated will also get removed.
 */
private void filterByTaxon(SearchSettings settings, Collection<SearchResult> results,
        boolean excludeWithoutTaxon) {
    if (settings.getTaxon() == null) {
        return;
    }
    Collection<SearchResult> toRemove = new HashSet<>();
    Taxon t = settings.getTaxon();

    if (results == null)
        return;

    for (SearchResult sr : results) {
        Object o = sr.getResultObject();
        try {
            Taxon currentTaxon;
            // Known result types are resolved explicitly; anything else falls through to
            // reflective lookup of a "getTaxon" method below.
            if (o instanceof ExpressionExperiment) {
                ExpressionExperiment ee = (ExpressionExperiment) o;
                currentTaxon = expressionExperimentService.getTaxon(ee);
            } else if (o instanceof ExpressionExperimentSet) {
                ExpressionExperimentSet ees = (ExpressionExperimentSet) o;
                currentTaxon = ees.getTaxon();
            } else if (o instanceof Gene) {
                Gene gene = (Gene) o;
                currentTaxon = gene.getTaxon();
            } else if (o instanceof GeneSet) {
                GeneSet geneSet = (GeneSet) o;
                currentTaxon = geneSetService.getTaxon(geneSet);
            } else if (o instanceof CharacteristicValueObject) {
                CharacteristicValueObject charVO = (CharacteristicValueObject) o;
                currentTaxon = taxonDao.findByCommonName(charVO.getTaxon());
            } else {
                Method m = o.getClass().getMethod("getTaxon");
                currentTaxon = (Taxon) m.invoke(o);
            }
            if (currentTaxon == null || !currentTaxon.getId().equals(t.getId())) {
                if (currentTaxon == null) {
                    // Sanity check for bad data in db (could happen if EE has no samples). Can happen that
                    // searchResults have a vaild getTaxon method
                    // but the method returns null (shouldn't make it this far)
                    SearchServiceImpl.log.debug("Object has getTaxon method but it returns null. Obj is: " + o);
                }
                toRemove.add(sr);
            }
        } catch (SecurityException | IllegalArgumentException | InvocationTargetException | IllegalAccessException e) {
            // Reflection failures other than a missing method are programming errors.
            throw new RuntimeException(e);
        } catch (NoSuchMethodException e) {
            /*
             * In case of a programming error where the results don't have a taxon at all, we assume we should
             * filter them out but issue a warning.
             */
            if (excludeWithoutTaxon) {
                toRemove.add(sr);
                SearchServiceImpl.log.warn("No getTaxon method for: " + o.getClass()
                        + ". Filtering from results. Error was: " + e);
            }
        }
    }
    results.removeAll(toRemove);
}
From source file:pcgen.core.Equipment.java
/**
 * Returns a list of the types of this item.
 *
 * @param bPrimary
 *            if true return the types if the primary head, otherwise
 *            return the types of the secondary head
 * @return a list of the types of this item.
 */
private List<String> typeList(final boolean bPrimary) {
    // Serve from per-head cache when available; caches are filled at the end of this method.
    if (bPrimary && usePrimaryCache) {
        return typeListCachePrimary;
    }
    if (!bPrimary && useSecondaryCache) {
        return typeListCacheSecondary;
    }

    // Use the primary type(s) if none defined for secondary
    List<Type> initializingList = getEquipmentHead(2).getListFor(ListKey.TYPE);
    if (bPrimary || (initializingList == null) || initializingList.isEmpty()) {
        initializingList = getTrueTypeList(false);
    } else if (!isDouble()) {
        return new ArrayList<>();
    }

    Set<String> calculatedTypeList = new LinkedHashSet<>();
    if (initializingList != null) {
        for (Type t : initializingList) {
            calculatedTypeList.add(t.getComparisonString());
        }
    }

    final Collection<String> modTypeList = new ArrayList<>();

    //
    // Add in all type modfiers from "ADDTYPE" modifier
    //
    EquipmentModifier aEqMod = getEqModifierKeyed("ADDTYPE", bPrimary);
    if (aEqMod != null) {
        for (String aType : getAssociationList(aEqMod)) {
            aType = aType.toUpperCase();
            if (!calculatedTypeList.contains(aType)) {
                modTypeList.add(aType);
            }
        }
    }

    /*
     * CONSIDER I think there is a weird order of operations issue nere, need to check
     * if it existed way back, e.g. SVN 6206. The issue is if a Type is introduced by a
     * MOD, then the ChangeArmorType system doesn't seem to be able to grab/modify it
     * Is that correct? - thpr 10/3/08
     */
    //
    // Add in all of the types from each EquipmentModifier
    // currently applied to this piece of equipment
    //
    final List<EquipmentModifier> eqModList = getEqModifierList(bPrimary);
    for (EquipmentModifier eqMod : eqModList) {
        //
        // If we've just replaced the armor type, then make sure it is
        // not in the equipment modifier list
        //
        Set<String> newTypeList = new LinkedHashSet<>(calculatedTypeList);
        for (ChangeArmorType cat : eqMod.getSafeListFor(ListKey.ARMORTYPE)) {
            List<String> tempTypeList = cat.applyProcessor(newTypeList);
            LinkedHashSet<String> tempTypeSet = new LinkedHashSet<>(tempTypeList);
            // NOTE(review): "noMatch" is true when the sizes differ OR the sets are equal,
            // which reads inverted relative to its name; the loop only continues past this
            // processor when noMatch is false. Looks suspicious - confirm against the
            // ChangeArmorType.applyProcessor contract before changing.
            boolean noMatch = newTypeList.size() != tempTypeList.size() || newTypeList.equals(tempTypeSet);
            newTypeList = tempTypeSet;
            if (!noMatch) {
                break;
            }
        }
        // Drop from modTypeList any types the armor-type processor removed.
        Collection<String> removedTypeList = new ArrayList<>(calculatedTypeList);
        removedTypeList.removeAll(newTypeList);
        modTypeList.removeAll(removedTypeList);
        calculatedTypeList = newTypeList;

        for (String aType : eqMod.getSafeListFor(ListKey.ITEM_TYPES)) {
            aType = aType.toUpperCase();
            // If it's BOTH & MELEE, we cannot add RANGED or THROWN to it
            // BOTH is only used after the split of a Thrown weapon in 2 (melee and ranged)
            if (calculatedTypeList.contains("BOTH") && calculatedTypeList.contains("MELEE")
                    && ("RANGED".equals(aType) || "THROWN".equals(aType))) {
                continue;
            }
            if (!calculatedTypeList.contains(aType) && !modTypeList.contains(aType)) {
                modTypeList.add(aType);
            }
        }
    }
    calculatedTypeList.addAll(modTypeList);

    //
    // Make sure MAGIC tag is the 1st entry
    //
    List<String> resultingTypeList = new ArrayList<>(calculatedTypeList);
    final int idx = resultingTypeList.indexOf("MAGIC");
    if (idx > 0) {
        resultingTypeList.remove(idx);
        resultingTypeList.add(0, "MAGIC");
    }

    // Populate the cache for subsequent calls.
    if (bPrimary) {
        typeListCachePrimary = resultingTypeList;
        usePrimaryCache = true;
    } else {
        typeListCacheSecondary = resultingTypeList;
        useSecondaryCache = true;
    }
    return resultingTypeList;
}
From source file:org.fornax.cartridges.sculptor.smartclient.server.ScServlet.java
private void mapRequestToObj(HashMap<String, Object> data, Class expectedClass, Object obj) throws Exception { if (obj == null) { throw new ApplicationException("mapRequestToObj called on NULL obj", "ERR9001"); }/*from w w w . j a v a 2 s . co m*/ try { Method versionMethod = expectedClass.getMethod("getVersion", (Class<?>[]) null); Long objVersion = (Long) versionMethod.invoke(obj, (Object[]) null); String clientVersion = (String) data.get("version"); if (objVersion != null && clientVersion != null) { try { long clientVersionLong = Long.parseLong(clientVersion); if (!objVersion.equals(clientVersionLong)) { throw makeApplicationException("Can't save object", "ERR9016", (Serializable[]) null); } } catch (NumberFormatException nfe) { // Version from client isn't number - ignore } } } catch (NoSuchMethodException nme) { // No version control } Method[] methods = expectedClass.getMethods(); for (Method m : methods) { Class<?>[] paramTypes = m.getParameterTypes(); Class persistentClass = null; if (paramTypes.length == 1) { if (paramTypes[0].getAnnotation(Entity.class) != null) { persistentClass = paramTypes[0]; } else if (paramTypes[0].getAnnotation(Embeddable.class) != null) { persistentClass = paramTypes[0]; } } ServiceDescription srvParam = paramTypes.length == 1 ? 
findServiceByClassName(paramTypes[0].getName()) : null; if ((m.getName().startsWith(SET_PREFIX) && paramTypes.length == 1 && (paramTypes[0].isAssignableFrom(String.class) || paramTypes[0].equals(Integer.class) || paramTypes[0].equals(Integer.TYPE) || paramTypes[0].equals(Long.class) || paramTypes[0].equals(Long.TYPE) || paramTypes[0].equals(Float.class) || paramTypes[0].equals(Float.TYPE) || paramTypes[0].equals(Boolean.class) || paramTypes[0].equals(Boolean.TYPE) || paramTypes[0].equals(Double.class) || paramTypes[0].equals(Double.TYPE) || paramTypes[0].equals(Date.class) || Enum.class.isAssignableFrom(paramTypes[0]) || (srvParam != null && srvParam.getFindById() != null) || persistentClass != null)) || (m.getName().startsWith(GET_PREFIX) && paramTypes.length == 0 && (Set.class.isAssignableFrom(m.getReturnType()) || List.class.isAssignableFrom(m.getReturnType())))) { String fldName; if (m.getName().startsWith(GET_TRANSLATE)) { fldName = m.getName().substring(GET_TRANSLATE_LENGTH, GET_TRANSLATE_LENGTH + 1).toLowerCase() + m.getName().substring(GET_TRANSLATE_LENGTH + 1); } else { fldName = m.getName().substring(3, 4).toLowerCase() + m.getName().substring(4); } Object value = data.get(fldName); if (value == null) { fldName = m.getName().substring(3); value = data.get(fldName); } if (value != null) { Object typedVal; String val = null; if (value instanceof String) { val = (String) value; } log.log(Level.FINER, " value = " + value); if (m.getName().startsWith(GET_PREFIX) && paramTypes.length == 0 && (Set.class.isAssignableFrom(m.getReturnType()) || List.class.isAssignableFrom(m.getReturnType()))) { log.log(Level.FINER, "GET"); String attrName = m.getName().substring(3, 4).toLowerCase() + m.getName().substring(4); Type[] actualTypeArguments = null; Class iterClass = expectedClass; while (iterClass != null) { try { Field field = iterClass.getDeclaredField(attrName); ParameterizedType genericType = (ParameterizedType) field.getGenericType(); actualTypeArguments = 
genericType.getActualTypeArguments(); break; } catch (NoSuchFieldException nsfe) { // do nothing iterate again } iterClass = iterClass.getSuperclass(); iterClass = iterClass.equals(Object.class) ? null : iterClass; } if (actualTypeArguments != null && actualTypeArguments.length == 1 && actualTypeArguments[0] instanceof Class) { Class assocClass = (Class) actualTypeArguments[0]; ServiceDescription assocService = findServiceByClassName(assocClass.getName()); Collection dbValueSet = (Collection) m.invoke(obj, (Object[]) null); if (value == null || !(value instanceof HashMap)) { log.log(Level.FINE, "No data for db property {0}", attrName); } else if (assocService != null) { HashMap<String, Object> guiValueMap = (HashMap<String, Object>) value; ArrayList<Object> removeIt = new ArrayList<Object>(); Iterator dbIterator = dbValueSet.iterator(); while (dbIterator.hasNext()) { Object dbVal = dbIterator.next(); String dbValId = getIdFromObj(dbVal); if (dbValId != null) { boolean wasMatchingGuiVal = false; ArrayList<String> removeKeys = new ArrayList<String>(); for (String key : guiValueMap.keySet()) { Object object = guiValueMap.get(key); if (object instanceof HashMap) { Object guiValue = ((HashMap<String, Object>) object).get("id"); if (guiValue.equals(dbValId)) { removeKeys.add(key); wasMatchingGuiVal = true; mapRequestToObj((HashMap<String, Object>) guiValue, assocClass, dbVal); break; } } else if (object instanceof String) { // Association if (dbValId.equals(object)) { removeKeys.add(key); wasMatchingGuiVal = true; } } else { log.log(Level.WARNING, "Wrong object type from GUI under key {0}", key); } } // Remove processed elements // Direct remove is firing concurrent modification exception for (String removeKey : removeKeys) { guiValueMap.remove(removeKey); } if (!wasMatchingGuiVal) { // Is not in list comming from GUI - delete removeIt.add(dbVal); } } else { log.log(Level.WARNING, "No ID in object {0}", dbVal); } } dbValueSet.removeAll(removeIt); // Rest are new records 
for (String key : guiValueMap.keySet()) { Object object = guiValueMap.get(key); if (object instanceof HashMap) { Object subObj = makeNewInstance(assocClass, (HashMap<String, Object>) object); mapRequestToObj((HashMap<String, Object>) object, assocClass, subObj); dbValueSet.add(subObj); } else if (object instanceof String) { // Association try { Long id = new Long((String) object); Object assocObj = assocService.getFindById().invoke( assocService.getInstance(), ServiceContextStore.get(), id); if (assocObj != null) { dbValueSet.add(assocObj); } else { log.log(Level.WARNING, "Object with ID {0} not availabla via service {1}", new Object[] { id, assocService.getName() }); } } catch (Exception ex) { log.log(Level.WARNING, "No ID parsable from value {0} under key {1}", new Object[] { object, key }); } } else { log.log(Level.WARNING, "Wrong sub type {0}", attrName); } } } else if (assocClass != null) { HashMap<String, Object> guiValueMap = (HashMap<String, Object>) value; ArrayList<Object> removeIt = new ArrayList<Object>(); Iterator dbIterator = dbValueSet.iterator(); while (dbIterator.hasNext()) { Object dbVal = dbIterator.next(); String dbValId = getIdFromObj(dbVal); if (dbValId != null) { Object matchingGuiVal = null; for (String key : guiValueMap.keySet()) { Object object = guiValueMap.get(key); if (object instanceof HashMap) { HashMap<String, Object> guiVal = (HashMap<String, Object>) object; if (dbValId.equals(guiVal.get("id"))) { guiValueMap.remove(key); matchingGuiVal = guiVal; break; } } else { log.log(Level.WARNING, "Wrong object type from GUI under key {0}", key); } } if (matchingGuiVal != null) { // Coming from GUI - update mapRequestToObj((HashMap<String, Object>) matchingGuiVal, assocClass, dbVal); } else { // Not in GUI - delete removeIt.add(dbVal); } } else { log.log(Level.WARNING, "No ID in object {0}", dbVal); } } dbValueSet.removeAll(removeIt); // Rest are new records for (String key : guiValueMap.keySet()) { Object object = guiValueMap.get(key); if 
(object instanceof HashMap) { Object subObj = makeNewInstance(assocClass, (HashMap<String, Object>) object); mapRequestToObj((HashMap<String, Object>) object, assocClass, subObj); dbValueSet.add(subObj); } else { log.log(Level.WARNING, "Wrong sub type {0}", attrName); } } } } else { log.log(Level.WARNING, "No DB mapping or not of collection type: {0}", attrName); } typedVal = null; } else if (paramTypes[0].isAssignableFrom(String.class)) { typedVal = val; } else if (paramTypes[0].equals(Integer.class) || paramTypes[0].equals(Integer.TYPE)) { typedVal = Integer.parseInt(val); } else if (paramTypes[0].equals(Long.class) || paramTypes[0].equals(Long.TYPE)) { typedVal = Long.parseLong(val); } else if (paramTypes[0].equals(Double.class) || paramTypes[0].equals(Double.TYPE)) { typedVal = Double.parseDouble(val); } else if (paramTypes[0].equals(Float.class) || paramTypes[0].equals(Float.TYPE)) { typedVal = Float.parseFloat(val); } else if (paramTypes[0].equals(Boolean.class) || paramTypes[0].equals(Boolean.TYPE)) { typedVal = "true".equalsIgnoreCase(val) || "t".equalsIgnoreCase(val) || "y".equalsIgnoreCase(val); } else if (paramTypes[0].isAssignableFrom(Date.class)) { typedVal = dateFormat.parse(val); } else if (Enum.class.isAssignableFrom(paramTypes[0])) { try { Method fromValueMethod = paramTypes[0].getMethod("fromValue", String.class); typedVal = fromValueMethod.invoke(null, val); } catch (Exception ex) { typedVal = null; } try { if (typedVal == null) { Method valueOfMethod = paramTypes[0].getMethod("valueOf", String.class); typedVal = valueOfMethod.invoke(null, val); } } catch (Exception ex) { typedVal = null; } } else if (persistentClass != null && persistentClass.equals(FileUpload.class)) { FileItem fileItem = uploadServlet.getFileItem(sessionId.get(), fldName, val); if (fileItem != null) { typedVal = fileUploadService.uploadFile(ServiceContextStore.get(), fileItem.getName(), fileItem.getContentType(), fileItem.getInputStream()); } else { typedVal = null; } } else 
if (srvParam != null && srvParam.getFindById() != null) { if (value instanceof HashMap) { HashMap<String, Object> embeddedObj = (HashMap<String, Object>) value; typedVal = srvParam.getFindById().invoke(srvParam.getInstance(), ServiceContextStore.get(), new Long((String) embeddedObj.get("id"))); mapRequestToObj(embeddedObj, srvParam.getExpectedClass(), typedVal); } else { try { Long parsedId = new Long(val); typedVal = srvParam.getFindById().invoke(srvParam.getInstance(), ServiceContextStore.get(), parsedId); } catch (NumberFormatException nfe) { // wrong value typedVal = null; } } } else if (persistentClass != null) { String getMethodName = "g" + m.getName().substring(1); try { Method getMethod = obj.getClass().getMethod(getMethodName, (Class[]) null); typedVal = getMethod.invoke(obj, (Object[]) null); } catch (NoSuchMethodException nsme) { typedVal = null; } if (typedVal == null) { typedVal = makeNewInstance(persistentClass, (HashMap<String, Object>) value); } mapRequestToObj((HashMap<String, Object>) value, typedVal.getClass(), typedVal); } else { log.log(Level.WARNING, "Can't convert value for: {0}.{1} ({2})", new Object[] { expectedClass.getName(), m.getName(), (paramTypes.length == 1 ? paramTypes[0].getName() : paramTypes.toString()) }); typedVal = null; } if (typedVal != null) { m.invoke(obj, typedVal); } } } else if (m.getName().startsWith(SET_PREFIX)) { log.log(Level.WARNING, "Unusable setter method: {0}.{1} ({2})", new Object[] { expectedClass.getName(), m.getName(), (paramTypes.length == 1 ? paramTypes[0].getName() : paramTypes.toString()) }); } } }
From source file:ubic.gemma.search.SearchServiceImpl.java
/** * @param settings/*from w ww . jav a 2 s . co m*/ * @param results * @param excludeWithoutTaxon if true: If the SearchResults have no "getTaxon" method then the results will get * filtered out Results with no taxon associated will also get removed. */ private void filterByTaxon(SearchSettings settings, Collection<SearchResult> results, boolean excludeWithoutTaxon) { if (settings.getTaxon() == null) { return; } Collection<SearchResult> toRemove = new HashSet<SearchResult>(); Taxon t = settings.getTaxon(); if (results == null) return; for (SearchResult sr : results) { Object o = sr.getResultObject(); try { Taxon currentTaxon = null; if (o instanceof ExpressionExperiment) { ExpressionExperiment ee = (ExpressionExperiment) o; currentTaxon = expressionExperimentService.getTaxon(ee); } else if (o instanceof ExpressionExperimentSet) { ExpressionExperimentSet ees = (ExpressionExperimentSet) o; currentTaxon = ees.getTaxon(); } else if (o instanceof Gene) { Gene gene = (Gene) o; currentTaxon = gene.getTaxon(); } else if (o instanceof GeneSet) { GeneSet geneSet = (GeneSet) o; currentTaxon = geneSetService.getTaxon(geneSet); } else if (o instanceof CharacteristicValueObject) { CharacteristicValueObject charVO = (CharacteristicValueObject) o; currentTaxon = taxonDao.findByCommonName(charVO.getTaxon()); } else { Method m = o.getClass().getMethod("getTaxon", new Class[] {}); currentTaxon = (Taxon) m.invoke(o, new Object[] {}); } if (currentTaxon == null || !currentTaxon.getId().equals(t.getId())) { if (currentTaxon == null) { // Sanity check for bad data in db (could happen if EE has no samples). Can happen that // searchResults have a vaild getTaxon method // but the method returns null (shouldn't make it this far) log.debug("Object has getTaxon method but it returns null. 
Obj is: " + o); } toRemove.add(sr); } } catch (SecurityException e) { throw new RuntimeException(e); } catch (NoSuchMethodException e) { /* * In case of a programming error where the results don't have a taxon at all, we assume we should * filter them out but issue a warning. */ if (excludeWithoutTaxon) { toRemove.add(sr); log.warn("No getTaxon method for: " + o.getClass() + ". Filtering from results. Error was: " + e); } } catch (IllegalArgumentException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } catch (InvocationTargetException e) { throw new RuntimeException(e); } } results.removeAll(toRemove); }
From source file:uk.ac.ebi.intact.dbupdate.prot.actions.impl.DuplicatesFinderImpl.java
/**
 * Group possible duplicates of the same protein transcript and create one
 * DuplicatesFoundEvent per group of two or more.
 *
 * @param possibleDuplicates : the list containing the possible duplicates of a same transcript
 * @param context : the datacontext
 * @param processor : the proteinProcessor
 * @param isSpliceVariant : a boolean value to indicate if the transcript is a splice variant (true) or a feature
 *        chain (false)
 * @return the collection of duplicetFoundEvent for each set of duplicated transcript in the list of possible
 *         duplicates
 * @throws ProcessorException
 */
private Collection<DuplicatesFoundEvent> findProteinTranscriptDuplicates(
        List<ProteinTranscript> possibleDuplicates, DataContext context, ProteinProcessor processor,
        boolean isSpliceVariant) throws ProcessorException {
    // the list containing the duplicateFoundEvents
    Collection<DuplicatesFoundEvent> duplicateEvents = new ArrayList<DuplicatesFoundEvent>();

    // if there are possible duplicates (more than 1 result), check and fix when necessary
    if (possibleDuplicates.size() > 1) {
        // the collection containing all the possible duplicates
        Collection<ProteinTranscript> totalProteins = new ArrayList(possibleDuplicates);
        // the collection which will contain the duplicates of a same protein transcript
        Collection<ProteinTranscript> duplicates = new ArrayList<ProteinTranscript>(possibleDuplicates.size());

        // while the list of possible duplicates has not been fully treated, we need to check the duplicates
        while (totalProteins.size() > 0) {
            // clear the list of duplicates of a same transcript (reused across iterations)
            duplicates.clear();

            // pick the first protein of the list and add it in the list of duplicates
            Iterator<ProteinTranscript> iterator = totalProteins.iterator();
            ProteinTranscript trans = iterator.next();
            Protein protToCompare = trans.getProtein();

            // get the uniprot identity of this protein
            InteractorXref firstIdentity = ProteinUtils.getUniprotXref(protToCompare);
            String firstUniprotAc = null;
            if (firstIdentity != null) {
                firstUniprotAc = firstIdentity.getPrimaryId();
            }

            // this first protein represents a uniprot transcript and is added to the list of duplicates of
            // this transcript
            duplicates.add(trans);

            // extract the parents of this protein
            Collection<InteractorXref> transcriptParent;
            // if splice variant, the isoform-parents
            if (isSpliceVariant) {
                transcriptParent = ProteinUtils.extractIsoformParentCrossReferencesFrom(protToCompare);
            }
            // if feature chain, the chain-parents
            else {
                transcriptParent = ProteinUtils.extractChainParentCrossReferencesFrom(protToCompare);
            }

            // we compare the parents of this first protein against the parents of the other proteins
            while (iterator.hasNext()) {
                // we extract the parents of the next protein to compare
                ProteinTranscript trans2 = iterator.next();
                Protein proteinCompared = trans2.getProtein();

                // get the uniprot identity of this protein
                InteractorXref secondIdentity = ProteinUtils.getUniprotXref(proteinCompared);
                String secondUniprotAc = null;
                if (secondIdentity != null) {
                    secondUniprotAc = secondIdentity.getPrimaryId();
                }

                // if both uniprot identities are identical or null, we may have a duplicate. Need to check the
                // parents
                if ((firstUniprotAc != null && secondUniprotAc != null
                        && firstUniprotAc.equalsIgnoreCase(secondUniprotAc))
                        || (firstUniprotAc == null && secondUniprotAc == null)) {
                    Collection<InteractorXref> transcriptParents2;
                    if (isSpliceVariant) {
                        transcriptParents2 = ProteinUtils
                                .extractIsoformParentCrossReferencesFrom(proteinCompared);
                    } else {
                        transcriptParents2 = ProteinUtils
                                .extractChainParentCrossReferencesFrom(proteinCompared);
                    }

                    // if the parents are identical, we add the protein to the list of duplicates
                    if (hasSameParents(transcriptParent, transcriptParents2)) {
                        duplicates.add(trans2);
                    }
                }
            }

            // if we have more than two proteins in the duplicate list, we merge them
            if (duplicates.size() > 1) {
                // get the uniprot transcript
                UniprotProteinTranscript transcript = trans.getUniprotVariant();

                // set the uniprot sequence and CRC64 of the event
                String uniprotSequence = null;
                String uniprotCrc64 = null;
                String primaryAc = null;
                String organism = null;
                if (transcript != null) {
                    uniprotSequence = transcript.getSequence();
                    uniprotCrc64 = Crc64.getCrc64(uniprotSequence);
                    primaryAc = transcript.getPrimaryAc();
                    organism = transcript.getOrganism() != null
                            ? String.valueOf(transcript.getOrganism().getTaxid()) : null;
                }

                // list of duplicates
                Collection<Protein> duplicateToFix = new ArrayList<Protein>(duplicates.size());
                for (ProteinTranscript t : duplicates) {
                    duplicateToFix.add(t.getProtein());
                }

                // create the DuplicateFoundEvent and add it to the list of duplicateFoundEvent
                DuplicatesFoundEvent duplEvt = new DuplicatesFoundEvent(processor, context, duplicateToFix,
                        uniprotSequence, uniprotCrc64, primaryAc, organism);
                duplicateEvents.add(duplEvt);
            }

            // we remove the processed proteins from the list of protein to process
            totalProteins.removeAll(duplicates);
        }
    }
    return duplicateEvents;
}
From source file:org.apache.syncope.core.provisioning.java.data.AnyObjectDataBinderImpl.java
/**
 * Applies the given patch to an any object, persists the changes and computes which
 * external resources must be contacted (and with which operation) to propagate them.
 * <p>
 * Processing order: save pending workflow changes, realm, name, plain attributes /
 * resources, relationships, memberships (with membership-scoped plain attributes),
 * then membership-driven provisioning / deprovisioning, connObjectKey renames and
 * finally dynamic group membership changes.
 *
 * @param toBeUpdated any object to be updated
 * @param anyObjectPatch patch to apply
 * @return propagation information by resource, to be consumed by the propagation layer
 * @throws SyncopeClientCompositeException if any invalid relationship, membership or
 *         attribute value was found while applying the patch
 */
@Override
public PropagationByResource update(final AnyObject toBeUpdated, final AnyObjectPatch anyObjectPatch) {
    // Re-merge any pending change from workflow tasks
    AnyObject anyObject = anyObjectDAO.save(toBeUpdated);

    PropagationByResource propByRes = new PropagationByResource();

    // collects all client-facing validation errors; thrown as one composite at the end
    SyncopeClientCompositeException scce = SyncopeClientException.buildComposite();

    AnyUtils anyUtils = anyUtilsFactory.getInstance(AnyTypeKind.ANY_OBJECT);

    // snapshot of assigned resource keys taken BEFORE the patch is applied
    // NOTE(review): removeAll() is invoked on this collection below — assumes
    // findAllResourceKeys returns a mutable collection; confirm against the DAO
    Collection<String> currentResources = anyObjectDAO.findAllResourceKeys(anyObject.getKey());

    // fetch connObjectKeys before update, so renames on resources can be detected afterwards
    Map<String, String> oldConnObjectKeys = getConnObjectKeys(anyObject, anyUtils);

    // realm
    setRealm(anyObject, anyObjectPatch);

    // name: a rename must be pushed as UPDATE to every currently assigned resource
    if (anyObjectPatch.getName() != null && StringUtils.isNotBlank(anyObjectPatch.getName().getValue())) {
        propByRes.addAll(ResourceOperation.UPDATE, anyObjectDAO.findAllResourceKeys(anyObject.getKey()));
        anyObject.setName(anyObjectPatch.getName().getValue());
    }

    // attributes and resources
    propByRes.merge(fill(anyObject, anyObjectPatch, anyUtils, scce));

    // relationships: each patch first removes any existing relationship of the same
    // type/other end, then (for ADD_REPLACE) re-creates it after validation
    anyObjectPatch.getRelationships().stream().filter(patch -> patch.getRelationshipTO() != null)
            .forEachOrdered((patch) -> {
                RelationshipType relationshipType = relationshipTypeDAO
                        .find(patch.getRelationshipTO().getType());
                if (relationshipType == null) {
                    LOG.debug("Ignoring invalid relationship type {}", patch.getRelationshipTO().getType());
                } else {
                    // detach the pre-existing relationship, if any (also for plain DELETE)
                    Optional<? extends ARelationship> relationship = anyObject.getRelationship(relationshipType,
                            patch.getRelationshipTO().getOtherEndKey());
                    if (relationship.isPresent()) {
                        anyObject.getRelationships().remove(relationship.get());
                        relationship.get().setLeftEnd(null);
                    }

                    if (patch.getOperation() == PatchOperation.ADD_REPLACE) {
                        // relationships are only allowed towards other any objects:
                        // blank, USER and GROUP other-end types are rejected
                        if (StringUtils.isBlank(patch.getRelationshipTO().getOtherEndType())
                                || AnyTypeKind.USER.name().equals(patch.getRelationshipTO().getOtherEndType())
                                || AnyTypeKind.GROUP.name()
                                        .equals(patch.getRelationshipTO().getOtherEndType())) {

                            SyncopeClientException invalidAnyType = SyncopeClientException
                                    .build(ClientExceptionType.InvalidAnyType);
                            invalidAnyType.getElements()
                                    .add(AnyType.class.getSimpleName() + " not allowed for relationship: "
                                            + patch.getRelationshipTO().getOtherEndType());
                            scce.addException(invalidAnyType);
                        } else {
                            AnyObject otherEnd = anyObjectDAO.find(patch.getRelationshipTO().getOtherEndKey());
                            if (otherEnd == null) {
                                LOG.debug("Ignoring invalid any object {}",
                                        patch.getRelationshipTO().getOtherEndKey());
                            } else if (anyObject.getRealm().getFullPath()
                                    .startsWith(otherEnd.getRealm().getFullPath())) {
                                // other end must live in an ancestor realm (or the same)

                                ARelationship newRelationship = entityFactory.newEntity(ARelationship.class);
                                newRelationship.setType(relationshipType);
                                newRelationship.setRightEnd(otherEnd);
                                newRelationship.setLeftEnd(anyObject);

                                anyObject.add(newRelationship);
                            } else {
                                LOG.error("{} cannot be assigned to {}", otherEnd, anyObject);

                                SyncopeClientException unassignable = SyncopeClientException
                                        .build(ClientExceptionType.InvalidRelationship);
                                unassignable.getElements().add("Cannot be assigned: " + otherEnd);
                                scce.addException(unassignable);
                            }
                        }
                    }
                }
            });

    // prepare for membership-related resource management:
    // "reasons" maps each resource key to the set of keys justifying its assignment —
    // the any object's own key for direct assignments, group keys for group-derived ones;
    // a resource whose reason set ends up empty will be deprovisioned
    Collection<ExternalResource> resources = anyObjectDAO.findAllResources(anyObject);

    Map<String, Set<String>> reasons = new HashMap<>();
    anyObject.getResources().forEach(resource -> {
        reasons.put(resource.getKey(), new HashSet<>(Collections.singleton(anyObject.getKey())));
    });
    anyObjectDAO.findAllGroupKeys(anyObject).forEach(group -> {
        groupDAO.findAllResourceKeys(group).forEach(resource -> {
            if (!reasons.containsKey(resource)) {
                reasons.put(resource, new HashSet<>());
            }
            reasons.get(resource).add(group);
        });
    });

    Set<String> toBeDeprovisioned = new HashSet<>();
    Set<String> toBeProvisioned = new HashSet<>();

    SyncopeClientException invalidValues = SyncopeClientException.build(ClientExceptionType.InvalidValues);

    // memberships: like relationships, any existing membership for the patched group is
    // removed first (together with its membership-scoped plain attributes), then
    // re-created for ADD_REPLACE
    anyObjectPatch.getMemberships().stream().filter((membPatch) -> (membPatch.getGroup() != null))
            .forEachOrdered(membPatch -> {
                Optional<? extends AMembership> membership = anyObject.getMembership(membPatch.getGroup());
                if (membership.isPresent()) {
                    anyObject.getMemberships().remove(membership.get());
                    membership.get().setLeftEnd(null);

                    // drop plain attributes scoped to the removed membership
                    anyObject.getPlainAttrs(membership.get()).forEach(attr -> {
                        anyObject.remove(attr);
                        attr.setOwner(null);
                    });

                    if (membPatch.getOperation() == PatchOperation.DELETE) {
                        // the leaving group no longer justifies its resources; each
                        // affected resource still needs an UPDATE to reflect removal
                        groupDAO.findAllResourceKeys(membership.get().getRightEnd().getKey()).stream()
                                .filter(resource -> reasons.containsKey(resource)).forEach(resource -> {
                                    reasons.get(resource).remove(membership.get().getRightEnd().getKey());
                                    toBeProvisioned.add(resource);
                                });
                    }
                }
                if (membPatch.getOperation() == PatchOperation.ADD_REPLACE) {
                    Group group = groupDAO.find(membPatch.getGroup());
                    if (group == null) {
                        LOG.debug("Ignoring invalid group {}", membPatch.getGroup());
                    } else if (anyObject.getRealm().getFullPath().startsWith(group.getRealm().getFullPath())) {
                        // group must live in an ancestor realm (or the same)

                        AMembership newMembership = entityFactory.newEntity(AMembership.class);
                        newMembership.setRightEnd(group);
                        newMembership.setLeftEnd(anyObject);

                        anyObject.add(newMembership);

                        // apply membership-scoped plain attribute values carried by the patch
                        membPatch.getPlainAttrs().forEach(attrTO -> {
                            PlainSchema schema = getPlainSchema(attrTO.getSchema());
                            if (schema == null) {
                                LOG.debug("Invalid " + PlainSchema.class.getSimpleName() + "{}, ignoring...",
                                        attrTO.getSchema());
                            } else {
                                Optional<? extends APlainAttr> attr = anyObject.getPlainAttr(schema.getKey(),
                                        newMembership);
                                if (!attr.isPresent()) {
                                    LOG.debug("No plain attribute found for {} and membership of {}", schema,
                                            newMembership.getRightEnd());

                                    APlainAttr newAttr = anyUtils.newPlainAttr();
                                    newAttr.setOwner(anyObject);
                                    newAttr.setMembership(newMembership);
                                    newAttr.setSchema(schema);
                                    anyObject.add(newAttr);

                                    AttrPatch patch = new AttrPatch.Builder().attrTO(attrTO).build();
                                    processAttrPatch(anyObject, patch, schema, newAttr, anyUtils, resources,
                                            propByRes, invalidValues);
                                }
                                // NOTE(review): when the attribute already exists the patch
                                // value is silently skipped — confirm this is intentional
                            }
                        });
                        if (!invalidValues.isEmpty()) {
                            scce.addException(invalidValues);
                        }

                        // joining a group provisions the any object on the group's resources
                        toBeProvisioned.addAll(groupDAO.findAllResourceKeys(group.getKey()));
                    } else {
                        LOG.error("{} cannot be assigned to {}", group, anyObject);

                        SyncopeClientException unassignabled = SyncopeClientException
                                .build(ClientExceptionType.InvalidMembership);
                        unassignabled.getElements().add("Cannot be assigned: " + group);
                        scce.addException(unassignabled);
                    }
                }
            });

    // finalize resource management: resources left with no justifying reason are deprovisioned
    reasons.entrySet().stream().filter(entry -> entry.getValue().isEmpty())
            .forEach(entry -> toBeDeprovisioned.add(entry.getKey()));

    propByRes.addAll(ResourceOperation.DELETE, toBeDeprovisioned);
    propByRes.addAll(ResourceOperation.UPDATE, toBeProvisioned);

    // in case of new memberships all current resources need to be updated in order to propagate new group
    // attribute values
    if (!toBeDeprovisioned.isEmpty() || !toBeProvisioned.isEmpty()) {
        currentResources.removeAll(toBeDeprovisioned);
        propByRes.addAll(ResourceOperation.UPDATE, currentResources);
    }

    // check if some connObjectKey was changed by the update above: the old key must be
    // reported so the propagation layer can locate (and rename) the remote object
    Map<String, String> newcCnnObjectKeys = getConnObjectKeys(anyObject, anyUtils);
    oldConnObjectKeys.entrySet().stream().filter(entry -> newcCnnObjectKeys.containsKey(entry.getKey())
            && !entry.getValue().equals(newcCnnObjectKeys.get(entry.getKey()))).forEach(entry -> {

                propByRes.addOldConnObjectKey(entry.getKey(), entry.getValue());
                propByRes.add(ResourceOperation.UPDATE, entry.getKey());
            });

    // presumably left = dynamic group memberships before save, right = after save —
    // consistent with the DELETE / UPDATE / CREATE mapping below; confirm against the DAO
    Pair<Set<String>, Set<String>> dynGroupMembs = anyObjectDAO.saveAndGetDynGroupMembs(anyObject);

    // finally check if any resource assignment is to be processed due to dynamic group membership change

    // left only (membership lost) -> deprovision from that group's resources
    dynGroupMembs.getLeft().stream().filter(group -> !dynGroupMembs.getRight().contains(group))
            .forEach(delete -> {
                groupDAO.find(delete).getResources().stream()
                        .filter(resource -> !propByRes.contains(resource.getKey())).forEach(resource -> {
                            propByRes.add(ResourceOperation.DELETE, resource.getKey());
                        });
            });
    // in both (membership kept) -> update on that group's resources
    dynGroupMembs.getLeft().stream().filter(group -> dynGroupMembs.getRight().contains(group))
            .forEach(update -> {
                groupDAO.find(update).getResources().stream()
                        .filter(resource -> !propByRes.contains(resource.getKey())).forEach(resource -> {
                            propByRes.add(ResourceOperation.UPDATE, resource.getKey());
                        });
            });
    // right only (membership gained) -> provision onto that group's resources
    dynGroupMembs.getRight().stream().filter(group -> !dynGroupMembs.getLeft().contains(group))
            .forEach(create -> {
                groupDAO.find(create).getResources().stream()
                        .filter(resource -> !propByRes.contains(resource.getKey())).forEach(resource -> {
                            propByRes.add(ResourceOperation.CREATE, resource.getKey());
                        });
            });

    // Throw composite exception if there is at least one element set in the composing exceptions
    if (scce.hasExceptions()) {
        throw scce;
    }

    return propByRes;
}