Usage examples for java.util.ArrayList.remove
public boolean remove(Object o)
public E remove(int index)
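The examples below use both overloads. As a quick orientation, here is a minimal, self-contained sketch (class and variable names are illustrative, not taken from any source file below) showing how remove(int) and remove(Object) differ, including the autoboxing pitfall with lists of Integer:

import java.util.ArrayList;
import java.util.List;

public class RemoveOverloads {
    public static void main(String[] args) {
        List<Integer> numbers = new ArrayList<>(List.of(10, 20, 30));

        // remove(int index) removes by position and returns the removed element.
        int removed = numbers.remove(0);                     // list is now [20, 30]

        // remove(Object o) removes the first equal element and returns a boolean.
        boolean found = numbers.remove(Integer.valueOf(30)); // list is now [20]

        // Pitfall: numbers.remove(30) would select the remove(int) overload and
        // throw IndexOutOfBoundsException, because an int literal is an index.
        System.out.println(removed + " " + found + " " + numbers);
    }
}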
From source file:gov.nih.nci.cabig.caaers.api.impl.SAEEvaluationServiceImpl.java
/**
 * Creates or updates Adverse Events.
 * The sync flag is used only in the case of the SAE Evaluation service, as that service can soft-delete Adverse Events.
 * @param rpSrc
 * @param errors
 * @param syncFlag
 * @return
 */
private AdverseEventReportingPeriod createOrUpdateAdverseEvents(AdverseEventReportingPeriod rpSrc,
        ValidationErrors errors, boolean syncFlag) {
    Study study = fetchStudy(rpSrc.getStudy().getFundingSponsorIdentifierValue());
    if (study == null) {
        logger.error("Study not present in caAERS with the sponsor identifier : "
                + rpSrc.getStudy().getFundingSponsorIdentifierValue());
        errors.addValidationError("WS_AEMS_003",
                "Study with sponsor identifier " + rpSrc.getStudy().getFundingSponsorIdentifierValue()
                        + " does not exist in caAERS",
                rpSrc.getStudy().getFundingSponsorIdentifierValue());
        return null;
    }
    // Async study update removed because the updates would most likely happen after this process is finished.

    // Migrate the domain object.
    AdverseEventReportingPeriod rpDest = new AdverseEventReportingPeriod();
    DomainObjectImportOutcome<AdverseEventReportingPeriod> rpOutcome =
            new DomainObjectImportOutcome<AdverseEventReportingPeriod>();
    reportingPeriodMigrator.migrate(rpSrc, rpDest, rpOutcome);
    logger.info("Reporting period migration result :" + String.valueOf(rpOutcome.getMessages()));
    if (rpOutcome.hasErrors()) {
        // Translate errors and create a response.
        logger.error("Errors while migrating :" + String.valueOf(rpOutcome.getErrorMessages()));
        errors.addValidationErrors(rpOutcome.getValidationErrors().getErrors());
        return null;
    }

    // Decide between the create path and the update path.
    String tac = rpDest.getTreatmentAssignment() != null ? rpDest.getTreatmentAssignment().getCode() : null;
    String epochName = rpDest.getEpoch() != null ? rpDest.getEpoch().getName() : null;
    AdverseEventReportingPeriod rpFound = rpDest.getAssignment().findReportingPeriod(rpDest.getExternalId(),
            rpDest.getStartDate(), rpDest.getEndDate(), rpDest.getCycleNumber(), epochName, tac);
    ArrayList<AdverseEventReportingPeriod> reportingPeriodList = new ArrayList<AdverseEventReportingPeriod>(
            rpDest.getAssignment().getActiveReportingPeriods());
    if (rpFound != null) {
        // Used only in the case of the SAE Evaluation service.
        if (syncFlag) {
            syncAdverseEventWithSrc(rpFound, rpSrc);
        }
        int i = findIndexFromReportPeriodList(reportingPeriodList, rpFound);
        if (i >= 0)
            reportingPeriodList.remove(i);
    }
    ValidationErrors dateValidationErrors = validateRepPeriodDates(rpDest, reportingPeriodList,
            rpDest.getAssignment().getStartDateOfFirstCourse(), rpDest.getEpoch());
    logger.info("Reporting period validation result :" + String.valueOf(dateValidationErrors));
    if (dateValidationErrors.hasErrors()) {
        // Translate errors and create a response.
        logger.error("Errors while migrating :" + String.valueOf(dateValidationErrors));
        errors.addValidationErrors(dateValidationErrors.getErrors());
        return null;
    }

    // Validate adverse events.
    for (AdverseEvent adverseEvent : rpDest.getAdverseEvents()) {
        if (adverseEvent.getGradedDate() == null)
            adverseEvent.setGradedDate(new Date());
        Set<ConstraintViolation<AdverseEvent>> constraintViolations = validator.validate(adverseEvent,
                AdverseEventGroup.class, Default.class);
        if (!constraintViolations.isEmpty()) {
            // Translate errors to a response.
            for (ConstraintViolation<AdverseEvent> v : constraintViolations) {
                errors.addValidationError("WS_GEN_006", v.getMessage(), v.getPropertyPath());
            }
            return null;
        }
    }

    // Validate the reporting period.
    AdverseEventReportingPeriod rpTarget = rpFound;
    if (rpTarget == null)
        rpTarget = rpDest;
    Set<ConstraintViolation<AdverseEventReportingPeriod>> constraintViolations = validator.validate(rpTarget,
            CourseCycleGroup.class, Default.class);
    if (!constraintViolations.isEmpty()) {
        // Translate errors to a response.
        for (ConstraintViolation<AdverseEventReportingPeriod> v : constraintViolations) {
            errors.addValidationError("WS_GEN_006", v.getMessage(), v.getPropertyPath());
        }
        return null;
    }
    if (rpFound == null) {
        // New reporting period.
        rpFound = rpDest;
        rpFound.getAssignment().addReportingPeriod(rpFound);
        // Validate the reporting period before saving.
        adverseEventValidatior.validate(rpFound, rpFound.getStudy(), errors);
        adverseEventReportingPeriodDao.save(rpFound);
        if (configuration.get(Configuration.ENABLE_WORKFLOW)) {
            Long wfId = adverseEventRoutingAndReviewRepository.enactReportingPeriodWorkflow(rpFound);
            logger.debug("Enacted workflow : " + wfId);
        }
    } else {
        // Existing reporting period.
        reportingPeriodSynchronizer.migrate(rpDest, rpFound, rpOutcome);
        // Validate the reporting period before saving.
        adverseEventValidatior.validate(rpFound, rpFound.getStudy(), errors);
        if (errors.hasErrors()) {
            logger.error("Error(s) while validating with Adverse Event " + String.valueOf(errors.getErrorCount()));
            return null;
        }
        adverseEventReportingPeriodDao.save(rpFound);
    }
    return rpFound;
}
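The removal in this example is the copy-then-remove(int) idiom: the reporting period being updated is dropped from a defensive copy so that date validation runs only against its siblings. A minimal sketch of that idiom, with hypothetical names:

import java.util.ArrayList;
import java.util.List;

public class ExcludeSelfBeforeValidation {
    // Copy the live collection, locate the element being updated, and drop it
    // by index so later checks (e.g. overlap validation) compare the element
    // only against its siblings. The original list is left untouched.
    static List<String> siblingsOf(List<String> all, String current) {
        ArrayList<String> copy = new ArrayList<>(all);
        int i = copy.indexOf(current);
        if (i >= 0) {
            copy.remove(i); // remove(int) returns the removed element
        }
        return copy;
    }

    public static void main(String[] args) {
        List<String> periods = List.of("cycle-1", "cycle-2", "cycle-3");
        System.out.println(siblingsOf(periods, "cycle-2")); // [cycle-1, cycle-3]
    }
}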
From source file:com.hichinaschool.flashcards.libanki.Models.java
public void moveTemplate(JSONObject m, JSONObject template, int idx) {
    try {
        JSONArray ja = m.getJSONArray("tmpls");
        int oldidx = -1;
        ArrayList<JSONObject> l = new ArrayList<JSONObject>();
        HashMap<Integer, Integer> oldidxs = new HashMap<Integer, Integer>();
        for (int i = 0; i < ja.length(); ++i) {
            if (ja.get(i).equals(template)) {
                oldidx = i;
                if (idx == oldidx) {
                    return;
                }
            }
            JSONObject t = ja.getJSONObject(i);
            oldidxs.put(t.hashCode(), t.getInt("ord"));
            l.add(t);
        }
        l.remove(oldidx);
        l.add(idx, template);
        m.put("tmpls", new JSONArray(l));
        _updateTemplOrds(m);
        // Generate the change map using a StringBuilder.
        StringBuilder sb = new StringBuilder();
        ja = m.getJSONArray("tmpls");
        for (int i = 0; i < ja.length(); ++i) {
            JSONObject t = ja.getJSONObject(i);
            sb.append("when ord = ").append(oldidxs.get(t.hashCode())).append(" then ").append(t.getInt("ord"));
            if (i != ja.length() - 1) {
                sb.append(" ");
            }
        }
        // Apply.
        save(m);
        mCol.getDb().execute("update cards set ord = (case " + sb.toString()
                + " end),usn=?,mod=? where nid in (select id from notes where mid = ?)",
                new Object[] { mCol.usn(), Utils.intNow(), m.getLong("id") });
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }
}
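The remove/add pair above is the standard way to move an element within an ArrayList. A stripped-down sketch of the same reordering step (generic names, not from the source):

import java.util.ArrayList;
import java.util.List;

public class MoveElement {
    // remove(int) pulls the element out of its old slot, add(int, E) drops it
    // into the new slot, and everything in between shifts by one position.
    static <T> void move(List<T> list, int from, int to) {
        T element = list.remove(from); // remove(int) returns the displaced element
        list.add(to, element);
    }

    public static void main(String[] args) {
        List<String> tmpls = new ArrayList<>(List.of("front", "back", "cloze"));
        move(tmpls, 2, 0);
        System.out.println(tmpls); // [cloze, front, back]
    }
}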
From source file:com.hichinaschool.flashcards.libanki.Models.java
public void moveField(JSONObject m, JSONObject field, int idx) {
    mCol.modSchema();
    try {
        JSONArray ja = m.getJSONArray("flds");
        ArrayList<JSONObject> l = new ArrayList<JSONObject>();
        int oldidx = -1;
        for (int i = 0; i < ja.length(); ++i) {
            l.add(ja.getJSONObject(i));
            if (field.equals(ja.getJSONObject(i))) {
                oldidx = i;
                if (idx == oldidx) {
                    return;
                }
            }
        }
        // Remember the old sort field.
        String sortf = Utils.jsonToString(m.getJSONArray("flds").getJSONObject(m.getInt("sortf")));
        // Move.
        l.remove(oldidx);
        l.add(idx, field);
        m.put("flds", new JSONArray(l));
        // Restore the sort field.
        ja = m.getJSONArray("flds");
        for (int i = 0; i < ja.length(); ++i) {
            if (Utils.jsonToString(ja.getJSONObject(i)).equals(sortf)) {
                m.put("sortf", i);
                break;
            }
        }
        _updateFieldOrds(m);
        save(m);
        _transformFields(m, new TransformFieldMove(idx, oldidx));
        renameField(m, field, null);
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }
}
From source file:com.krawler.spring.crm.common.crmManagerDAOImpl.java
public KwlReturnObject getMaster(HashMap<String, Object> requestParams) throws ServiceException {
    List ll = null;
    int dl = 0;
    try {
        ArrayList filter_params = new ArrayList();
        ArrayList hqlfilter_names = new ArrayList();
        hqlfilter_names.add("NOTINcomboname");
        String hql = "from CrmCombomaster cm";
        ArrayList hqlfilter_params = new ArrayList();
        String filtercombos = Constants.MASTERCONFIG_HIDECOMBO;
        if (!requestParams.containsKey("allowLeadType")) {
            filtercombos += ",'Lead Type'";
        }
        hqlfilter_params.add(filtercombos);
        String hqlfilterQuery = StringUtil.filterQuery(hqlfilter_names, "where");
        int ind = hqlfilterQuery.indexOf("(");
        if (ind > -1) {
            int index = Integer.valueOf(hqlfilterQuery.substring(ind + 1, ind + 2));
            hqlfilterQuery = hqlfilterQuery.replaceAll("(" + index + ")", hqlfilter_params.get(index).toString());
            hqlfilter_params.remove(index);
        }
        hql += hqlfilterQuery;
        if (requestParams.containsKey(Constants.filter_params)) {
            ArrayList filter_params1 = (ArrayList) requestParams.get(Constants.filter_params);
            filter_params.addAll(filter_params1);
            ArrayList filter_names = (ArrayList) requestParams.get("filter_names");
            String filterQuery = StringUtil.filterQuery(filter_names, "and");
            hql += filterQuery;
        }
        hql += " order by comboname";
        ll = executeQuery(hql, filter_params.toArray());
        dl = ll.size();
    } catch (Exception e) {
        throw ServiceException.FAILURE("crmManager.getMaster", e);
    }
    return new KwlReturnObject(true, KWLErrorMsgs.S01, "", ll, dl);
}
From source file:ca.uhn.fhir.jpa.term.TerminologyLoaderSvc.java
private void dropCircularRefs(TermConcept theConcept, ArrayList<String> theChain,
        Map<String, TermConcept> theCode2concept, Counter theCircularCounter) {
    theChain.add(theConcept.getCode());
    for (Iterator<TermConceptParentChildLink> childIter = theConcept.getChildren().iterator(); childIter.hasNext();) {
        TermConceptParentChildLink next = childIter.next();
        TermConcept nextChild = next.getChild();
        if (theChain.contains(nextChild.getCode())) {
            StringBuilder b = new StringBuilder();
            b.append("Removing circular reference code ");
            b.append(nextChild.getCode());
            b.append(" from parent ");
            b.append(next.getParent().getCode());
            b.append(". Chain was: ");
            for (String nextInChain : theChain) {
                TermConcept nextCode = theCode2concept.get(nextInChain);
                b.append(nextCode.getCode());
                b.append('[');
                b.append(StringUtils.substring(nextCode.getDisplay(), 0, 20).replace("[", "").replace("]", "").trim());
                b.append("] ");
            }
            ourLog.info(b.toString(), theConcept.getCode());
            childIter.remove();
            nextChild.getParents().remove(next);
        } else {
            dropCircularRefs(nextChild, theChain, theCode2concept, theCircularCounter);
        }
    }
    theChain.remove(theChain.size() - 1);
}
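Two removal idioms appear here: childIter.remove() deletes the current link while iterating, and theChain.remove(theChain.size() - 1) pops the ArrayList used as a recursion stack. A minimal sketch of both, with illustrative data:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class IteratorRemoval {
    public static void main(String[] args) {
        List<String> codes = new ArrayList<>(List.of("A", "B", "A", "C"));

        // Iterator.remove() is the safe way to drop elements while a loop holds
        // an explicit iterator; calling codes.remove(...) here would throw
        // ConcurrentModificationException on the next iteration.
        for (Iterator<String> it = codes.iterator(); it.hasNext();) {
            if ("A".equals(it.next())) {
                it.remove();
            }
        }
        System.out.println(codes); // [B, C]

        // remove(size() - 1) is the standard way to pop the last element when
        // an ArrayList serves as a stack; removing from the tail avoids shifting.
        codes.remove(codes.size() - 1);
        System.out.println(codes); // [B]
    }
}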
From source file:com.mellanox.r4h.DFSInputStream.java
private ByteBuffer getFirstToComplete(CompletionService<ByteBuffer> hedgedService,
        ArrayList<Future<ByteBuffer>> futures) throws InterruptedException {
    if (futures.isEmpty()) {
        throw new InterruptedException("let's retry");
    }
    Future<ByteBuffer> future = null;
    try {
        future = hedgedService.take();
        ByteBuffer bb = future.get();
        futures.remove(future);
        return bb;
    } catch (ExecutionException e) {
        // already logged in the Callable
        futures.remove(future);
    } catch (CancellationException ce) {
        // already logged in the Callable
        futures.remove(future);
    }
    throw new InterruptedException("let's retry");
}
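Here remove(Object) is used for bookkeeping: the completed future is dropped from the pending list whether it succeeded or failed. A small sketch of that equality-based removal (hypothetical names; note that remove(null) is legal and simply returns false when no null element is present):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;

public class RemoveByReference {
    public static void main(String[] args) throws Exception {
        // remove(Object) drops the first element equal to its argument and
        // returns whether anything was found. Future implementations typically
        // use identity equality, so this removes exactly the completed task.
        List<Future<String>> pending = new ArrayList<>();
        Future<String> done = CompletableFuture.completedFuture("ok");
        pending.add(done);

        boolean removed = pending.remove(done);     // true
        boolean again = pending.remove(done);       // false: nothing left to match
        boolean nullRemoved = pending.remove(null); // false, and no exception

        System.out.println(removed + " " + again + " " + nullRemoved);
    }
}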
From source file:com.cttapp.bby.mytlc.layer8apps.CalendarHandler.java
private ArrayList<Shift> checkForDuplicates(ArrayList<Shift> newShifts) {
    Preferences pf = new Preferences(this);
    ArrayList<String> oldShifts = pf.getSavedShifts();
    ContentResolver cr = getContentResolver();
    for (String id : oldShifts) {
        Cursor cursor = cr.query(getEventsUri(), new String[] { "dtstart", "dtend" },
                "CALENDAR_ID = " + calID + " AND _ID = " + id, null, null);
        if (!cursor.moveToFirst()) {
            continue;
        }
        do {
            Calendar now = Calendar.getInstance();
            Calendar startTime = Calendar.getInstance();
            startTime.setTimeInMillis(cursor.getLong(0));
            Calendar endTime = Calendar.getInstance();
            endTime.setTimeInMillis(cursor.getLong(1));
            if (endTime.compareTo(now) == -1) {
                deleteShift(id);
                continue;
            }
            boolean shiftFound = false;
            for (Shift shift : newShifts) {
                if (shift.getStartDate().get(Calendar.HOUR_OF_DAY) == startTime.get(Calendar.HOUR_OF_DAY)
                        && shift.getEndDate().get(Calendar.MINUTE) == endTime.get(Calendar.MINUTE)) {
                    newShifts.remove(shift);
                    shiftFound = true;
                    break;
                }
            }
            if (!shiftFound) {
                cr.delete(getEventsUri(), "CALENDAR_ID = " + calID + " AND _ID = " + String.valueOf(id), null);
            }
        } while (cursor.moveToNext());
    }
    return newShifts;
}
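Note that calling newShifts.remove(shift) inside the for-each is safe only because the loop breaks immediately afterwards. A minimal sketch of that constraint, plus the removeIf alternative (Java 8+), with illustrative data:

import java.util.ArrayList;
import java.util.List;

public class RemoveDuringForEach {
    public static void main(String[] args) {
        List<String> shifts = new ArrayList<>(List.of("mon", "tue", "wed"));

        // Removing inside a for-each is tolerable only when the loop exits
        // right away; continuing to iterate after the remove would throw
        // ConcurrentModificationException.
        for (String s : shifts) {
            if (s.equals("tue")) {
                shifts.remove(s);
                break; // mandatory: the underlying iterator is now stale
            }
        }

        // removeIf expresses the same intent without the foot-gun and also
        // handles multiple matches in one pass.
        shifts.removeIf(s -> s.startsWith("w"));
        System.out.println(shifts); // [mon]
    }
}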
From source file:edu.isi.karma.er.helper.SPARQLGeneratorUtil.java
/**
 * @author shri
 * This method generates a SPARQL query to select the list of columns (in the order they are provided).
 *
 * @param root This object of TriplesMap is the root from which this method begins to fetch columns.
 * @param columns This ArrayList has the list of columns to be fetched. These columns are identified by their complete URL as defined in the ontology. <br />
 * For example: <http://isi.edu/integration/karma/ontologies/model/accelerometer#AccelerometerReading>. Note that many instances of a class may exist within the same ontology.
 */
public String get_query(TriplesMap root, ArrayList<HashMap<String, String>> columns, boolean distinct_query) {
    ArrayList<Object> queue = new ArrayList<>();
    queue.add(root);
    StringBuffer query = new StringBuffer();
    this.var_count = 1;
    this.prefix_list = new HashMap<>();
    this.select_params = new StringBuffer();
    HashMap<TriplesMap, String> markedTriples = new HashMap<>();
    ArrayList<String> visited_columns = new ArrayList<>();
    this.ParentMapingInfoList = new HashMap<>();

    // Save the column predicate URL and the column name to be displayed.
    HashMap<Predicate, String> predicateList = new HashMap<>();
    HashMap<String, String> columnList = new HashMap<>();
    if (columns != null && !columns.isEmpty()) {
        for (HashMap<String, String> col : columns) {
            columnList.put(col.get("name"), col.get("url"));
        }
    }

    // Using a BFS approach, traverse the tree from the root node and add triples/predicates to the queue.
    while (!queue.isEmpty()) {
        Object currentObj = queue.remove(0);
        // If this is a TriplesMap, add all its RefObjects to the queue;
        // for the predicates, add only the ones that satisfy the criterion of being <...hasValue>.
        if (currentObj instanceof TriplesMap) {
            String var = "x" + var_count;
            TriplesMap triple = (TriplesMap) currentObj;
            boolean foundHasValue = false;
            List<PredicateObjectMap> predicates = triple.getPredicateObjectMaps();
            for (PredicateObjectMap p_map : predicates) {
                if (p_map.getObject().hasRefObjectMap()) {
                    RefObjectMap objMap = p_map.getObject().getRefObjectMap();
                    queue.add(objMap.getParentTriplesMap());
                    logger.info(triple.getSubject().getId() + " ---> "
                            + objMap.getParentTriplesMap().getSubject().getId());
                    // Maintain a list of mapping properties between triples.
                    ParentMapingInfoList.put(objMap.getParentTriplesMap().getSubject().getId(),
                            new ParentMapingInfo(triple, p_map.getPredicate()));
                } else {
                    queue.add(p_map.getPredicate());
                    predicateList.put(p_map.getPredicate(), var);
                    foundHasValue = true;
                }
            }
            // If this triple is marked to be included in the query, add it to the
            // markedTriples list and add its class type to the query string. Eg.
            //   PREFIX pref1: <.../.../Input>
            //   ?x2 a pref1:
            if (foundHasValue) {
                markedTriples.put(triple, var);
                String rdfsTypes = triple.getSubject().getRdfsType().get(0).toString();
                this.prefix_list.put("pref" + var_count, rdfsTypes);
                query.append(" ?" + var + " a pref" + var_count + ": .");
                // If the parent of this triple is also marked for the query,
                // add the relation between the triples to the query.
                ParentMapingInfo parentTriple = ParentMapingInfoList.get(triple.getSubject().getId());
                // From the current node, keep popping until we reach the last node in the
                // parent map, to see if any of the parents are connected.
                if (parentTriple != null) {
                    String sq = checkParentMarked(triple, markedTriples, var);
                    if (sq.length() > 1) {
                        query.append(sq);
                    }
                }
            }
            var_count++;
        }
        // If it is a Predicate object, create a variable in the query string.
        else if (currentObj instanceof Predicate) {
            Predicate predicate = (Predicate) currentObj;
            String k = predicate.getTemplate().toString();
            k = k.replace('<', ' ').replace('>', ' ').trim();
            if (columns != null && !columns.isEmpty()) {
                if (columnList.containsValue(k)) {
                    Iterator<String> itr = columnList.keySet().iterator();
                    while (itr.hasNext()) {
                        String cName = itr.next();
                        if (columnList.get(cName).equals(k) && !visited_columns.contains(cName)) {
                            query.append(" ?" + predicateList.get(predicate)).append(" ")
                                    .append(predicate.getTemplate()).append(" ?").append(cName + " . ");
                            visited_columns.add(cName);
                            var_count++;
                            break;
                        }
                    }
                } else {
                    logger.info("ColumnList does not contain : " + k + " " + currentObj);
                }
            } else {
                // Get the column name from the end of this URL - after either the last '/' or the '#'.
                int index = 0;
                if (k.indexOf("#") > 0) {
                    index = k.lastIndexOf('#') + 1;
                } else if (k.indexOf("/") > 0) {
                    index = k.lastIndexOf('/') + 1;
                }
                query.append(" ?" + predicateList.get(predicate)).append(" ").append(predicate.getTemplate())
                        .append(" ?").append(k.substring(index, k.length())).append(" .");
                var_count++;
            }
        }
        // If this is a RefObject, add the child triple to the queue.
        else if (currentObj instanceof RefObjectMap) {
            RefObjectMap refObj = (RefObjectMap) currentObj;
            TriplesMap t = refObj.getParentTriplesMap();
            queue.add(t);
        }
    }

    // Append the list of prefixes.
    Iterator<String> itr = this.prefix_list.keySet().iterator();
    StringBuffer sQuery = new StringBuffer();
    while (itr.hasNext()) {
        String key = itr.next();
        sQuery.append(" PREFIX ").append(key).append(": ").append(this.prefix_list.get(key));
    }
    // Append the columns to be selected, in the order they are specified.
    sQuery.append(" select ");
    if (distinct_query) {
        sQuery.append(" distinct ");
    }
    for (HashMap<String, String> s : columns) {
        sQuery.append(" ?" + s.get("name"));
    }
    sQuery.append(" where { ").append(query.toString()).append(" } ");
    logger.info("Generated Query : " + sQuery);
    return sQuery.toString();
}
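queue.remove(0) turns the ArrayList into a FIFO queue for the BFS traversal. A minimal sketch of that idiom and the usual ArrayDeque alternative (illustrative data):

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

public class BfsQueue {
    public static void main(String[] args) {
        // Treating an ArrayList as a FIFO queue by always popping the head.
        // Each remove(0) shifts the remaining elements left, so it costs O(n)
        // per dequeue.
        List<String> queue = new ArrayList<>(List.of("root", "child-a", "child-b"));
        while (!queue.isEmpty()) {
            String current = queue.remove(0);
            System.out.println("visiting " + current);
        }

        // ArrayDeque offers the same FIFO behaviour with O(1) dequeues and is
        // usually preferable when the traversal is large.
        Deque<String> deque = new ArrayDeque<>(List.of("root", "child-a", "child-b"));
        while (!deque.isEmpty()) {
            System.out.println("visiting " + deque.poll());
        }
    }
}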
From source file:org.opendatakit.security.server.SecurityServiceUtil.java
/**
 * Method to enforce an access configuration constraining only registered users, authenticated
 * users and anonymous access.
 *
 * Adds additional checks of the incoming parameters and patches things up if the incoming list of
 * users omits the super-user.
 *
 * @param users
 * @param allGroups
 * @param cc
 * @throws DatastoreFailureException
 * @throws AccessDeniedException
 */
public static final void setStandardSiteAccessConfiguration(ArrayList<UserSecurityInfo> users,
        ArrayList<GrantedAuthorityName> allGroups, CallingContext cc)
        throws DatastoreFailureException, AccessDeniedException {
    // Remove anonymousUser from the set of users and collect its
    // permissions (anonGrantStrings), which will be placed in
    // the granted authority hierarchy table.
    List<String> anonGrantStrings = new ArrayList<String>();
    {
        UserSecurityInfo anonUser = null;
        for (UserSecurityInfo i : users) {
            if (i.getType() == UserType.ANONYMOUS) {
                anonUser = i;
                // Clean up grants for anonymousUser --
                // ignore anonAuth (the grant under which we will place things)
                // and forbid Site Admin.
                for (GrantedAuthorityName a : i.getAssignedUserGroups()) {
                    if (anonAuth.getAuthority().equals(a.name()))
                        continue; // avoid circularity...
                    // Only allow ROLE_ATTACHMENT_VIEWER and GROUP_ assignments.
                    if (!a.name().startsWith(GrantedAuthorityName.GROUP_PREFIX)) {
                        continue;
                    }
                    // Do not allow Site Admin assignments for Anonymous --
                    // or Tables super-user or Tables Administrator.
                    // Those all give access to the full set of users on the system,
                    // and giving that information to Anonymous is a security risk.
                    if (GrantedAuthorityName.GROUP_SITE_ADMINS.equals(a)
                            || GrantedAuthorityName.GROUP_ADMINISTER_TABLES.equals(a)
                            || GrantedAuthorityName.GROUP_SUPER_USER_TABLES.equals(a)) {
                        continue;
                    }
                    anonGrantStrings.add(a.name());
                }
                break;
            }
        }
        if (anonUser != null) {
            users.remove(anonUser);
        }
    }

    // Scan through the users and remove any entries under assigned user groups
    // that do not begin with GROUP_.
    //
    // Additionally, if the user is an e-mail, remove the GROUP_DATA_COLLECTORS
    // permission since ODK Collect does not support oauth2 authentication.
    {
        TreeSet<GrantedAuthorityName> toRemove = new TreeSet<GrantedAuthorityName>();
        for (UserSecurityInfo i : users) {
            // Only working with registered users.
            if (i.getType() != UserType.REGISTERED) {
                continue;
            }
            // Get the list of assigned groups
            // -- this is not a copy -- we can directly manipulate this.
            TreeSet<GrantedAuthorityName> assignedGroups = i.getAssignedUserGroups();
            // Scan the set of assigned groups and remove any that don't begin with GROUP_.
            toRemove.clear();
            for (GrantedAuthorityName name : assignedGroups) {
                if (!name.name().startsWith(GrantedAuthorityName.GROUP_PREFIX)) {
                    toRemove.add(name);
                }
            }
            if (!toRemove.isEmpty()) {
                assignedGroups.removeAll(toRemove);
            }
            // For e-mail accounts, remove the Data Collector permission since ODK Collect
            // does not support an oauth2 authentication mechanism.
            if (i.getEmail() != null) {
                assignedGroups.remove(GrantedAuthorityName.GROUP_DATA_COLLECTORS);
            }
        }
    }

    // Find the entry (or entries) for the designated super-user(s).
    String superUserUsername = cc.getUserService().getSuperUserUsername();
    int expectedSize = ((superUserUsername != null) ? 1 : 0);
    ArrayList<UserSecurityInfo> superUsers = new ArrayList<UserSecurityInfo>();
    for (UserSecurityInfo i : users) {
        if (i.getType() == UserType.REGISTERED) {
            if (i.getUsername() != null && superUserUsername != null
                    && i.getUsername().equals(superUserUsername)) {
                superUsers.add(i);
            }
        }
    }
    if (superUsers.size() != expectedSize) {
        // We are missing one or both super-users.
        // Remove any we have and recreate them from scratch.
        users.removeAll(superUsers);
        superUsers.clear();
        // Synthesize a UserSecurityInfo object for the super-user(s)
        // and add it (them) to the list.
        try {
            List<RegisteredUsersTable> tList = RegisteredUsersTable.assertSuperUsers(cc);
            for (RegisteredUsersTable t : tList) {
                UserSecurityInfo i = new UserSecurityInfo(t.getUsername(), t.getFullName(), t.getEmail(),
                        UserSecurityInfo.UserType.REGISTERED);
                superUsers.add(i);
                users.add(i);
            }
        } catch (ODKDatastoreException e) {
            e.printStackTrace();
            throw new DatastoreFailureException("Incomplete update");
        }
    }

    // Reset super-user privileges to have (just) site admin privileges
    // even if the caller attempts to change, add, or remove them.
    for (UserSecurityInfo i : superUsers) {
        TreeSet<GrantedAuthorityName> grants = new TreeSet<GrantedAuthorityName>();
        grants.add(GrantedAuthorityName.GROUP_SITE_ADMINS);
        grants.add(GrantedAuthorityName.ROLE_SITE_ACCESS_ADMIN);
        // Override whatever the user gave us.
        i.setAssignedUserGroups(grants);
    }

    try {
        // Enforce our fixed set of groups and their inclusion hierarchy.
        // This is generally a no-op during normal operations.
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(siteAuth,
                SecurityServiceUtil.siteAdministratorGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(administerTablesAuth,
                SecurityServiceUtil.administerTablesGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(superUserTablesAuth,
                SecurityServiceUtil.superUserTablesGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(synchronizeTablesAuth,
                SecurityServiceUtil.synchronizeTablesGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(dataOwnerAuth,
                SecurityServiceUtil.dataOwnerGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(dataViewerAuth,
                SecurityServiceUtil.dataViewerGrants, cc);
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(dataCollectorAuth,
                SecurityServiceUtil.dataCollectorGrants, cc);
        // Place the anonymous user's permissions in the granted authority table.
        GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(anonAuth, anonGrantStrings, cc);

        // Get all granted authority names.
        TreeSet<String> authorities = GrantedAuthorityHierarchyTable
                .getAllPermissionsAssignableGrantedAuthorities(cc.getDatastore(), cc.getCurrentUser());
        // Remove the groups that have structure (i.e., those defined above).
        authorities.remove(siteAuth.getAuthority());
        authorities.remove(administerTablesAuth.getAuthority());
        authorities.remove(superUserTablesAuth.getAuthority());
        authorities.remove(synchronizeTablesAuth.getAuthority());
        authorities.remove(dataOwnerAuth.getAuthority());
        authorities.remove(dataViewerAuth.getAuthority());
        authorities.remove(dataCollectorAuth.getAuthority());
        authorities.remove(anonAuth.getAuthority());

        // Delete all hierarchy structures under anything else.
        // I.e., if somehow USER_IS_REGISTERED had been granted GROUP_FORM_MANAGER,
        // this loop would leave USER_IS_REGISTERED without any grants.
        // (It repairs the database to conform to our privilege hierarchy expectations.)
        List<String> empty = Collections.emptyList();
        for (String s : authorities) {
            GrantedAuthorityHierarchyTable.assertGrantedAuthorityHierarchy(new SimpleGrantedAuthority(s),
                    empty, cc);
        }

        // Declare all the users (and remove users that are not in this set).
        Map<UserSecurityInfo, String> pkMap = setUsers(users, cc);

        // Now, for each GROUP_..., update the user granted authority
        // table with the users that have that GROUP_... assignment.
        setUsersOfGrantedAuthority(pkMap, siteAuth, cc);
        setUsersOfGrantedAuthority(pkMap, administerTablesAuth, cc);
        setUsersOfGrantedAuthority(pkMap, superUserTablesAuth, cc);
        setUsersOfGrantedAuthority(pkMap, synchronizeTablesAuth, cc);
        setUsersOfGrantedAuthority(pkMap, dataOwnerAuth, cc);
        setUsersOfGrantedAuthority(pkMap, dataViewerAuth, cc);
        setUsersOfGrantedAuthority(pkMap, dataCollectorAuth, cc);

        // All super-users would already have their site admin role, and we leave
        // that unchanged. The key is to ensure that the super-users are in the
        // users list so they don't get accidentally removed, and that they have
        // siteAuth group membership. I.e., we don't need to manage
        // ROLE_SITE_ACCESS_ADMIN here; it is done elsewhere.
    } catch (ODKDatastoreException e) {
        e.printStackTrace();
        throw new DatastoreFailureException("Incomplete update");
    } finally {
        Datastore ds = cc.getDatastore();
        User user = cc.getCurrentUser();
        try {
            SecurityRevisionsTable.setLastRegisteredUsersRevisionDate(ds, user);
        } catch (ODKDatastoreException e) {
            // If it fails, use RELOAD_INTERVAL to force a reload.
            e.printStackTrace();
        }
        try {
            SecurityRevisionsTable.setLastRoleHierarchyRevisionDate(ds, user);
        } catch (ODKDatastoreException e) {
            // If it fails, use RELOAD_INTERVAL to force a reload.
            e.printStackTrace();
        }
    }
}
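This example mixes single-element remove(Object) calls (users.remove(anonUser), authorities.remove(...)) with a batch users.removeAll(superUsers). A small sketch of the two styles side by side, with hypothetical data:

import java.util.ArrayList;
import java.util.List;

public class BatchRemoval {
    public static void main(String[] args) {
        List<String> users = new ArrayList<>(List.of("anonymous", "alice", "bob", "superuser"));

        // Single-element removal by equality: find the element during a scan,
        // then remove it outside the loop.
        String anonUser = null;
        for (String u : users) {
            if (u.equals("anonymous")) {
                anonUser = u;
                break;
            }
        }
        if (anonUser != null) {
            users.remove(anonUser);
        }

        // Batch removal: removeAll(Collection) drops every element equal to
        // something in the argument collection.
        List<String> superUsers = new ArrayList<>(List.of("superuser"));
        users.removeAll(superUsers);
        System.out.println(users); // [alice, bob]
    }
}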
From source file:europarl.PhraseTranslation.java
private void postProcessData(ArrayList<PhraseTranslation> translations, Bag<String> words_list) {
    // It can remove useless attributes (in a COHERENT way: both from words_list and from translations).
    // It can remove instances (so it can even remove whole classes).
    log.debug("Start preprocessing");
    HashSet<String> to_remove = new HashSet<String>();

    // BEGIN removing classes with too few occurrences
    Bag<String> classes = new Bag<String>();
    for (PhraseTranslation phraseTranslation : translations) {
        classes.add(phraseTranslation.getTranslatedWord());
    }
    if (log.isDebugEnabled())
        for (String translation : classes)
            if (classes.getCount(translation) > 2)
                System.out.println("Class " + translation + " : " + classes.getCount(translation));
    ArrayList<Integer> class_occurrencies = new ArrayList<Integer>(classes.values());
    java.util.Collections.sort(class_occurrencies);
    System.out.println("CLASS OCC " + class_occurrencies);
    ArrayList<PhraseTranslation> tr_to_remove = new ArrayList<PhraseTranslation>();
    for (String cl : classes) {
        if (classes.getCount(cl) < class_occurrencies
                .get(class_occurrencies.size() - Cfg.cfg.getInt("target_classes", 4))) {
            for (PhraseTranslation phraseTranslation : translations) {
                if (phraseTranslation.getTranslatedWord().equals(cl))
                    tr_to_remove.add(phraseTranslation);
            }
        }
    }
    for (PhraseTranslation phraseTranslation : tr_to_remove) {
        for (String word : phraseTranslation.getPhraseWords()) {
            words_list.countdown(word);
        }
        translations.remove(phraseTranslation);
    }
    System.out.println(translations.size());
    // END removing classes with too few occurrences

    // BEGIN removing "useless" words, i.e. words with fewer than K occurrences
    for (String word : words_list) {
        assert 2 == Cfg.cfg.getInt("minimum_word_occurrencies");
        if (words_list.getCount(word) <= Cfg.cfg.getInt("minimum_word_occurrencies")
                || words_list.getCount(word) >= translations.size() * 50 / 100) {
            log.debug(word + " occurs only " + words_list.getCount(word) + " times");
            to_remove.add(word);
        }
    }
    for (String word : to_remove) {
        words_list.remove(word);
        for (PhraseTranslation trans : translations) {
            trans.removeWord(word);
        }
    }
    log.info("Useless words: " + to_remove.size() + ". Now: " + words_list.size());
    to_remove.clear();
    // END removing "useless" words
}
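The collect-then-remove pattern here (tr_to_remove, to_remove) sidesteps ConcurrentModificationException by never calling remove on the list being iterated. A minimal sketch with illustrative data:

import java.util.ArrayList;
import java.util.List;

public class CollectThenRemove {
    public static void main(String[] args) {
        // First collect the doomed elements into a side list while scanning,
        // then remove them afterwards. Every remove(Object) call happens
        // outside the loop that iterates the source list.
        List<String> translations = new ArrayList<>(List.of("casa", "maison", "haus", "casa"));

        List<String> toRemove = new ArrayList<>();
        for (String t : translations) {
            if (t.startsWith("c")) {
                toRemove.add(t);
            }
        }
        for (String t : toRemove) {
            translations.remove(t); // removes the first equal element per call
        }
        System.out.println(translations); // [maison, haus]
    }
}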