Usage examples for java.util.HashSet.addAll
boolean addAll(Collection<? extends E> c);
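addAll copies every element of the argument collection into the set, silently discarding duplicates, and returns true only if the set was actually modified. Before the project-specific examples below, here is a minimal self-contained sketch of that behaviour; the class name and sample data are illustrative only.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class AddAllDemo {
    public static void main(String[] args) {
        // Illustrative data; any Collection<? extends E> works as the argument.
        List<String> colors = Arrays.asList("red", "green", "blue", "red");

        HashSet<String> unique = new HashSet<String>();
        boolean changed = unique.addAll(colors);   // duplicates are silently dropped

        System.out.println(changed);               // true  (the set gained elements)
        System.out.println(unique.size());         // 3     ("red" is stored only once)

        // A second addAll with no new elements leaves the set unchanged and returns false.
        System.out.println(unique.addAll(colors)); // false
    }
}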
From source file:org.lexgrid.valuesets.helper.VSDServiceHelper.java
/**
 * Return a string representation of the URIs of all of the coding schemes
 * used in the supplied value domain.
 *
 * @param vdDef supplied value domain
 * @return List of unique URIs. Returned as strings because we aren't all
 *         that picky about the syntax
 * @throws LBException
 * @throws URISyntaxException
 */
public HashSet<String> getCodingSchemeURIs(ValueSetDefinition vdDef) throws LBException {
    HashSet<String> csRefs = new HashSet<String>();
    if (vdDef != null && vdDef.getDefinitionEntry() != null) {
        // Always add the default coding scheme, even if it isn't used
        if (!StringUtils.isEmpty(vdDef.getDefaultCodingScheme()))
            csRefs.add(getURIForCodingSchemeName(vdDef.getMappings(), vdDef.getDefaultCodingScheme()));

        // Iterate over all of the individual definitions
        Iterator<DefinitionEntry> deIter = vdDef.getDefinitionEntryAsReference().iterator();
        while (deIter.hasNext()) {
            DefinitionEntry de = deIter.next();
            String csName = null;
            if (de.getCodingSchemeReference() != null) {
                csName = de.getCodingSchemeReference().getCodingScheme();
            } else if (de.getEntityReference() != null) {
                String entityNamespaceName = de.getEntityReference().getEntityCodeNamespace();
                if (!StringUtils.isEmpty(entityNamespaceName)) {
                    csName = getCodingSchemeNameForNamespaceName(vdDef.getMappings(), entityNamespaceName);
                }
            } else if (de.getPropertyReference() != null) {
                csName = de.getPropertyReference().getCodingScheme();
            } else if (de.getValueSetDefinitionReference() != null) {
                try {
                    csRefs.addAll(getCodingSchemeURIs(vsds_.getValueSetDefinitionByUri(
                            new URI(de.getValueSetDefinitionReference().getValueSetDefinitionURI()))));
                } catch (URISyntaxException e) {
                    // TODO Decide what to do here - the value domain URI isn't valid?
                    e.printStackTrace();
                }
            } else {
                assert false : "Invalid value domain definition";
            }
            if (!StringUtils.isEmpty(csName) && !StringUtils.equals(csName, vdDef.getDefaultCodingScheme())) {
                String csURI = getURIForCodingSchemeName(vdDef.getMappings(), csName);
                if (!StringUtils.isEmpty(csURI))
                    csRefs.add(csURI);
            }
        }
    }
    return csRefs;
}
From source file:net.sourceforge.fenixedu.domain.student.Student.java
public List<ExecutionYear> getTutorshipsExecutionYears() {
    HashSet<ExecutionYear> coveredYears = new HashSet<ExecutionYear>();
    for (Tutorship tutorship : getTutorships()) {
        coveredYears.addAll(tutorship.getCoveredExecutionYears());
    }
    return new ArrayList<ExecutionYear>(coveredYears);
}
From source file:net.countercraft.movecraft.async.translation.TranslationTask.java
private void captureYield(MovecraftLocation[] blocksList, List<MovecraftLocation> harvestedBlocks) {
    if (harvestedBlocks.isEmpty()) {
        return;
    }
    ArrayList<Inventory> chests = new ArrayList<Inventory>();
    HashSet<ItemDropUpdateCommand> itemDropUpdateSet = new HashSet<ItemDropUpdateCommand>();
    HashMap<MovecraftLocation, ItemStack[]> harvestedMap = new HashMap<MovecraftLocation, ItemStack[]>();
    // find chests
    for (MovecraftLocation loc : getCraft().getBlockList()) {
        Block block = getCraft().getW().getBlockAt(loc.getX(), loc.getY(), loc.getZ());
        if (block.getType() == Material.CHEST || block.getType() == Material.TRAPPED_CHEST)
            chests.add(((InventoryHolder) (block.getState())).getInventory());
    }
    for (MovecraftLocation harvestedBlock : harvestedBlocks) {
        Block block = getCraft().getW().getBlockAt(harvestedBlock.getX(), harvestedBlock.getY(),
                harvestedBlock.getZ());
        ItemStack[] drops = block.getDrops().toArray(new ItemStack[block.getDrops().size()]);
        // generate seed drops
        if (block.getType() == Material.CROPS) {
            Random rand = new Random();
            int amount = rand.nextInt(4);
            if (amount > 0) {
                ItemStack seeds = new ItemStack(Material.SEEDS, amount);
                HashSet<ItemStack> d = new HashSet<ItemStack>(Arrays.asList(drops));
                d.add(seeds);
                drops = d.toArray(new ItemStack[d.size()]);
            }
        }
        // get contents of inventories before depositing
        if (block.getState() instanceof InventoryHolder) {
            if (block.getState() instanceof Chest) {
                //Inventory inv = ((DoubleChest) block.getState()).getRightSide().getInventory().getLocation().equals(block.getLocation()) ? ((DoubleChest) block.getState()).getRightSide().getInventory() : ((DoubleChest) block.getState()).getLeftSide().getInventory();
                //HashSet<ItemStack> d = new HashSet<ItemStack>(Arrays.asList(inv.getContents()));
                HashSet<ItemStack> d = new HashSet<ItemStack>(
                        Arrays.asList(((Chest) block.getState()).getBlockInventory().getContents()));
                d.addAll(block.getDrops());
                drops = d.toArray(new ItemStack[d.size()]);
            } else {
                HashSet<ItemStack> d = new HashSet<ItemStack>(
                        Arrays.asList((((InventoryHolder) block.getState()).getInventory().getContents())));
                d.addAll(block.getDrops());
                drops = d.toArray(new ItemStack[d.size()]);
            }
        }
        for (ItemStack drop : drops) {
            ItemStack retStack = putInToChests(drop, chests);
            if (retStack != null)
                // drop items on position
                itemDropUpdateSet.add(new ItemDropUpdateCommand(new Location(getCraft().getW(),
                        harvestedBlock.getX(), harvestedBlock.getY(), harvestedBlock.getZ()), retStack));
        }
    }
    data.setItemDropUpdates(itemDropUpdateSet.toArray(new ItemDropUpdateCommand[1]));
}
From source file:nl.umcg.westrah.binarymetaanalyzer.BinaryMetaAnalysis.java
private void createSNPIndex(String outdir) throws IOException {

    HashSet<String> confineToTheseSNPs = null;
    HashSet<String> snpPreSelection = null;
    if (settings.getSNPProbeSelection() != null) {
        System.out.println("Getting SNPs from SNP/Probe selection file: " + settings.getSNPProbeSelection());
        snpPreSelection = new HashSet<String>();
        TextFile tf = new TextFile(settings.getSNPProbeSelection(), TextFile.R);
        String[] elems = tf.readLineElems(TextFile.tab);
        while (elems != null) {
            String snp = elems[0];
            snpPreSelection.add(snp);
            elems = tf.readLineElems(TextFile.tab);
        }
        tf.close();

        System.out.println("Found " + snpPreSelection.size() + " unique snps in SNP/Probe selection file.");
        if (snpPreSelection.isEmpty()) {
            System.err.println("Error: SNP/Probe selection file defined, but no SNPs found.");
            System.exit(-1);
        }
    }

    if (settings.getSNPSelection() != null) {
        System.out.println("Selecting SNPs from file: " + settings.getSNPSelection());
        confineToTheseSNPs = new HashSet<String>();
        TextFile tf = new TextFile(settings.getSNPSelection(), TextFile.R);
        ArrayList<String> snps = tf.readAsArrayList();
        tf.close();
        if (snpPreSelection == null) {
            confineToTheseSNPs.addAll(snps);
        } else {
            System.out.println("Intersecting with SNP/Probe selection.");
            for (String snp : snps) {
                if (snpPreSelection.contains(snp)) {
                    confineToTheseSNPs.add(snp);
                }
            }
        }
        System.out.println(confineToTheseSNPs.size() + " SNPs loaded.");
    } else if (snpPreSelection != null) {
        confineToTheseSNPs = snpPreSelection;
    }

    // create a list of all available SNPs
    HashSet<String> allSNPs = new HashSet<String>();
    for (BinaryMetaAnalysisDataset dataset : datasets) {
        String[] snps = dataset.getSNPs();
        for (String snp : snps) {
            if (confineToTheseSNPs == null || confineToTheseSNPs.contains(snp)) {
                allSNPs.add(snp);
            }
        }
        System.out.println(snps.length + " in dataset " + dataset.getName() + "\t" + allSNPs.size()
                + " unique SNPs found");
    }

    if (allSNPs.isEmpty()) {
        System.err.println("Error: no SNPs found that match your request");
        System.exit(-1);
    }

    // create a temporary map that maps each SNP to a meta-analysis position
    int ctr = 0;
    TObjectIntHashMap<String> snpMap = new TObjectIntHashMap<String>(allSNPs.size(), 0.85f, -9);
    snpList = new String[allSNPs.size()];
    for (String s : allSNPs) {
        snpMap.put(s, ctr);
        snpList[ctr] = s;
        ctr++;
    }

    // TODO: for faster disk access, we would need to sort the SNPs by dataset ID...

    // fill index
    snpIndex = new int[allSNPs.size()][datasets.length];
    for (int d = 0; d < datasets.length; d++) {
        for (int s = 0; s < allSNPs.size(); s++) {
            snpIndex[s][d] = -9;
        }
    }
    for (int d = 0; d < datasets.length; d++) {
        String[] snps = datasets[d].getSNPs();
        for (int s = 0; s < snps.length; s++) {
            String snp = snps[s];
            int id = snpMap.get(snp);
            if (id != -9) {
                snpIndex[id][d] = s;
            }
        }
    }

    TextFile tf = new TextFile(outdir + "snpindex.txt", TextFile.W);
    String header = "metaID";
    for (int d = 0; d < datasets.length; d++) {
        header += "\t" + datasets[d].getName() + "-sid";
    }
    tf.writeln(header);
    for (int s = 0; s < snpList.length; s++) {
        String ln = snpList[s];
        for (int d = 0; d < datasets.length; d++) {
            ln += "\t" + snpIndex[s][d];
        }
        tf.writeln(ln);
    }
    tf.close();
}
From source file:org.apache.sentry.provider.db.service.persistent.SentryStore.java
/**
 * Drop the given privilege from all roles. Create the new privilege if asked.
 * @param tPrivilege
 * @param pm
 * @throws SentryNoSuchObjectException
 * @throws SentryInvalidInputException
 */
private void dropOrRenamePrivilegeForAllRoles(PersistenceManager pm, TSentryPrivilege tPrivilege,
        TSentryPrivilege newTPrivilege) throws SentryNoSuchObjectException, SentryInvalidInputException {
    HashSet<MSentryRole> roleSet = Sets.newHashSet();

    List<MSentryPrivilege> mPrivileges = getMSentryPrivileges(tPrivilege, pm);
    if (mPrivileges != null && !mPrivileges.isEmpty()) {
        for (MSentryPrivilege mPrivilege : mPrivileges) {
            roleSet.addAll(ImmutableSet.copyOf((mPrivilege.getRoles())));
        }
    }

    MSentryPrivilege parent = getMSentryPrivilege(tPrivilege, pm);
    for (MSentryRole role : roleSet) {
        // 1. get privilege and child privileges
        Set<MSentryPrivilege> privilegeGraph = Sets.newHashSet();
        if (parent != null) {
            privilegeGraph.add(parent);
            populateChildren(pm, Sets.newHashSet(role.getRoleName()), parent, privilegeGraph);
        } else {
            populateChildren(pm, Sets.newHashSet(role.getRoleName()), convertToMSentryPrivilege(tPrivilege),
                    privilegeGraph);
        }
        // 2. revoke privilege and child privileges
        alterSentryRoleRevokePrivilegeCore(pm, role.getRoleName(), tPrivilege);
        // 3. add new privilege and child privileges with new tableName
        if (newTPrivilege != null) {
            for (MSentryPrivilege m : privilegeGraph) {
                TSentryPrivilege t = convertToTSentryPrivilege(m);
                if (newTPrivilege.getPrivilegeScope().equals(PrivilegeScope.DATABASE.name())) {
                    t.setDbName(newTPrivilege.getDbName());
                } else if (newTPrivilege.getPrivilegeScope().equals(PrivilegeScope.TABLE.name())) {
                    t.setTableName(newTPrivilege.getTableName());
                }
                alterSentryRoleGrantPrivilegeCore(pm, role.getRoleName(), t);
            }
        }
    }
}
From source file:com.nuvolect.securesuite.data.SqlCipher.java
/** <pre>
 * Execute database transactions provided by the syncDataObj while updating the manifest.
 * Transaction types, each is a JSONObject inside the syncDataObj:
 * JSONObject: wrapper
 *   JSONObject inserts, each key: contact_id, value is contact data
 *   JSONObject updates, each key: contact_id, value is contact data
 *   JSONObject deletes, each key: contact_id, value is not used
 * @param ctx
 * @param syncDataObj
 * @param manifest
 * @return
 * </pre>
 */
public static SyncIncManifest incSyncPutIncrement(Context ctx, JSONObject syncDataObj,
        SyncIncManifest manifest) {

    /**
     * Do not sync this transaction to the companion device, i.e., avoid an infinite loop.
     */
    boolean syncTransactionIsFalse = false;

    try {
        /**
         * Execute deletes first, in case the user is short on space.
         */
        if (manifest.contactDeletes.size() > 0) {

            /**
             * While we iterate on the set, remove items from the manifest as they are processed,
             * hence make a copy first.
             */
            HashSet<Long> delete_copy = new HashSet<Long>();
            delete_copy.addAll(manifest.contactDeletes);

            for (long contact_id : delete_copy) {

                if (MyContacts.contactExists(contact_id))
                    deleteContact(ctx, contact_id, syncTransactionIsFalse);

                // Iterate on the copy and remove deletes from the manifest
                boolean success = manifest.contactDeletes.remove(contact_id);
                if (!success)
                    throw new RuntimeException(
                            "incSyncPutIncrement, delete contact_id not found in manifest: " + contact_id);
            }
        }

        /**
         * Delete groups next. It is possible that the user either created and deleted a group
         * within the sync period, or deleted and created a group.
         * New groups are not synced until associated with a contact.
         * Execute the delete first. Under some conditions there may be an undeleted group, but
         * this is better than deleting groups last and deleting a group created in the same cycle.
         */
        if (manifest.groupDeletes.size() > 0) {

            /**
             * While we iterate on the set, remove items from the manifest as they are processed,
             * hence make a copy first.
             */
            HashSet<Integer> delete_copy = new HashSet<Integer>();
            delete_copy.addAll(manifest.groupDeletes);

            for (int group_id : delete_copy) {

                if (MyGroups.validGroupId(group_id))
                    MyGroups.deleteGroup(ctx, group_id, syncTransactionIsFalse);

                // Iterate on the copy and remove deletes from the manifest
                boolean success = manifest.groupDeletes.remove(group_id);
                if (!success)
                    throw new RuntimeException(
                            "incSyncPutIncrement, group_id not found in manifest: " + group_id);
            }
        }

        if (syncDataObj != null && manifest.contactInserts.size() > 0) {

            JSONObject inserts = syncDataObj.getJSONObject(CConst.INSERTS);

            HashSet<Long> inserts_copy = new HashSet<Long>();
            inserts_copy.addAll(manifest.contactInserts);

            for (long contact_id : inserts_copy) {

                try {
                    String str_contact_id = String.valueOf(contact_id);
                    if (inserts.has(str_contact_id))
                        createOrUpdateContact(inserts.getJSONObject(str_contact_id));
                    else
                        continue;
                } catch (UnsupportedEncodingException e) {
                    e.printStackTrace();
                }

                boolean success = manifest.contactInserts.remove(contact_id);
                if (!success)
                    throw new RuntimeException(
                            "incSyncPutIncrement, insert contact_id not found in manifest: " + contact_id);
            }
        }

        if (syncDataObj != null && manifest.contactUpdates.size() > 0) {

            JSONObject updates = syncDataObj.getJSONObject(CConst.UPDATES);

            HashSet<Long> updates_copy = new HashSet<Long>();
            updates_copy.addAll(manifest.contactUpdates);

            for (long contact_id : updates_copy) {

                try {
                    String str_contact_id = String.valueOf(contact_id);
                    if (updates.has(str_contact_id))
                        createOrUpdateContact(updates.getJSONObject(str_contact_id));
                    else
                        continue;
                } catch (UnsupportedEncodingException e) {
                    e.printStackTrace();
                }

                boolean success = manifest.contactUpdates.remove(contact_id);
                if (!success)
                    throw new RuntimeException(
                            "incSyncPutIncrement, update contact_id not found in manifest: " + contact_id);
            }
        }

        if (syncDataObj != null && manifest.crypSync.size() > 0) {

            // LogUtil.log("incSyncPutIncrement, manifest: " + manifest.report());
            // LogUtil.log("incSyncPutIncrement, syncDataObj: " + syncDataObj.toString());

            JSONObject cryp_sync = syncDataObj.getJSONObject(CConst.CRYP_SYNC);

            HashSet<Integer> cryp_sync_copy = new HashSet<Integer>();
            cryp_sync_copy.addAll(manifest.crypSync);

            for (int ord : cryp_sync_copy) {

                String key = SqlIncSync.INC_SYNC_TYPE.values()[ord].toString();
                String value = cryp_sync.getString(key);
                putCryp(key, value);

                boolean success = manifest.crypSync.remove(ord);
                if (!success)
                    throw new RuntimeException("incSyncPutIncrement, crypSync not found in manifest: " + ord);
            }
        }
    } catch (JSONException e) {
        e.printStackTrace();
    }

    return manifest;
}
From source file:com.android.leanlauncher.Workspace.java
public void disableShortcutsByPackageName(final ArrayList<String> packages, final UserHandleCompat user,
        final int reason) {
    final HashSet<String> packageNames = new HashSet<String>();
    packageNames.addAll(packages);

    mapOverItems(MAP_RECURSE, new ItemOperator() {
        @Override
        public boolean evaluate(ItemInfo info, View v, View parent) {
            if (info instanceof ShortcutInfo && v instanceof BubbleTextView) {
                ShortcutInfo shortcutInfo = (ShortcutInfo) info;
                ComponentName cn = shortcutInfo.getTargetComponent();
                if (user.equals(shortcutInfo.user) && cn != null
                        && packageNames.contains(cn.getPackageName())) {
                    shortcutInfo.isDisabled |= reason;
                    BubbleTextView shortcut = (BubbleTextView) v;
                    shortcut.applyFromShortcutInfo(shortcutInfo, mIconCache, true, true);

                    if (parent != null) {
                        parent.invalidate();
                    }
                }
            }
            // process all the shortcuts
            return false;
        }
    });
}
From source file:com.android.leanlauncher.Workspace.java
void removeItemsByPackageName(final ArrayList<String> packages, final UserHandleCompat user) {
    final HashSet<String> packageNames = new HashSet<String>();
    packageNames.addAll(packages);

    // Filter out all the ItemInfos that this is going to affect
    final HashSet<ItemInfo> infos = new HashSet<ItemInfo>();
    final HashSet<ComponentName> cns = new HashSet<ComponentName>();
    ViewGroup layout = mWorkspace.getShortcutsAndWidgets();
    int childCount = layout.getChildCount();
    for (int i = 0; i < childCount; ++i) {
        View view = layout.getChildAt(i);
        infos.add((ItemInfo) view.getTag());
    }

    LauncherModel.ItemInfoFilter filter = new LauncherModel.ItemInfoFilter() {
        @Override
        public boolean filterItem(ItemInfo parent, ItemInfo info, ComponentName cn) {
            if (packageNames.contains(cn.getPackageName()) && info.user.equals(user)) {
                cns.add(cn);
                return true;
            }
            return false;
        }
    };
    LauncherModel.filterItemInfos(infos, filter);

    // Remove the affected components
    removeItemsByComponentName(cns, user);
}
From source file:com.squid.kraken.v4.core.analysis.engine.project.DynamicManager.java
public void loadDomainDynamicContent(Space space, DomainContent content) {
    Universe univ = space.getUniverse();
    Domain domain = space.getDomain();
    // check if the domain is in legacy mode (i.e. default is to hide dynamic)
    boolean isDomainLegacyMode = domain.getInternalVersion() == null;
    // domainInternalDefautDynamic flag: if legacy mode, hide dynamic object is the default
    boolean domainInternalDefautDynamic = isDomainLegacyMode ? true : false;
    // coverage Set: columns already available through defined dimensions
    HashSet<Column> coverage = new HashSet<Column>();
    HashSet<ExpressionAST> metricCoverage = new HashSet<ExpressionAST>();
    HashSet<Space> neighborhood = new HashSet<Space>();
    HashSet<String> checkName = new HashSet<String>();
    boolean isPeriodDefined = false;
    //
    String prefix = genDomainPrefixID(space.getUniverse().getProject(), domain);
    //
    // evaluate the concrete objects
    HashSet<String> ids = new HashSet<String>();
    // T446: must define the scope incrementally and override the universe
    ArrayList<ExpressionObject<?>> scope = new ArrayList<ExpressionObject<?>>();
    //
    // sort by level (0 first, ...)
    List<ExpressionObject<?>> concrete = new ArrayList<ExpressionObject<?>>();
    concrete.addAll(content.getDimensions());
    concrete.addAll(content.getMetrics());
    Collections.sort(concrete, new LevelComparator<ExpressionObject<?>>());
    // failed List: keep track of failed evaluations, will try again later
    List<ExpressionObject<?>> failed = new ArrayList<ExpressionObject<?>>();
    for (ExpressionObject<?> object : concrete) {
        if (object.getName() != null) {
            checkName.add(object.getName());
        }
        if (object instanceof Dimension) {
            // handle Dimension
            Dimension dimension = (Dimension) object;
            try {
                if (dimension.getId() != null) {
                    ids.add(dimension.getId().getDimensionId());
                    // add also the canonical ID
                    if (dimension.getExpression() != null && dimension.getExpression().getValue() != null) {
                        ids.add(digest(prefix + dimension.getExpression().getValue()));
                    }
                    // add also the Axis ID
                    ids.add(space.A(dimension).getId());
                }
                ExpressionAST expr = parseResilient(univ, domain, dimension, scope);
                scope.add(object);
                IDomain image = expr.getImageDomain();
                dimension.setImageDomain(image);
                dimension.setValueType(computeValueType(image));
                if (expr instanceof ColumnReference) {
                    ColumnReference ref = (ColumnReference) expr;
                    if (ref.getColumn() != null) {
                        coverage.add(ref.getColumn());
                    }
                } else if (image.isInstanceOf(IDomain.OBJECT)) {
                    // it's a sub-domain, we build the space to connect and will dedup for dynamics
                    Space path = space.S(expr);
                    neighborhood.add(path);
                }
                if (dimension.getType() == Type.CONTINUOUS && image.isInstanceOf(IDomain.TEMPORAL)) {
                    isPeriodDefined = true;
                }
            } catch (ScopeException e) {
                // invalid expression, just keep it
                if (logger.isDebugEnabled()) {
                    logger.debug(("Invalid Dimension '" + domain.getName() + "'.'" + dimension.getName()
                            + "' definition: " + e.getLocalizedMessage()));
                }
                failed.add(object);
            }
        } else if (object instanceof Metric) {
            // handle Metric
            Metric metric = (Metric) object;
            try {
                if (metric.getId() != null) {
                    ids.add(metric.getId().getMetricId());
                }
                if (metric.getExpression() != null) {
                    ExpressionAST expr = parseResilient(univ, domain, metric, scope);
                    scope.add(object);
                    metricCoverage.add(expr);
                }
            } catch (ScopeException e) {
                // invalid expression, just keep it
                if (logger.isDebugEnabled()) {
                    logger.debug(("Invalid Metric '" + domain.getName() + "'.'" + metric.getName()
                            + "' definition: " + e.getLocalizedMessage()));
                }
                failed.add(object);
            }
        }
    }
    //
    // exclude keys
    HashSet<Column> keys = new HashSet<Column>();
    // filter out the primary-key
    try {
        Index pk = space.getTable().getPrimaryKey();
        if (pk != null) {
            for (Column col : pk.getColumns()) {
                keys.add(col);
            }
        }
    } catch (ScopeException e) {
        // ignore
    }
    // filter out the foreign-keys
    try {
        for (ForeignKey fk : space.getTable().getForeignKeys()) {
            for (KeyPair pair : fk.getKeys()) {
                keys.add(pair.getExported());
            }
        }
    } catch (ScopeException | ExecutionException e1) {
        // ignore
    }
    // filter out the relations ?
    ExtractColumns extractor = new ExtractColumns();
    List<Space> subspaces = Collections.emptyList();
    try {
        subspaces = space.S();
    } catch (ScopeException | ComputingException e1) {
        // ignore
    }
    for (Space next : subspaces) {
        Relation relation = next.getRelation();
        try {
            ExpressionAST expr = univ.getParser().parse(relation);
            List<Column> cols = extractor.apply(expr);
            keys.addAll(cols);
        } catch (ScopeException e) {
            // ignore
        }
    }
    //
    // populate dynamic dimensions
    List<RawDImension> periodCandidates = new ArrayList<RawDImension>();
    List<Column> columns = Collections.emptyList();
    try {
        columns = space.getTable().getColumns();
    } catch (ScopeException | ExecutionException e1) {
        // ignore
    }
    for (Column col : columns) {
        if (!keys.contains(col) && !coverage.contains(col) && includeColumnAsDimension(col)) {
            ColumnReference ref = new ColumnReference(col);
            String expr = ref.prettyPrint();
            DimensionPK id = new DimensionPK(domain.getId(), digest(prefix + expr));
            if (!ids.contains(id.getDimensionId())) {
                Type type = Type.INDEX;
                String name = checkName(normalizeObjectName(col.getName()), checkName);
                Dimension dim = new Dimension(id, name, type, new Expression(expr),
                        domainInternalDefautDynamic);
                if (col.getDescription() != null)
                    dim.setDescription(col.getDescription());
                dim.setImageDomain(col.getTypeDomain());
                dim.setValueType(computeValueType(col.getTypeDomain()));
                AccessRightsUtils.getInstance().setAccessRights(univ.getContext(), dim, domain);
                content.add(dim);
                checkName.add(name);
                if (col.getTypeDomain().isInstanceOf(IDomain.TEMPORAL) && !isPeriodDefined) {
                    periodCandidates.add(new RawDImension(col, dim));
                }
            }
        }
    }
    // relation and FK
    for (Space neighbor : subspaces) {
        if (neighbor.length() == 1 // build only direct paths (the facet will populate the others)
                && !neighborhood.contains(neighbor)) // dedup if already concrete, associated with the same path
        {
            Relation relation = neighbor.getRelation();
            try {
                RelationReference ref = new RelationReference(space.getUniverse(), relation, space.getDomain(),
                        neighbor.getDomain());
                if (useRelation(relation, ref)) {
                    checkName.add(ref.getReferenceName());
                    String expr = ref.prettyPrint() + ".$'SELF'"; // add the SELF parameter
                    DimensionPK id = new DimensionPK(domain.getId(), digest(prefix + expr));
                    if (!ids.contains(id.getDimensionId())) {
                        String name = ref.getReferenceName();
                        if (isDomainLegacyMode) {
                            name = checkName(">" + name, checkName);
                        } else {
                            // this is the new naming convention for sub-domains
                            name = checkName(name + " > ", checkName);
                        }
                        Dimension dim = new Dimension(id, name, Type.INDEX, new Expression(expr),
                                domainInternalDefautDynamic);
                        dim.setDescription("relation to " + neighbor.getDomain().getName());
                        dim.setValueType(ValueType.OBJECT);
                        dim.setImageDomain(ref.getImageDomain());
                        AccessRightsUtils.getInstance().setAccessRights(univ.getContext(), dim, domain);
                        content.add(dim);
                        checkName.add(name);
                    }
                }
            } catch (ScopeException e) {
                // ignore
            }
        }
    }
    //
    // populate dynamic metrics
    //
    // add count metric
    ExpressionAST count = ExpressionMaker.COUNT();
    if (!coverage.contains(count)) {
        Expression expr = new Expression(count.prettyPrint());
        MetricPK metricId = new MetricPK(domain.getId(), digest(prefix + expr.getValue()));
        if (!ids.contains(metricId.getMetricId())) { // check for natural definition
            String name = "COUNT " + domain.getName();
            name = checkName(name, checkName);
            Metric metric = new Metric(metricId, name, expr, domainInternalDefautDynamic);
            metric.setDescription(domain.getName() + " count");
            AccessRightsUtils.getInstance().setAccessRights(univ.getContext(), metric, domain);
            content.add(metric);
            checkName.add(name);
        }
    }
    //
    for (Column col : columns) {
        if (col.getTypeDomain().isInstanceOf(IDomain.NUMERIC)) {
            if (!keys.contains(col)) {
                ExpressionAST total = ExpressionMaker.SUM(new ColumnDomainReference(space, col));
                if (!coverage.contains(total)) {
                    Expression expr = new Expression(total.prettyPrint());
                    MetricPK metricId = new MetricPK(domain.getId(), digest(prefix + expr.getValue()));
                    if (!ids.contains(metricId.getMetricId())) { // check for natural definition
                        String name = "SUM " + normalizeObjectName(col.getName());
                        name = checkName(name, checkName);
                        Metric metric = new Metric(metricId, name, expr, domainInternalDefautDynamic);
                        if (col.getDescription() != null)
                            metric.setDescription(col.getDescription());
                        AccessRightsUtils.getInstance().setAccessRights(univ.getContext(), metric, domain);
                        content.add(metric);
                        checkName.add(name);
                    }
                }
            }
        }
    }
    //
    // try to recover failed ones
    for (ExpressionObject<?> object : failed) {
        try {
            if (object.getExpression() != null) {
                if (object instanceof Dimension) {
                    // handle Dimension
                    Dimension dimension = (Dimension) object;
                    ExpressionAST expr = parseResilient(univ, domain, dimension, scope);
                    scope.add(object);
                    IDomain image = expr.getImageDomain();
                    dimension.setImageDomain(image);
                    dimension.setValueType(computeValueType(image));
                } else if (object instanceof Metric) {
                    // handle Metric
                    Metric metric = (Metric) object;
                    ExpressionAST expr = parseResilient(univ, domain, metric, scope);
                    scope.add(object);
                    IDomain image = expr.getImageDomain();
                    metric.setImageDomain(image);
                    metric.setValueType(computeValueType(image));
                }
            }
        } catch (ScopeException | CyclicDependencyException e) {
            // set as permanent error
        }
    }
    //
    // select a Period if needed
    boolean isFact = isFactDomain(univ.getContext(), domain.getId());
    boolean needPeriod = !isPeriodDefined // if already defined, that's fine
            && isFact // it must be a fact table, if not there is a good chance to pollute
            && content.getMetrics().size() > 1; // and we want at least a metric different than COUNT()
    // select the period
    if (needPeriod && !periodCandidates.isEmpty()) {
        DimensionPeriodSelector selector = new DimensionPeriodSelector(space.getUniverse());
        RawDImension candidate = selector.selectPeriod(periodCandidates);
        if (candidate != null) {
            candidate.dim.setType(Type.CONTINUOUS);
        }
    }
}
From source file:org.quantumbadger.redreader.reddit.api.RedditAPIIndividualSubredditListRequester.java
private void doSubredditListRequest(final RedditSubredditManager.SubredditListType type,
        final RequestResponseHandler<WritableHashSet, SubredditRequestFailure> handler, final String after) {

    URI uri;
    switch (type) {
    case SUBSCRIBED:
        uri = Constants.Reddit.getUri(Constants.Reddit.PATH_SUBREDDITS_MINE_SUBSCRIBER);
        break;
    case MODERATED:
        uri = Constants.Reddit.getUri(Constants.Reddit.PATH_SUBREDDITS_MINE_MODERATOR);
        break;
    case MOST_POPULAR:
        uri = Constants.Reddit.getUri(Constants.Reddit.PATH_SUBREDDITS_POPULAR);
        break;
    default:
        throw new UnexpectedInternalStateException(type.name());
    }

    if (after != null) {
        // TODO move this logic to General?
        final Uri.Builder builder = Uri.parse(uri.toString()).buildUpon();
        builder.appendQueryParameter("after", after);
        uri = General.uriFromString(builder.toString());
    }

    final CacheRequest aboutSubredditCacheRequest = new CacheRequest(uri, user, null,
            Constants.Priority.API_SUBREDDIT_INVIDIVUAL, 0, CacheRequest.DownloadType.FORCE,
            Constants.FileType.SUBREDDIT_LIST, true, true, false, context) {

        @Override
        protected void onCallbackException(Throwable t) {
            handler.onRequestFailed(
                    new SubredditRequestFailure(RequestFailureType.PARSE, t, null, "Internal error", url));
        }

        @Override
        protected void onDownloadNecessary() {
        }

        @Override
        protected void onDownloadStarted() {
        }

        @Override
        protected void onProgress(final boolean authorizationInProgress, long bytesRead, long totalBytes) {
        }

        @Override
        protected void onFailure(RequestFailureType type, Throwable t, StatusLine status,
                String readableMessage) {
            handler.onRequestFailed(
                    new SubredditRequestFailure(type, t, status, readableMessage, url.toString()));
        }

        @Override
        protected void onSuccess(CacheManager.ReadableCacheFile cacheFile, long timestamp, UUID session,
                boolean fromCache, String mimetype) {
        }

        @Override
        public void onJsonParseStarted(JsonValue result, long timestamp, UUID session, boolean fromCache) {

            try {
                final HashSet<String> output = new HashSet<String>();
                final ArrayList<RedditSubreddit> toWrite = new ArrayList<RedditSubreddit>();

                final JsonBufferedObject redditListing = result.asObject().getObject("data");
                final JsonBufferedArray subreddits = redditListing.getArray("children");

                final JsonBuffered.Status joinStatus = subreddits.join();
                if (joinStatus == JsonBuffered.Status.FAILED) {
                    handler.onRequestFailed(new SubredditRequestFailure(RequestFailureType.PARSE, null, null,
                            "Unknown parse error", url.toString()));
                    return;
                }

                if (type == RedditSubredditManager.SubredditListType.SUBSCRIBED
                        && subreddits.getCurrentItemCount() == 0 && after == null) {
                    doSubredditListRequest(RedditSubredditManager.SubredditListType.DEFAULTS, handler, null);
                    return;
                }

                for (final JsonValue v : subreddits) {
                    final RedditThing thing = v.asObject(RedditThing.class);
                    final RedditSubreddit subreddit = thing.asSubreddit();
                    subreddit.downloadTime = timestamp;
                    toWrite.add(subreddit);
                    output.add(subreddit.getCanonicalName());
                }

                RedditSubredditManager.getInstance(context, user).offerRawSubredditData(toWrite, timestamp);
                final String receivedAfter = redditListing.getString("after");
                if (receivedAfter != null && type != RedditSubredditManager.SubredditListType.MOST_POPULAR) {

                    doSubredditListRequest(type,
                            new RequestResponseHandler<WritableHashSet, SubredditRequestFailure>() {
                                public void onRequestFailed(SubredditRequestFailure failureReason) {
                                    handler.onRequestFailed(failureReason);
                                }

                                public void onRequestSuccess(WritableHashSet result, long timeCached) {
                                    output.addAll(result.toHashset());
                                    handler.onRequestSuccess(
                                            new WritableHashSet(output, timeCached, type.name()), timeCached);

                                    if (after == null) {
                                        Log.i("SubredditListRequester",
                                                "Got " + output.size() + " subreddits in multiple requests");
                                    }
                                }
                            }, receivedAfter);

                } else {
                    handler.onRequestSuccess(new WritableHashSet(output, timestamp, type.name()), timestamp);

                    if (after == null) {
                        Log.i("SubredditListRequester", "Got " + output.size() + " subreddits in 1 request");
                    }
                }

            } catch (Exception e) {
                handler.onRequestFailed(new SubredditRequestFailure(RequestFailureType.PARSE, e, null,
                        "Parse error", url.toString()));
            }
        }
    };

    CacheManager.getInstance(context).makeRequest(aboutSubredditCacheRequest);
}