List of usage examples for com.google.common.collect Multimap putAll
boolean putAll(Multimap<? extends K, ? extends V> multimap);
From source file:com.flexive.core.search.genericSQL.GenericSQLDataFilter.java
/**
 * Collects the content tables referenced by the given (sub-)condition.
 *
 * <p>The returned multimap maps each (SQL) table name to information about the
 * modes in which that table is accessed. Depending on the number of different
 * access modes (e.g. multilang/no multilang), a query may or may not be optimized.
 *
 * @param br      the condition brace to inspect
 * @param recurse whether to descend into nested braces or process only the given one
 * @return table name mapped to the access-mode infos collected for it
 * @throws FxSqlSearchException on errors
 */
private Multimap<String, ConditionTableInfo> getUsedContentTables(Brace br, boolean recurse)
        throws FxSqlSearchException {
    final Multimap<String, ConditionTableInfo> result = HashMultimap.create();
    for (BraceElement element : br.getElements()) {
        if (element instanceof Condition) {
            final Pair<String, ConditionTableInfo> info = getPropertyInfo((Condition) element);
            result.put(info.getFirst(), info.getSecond());
        } else if (recurse && element instanceof Brace) {
            // nested sub-condition: merge its tables into ours
            result.putAll(getUsedContentTables((Brace) element, true));
        }
    }
    return result;
}
From source file:com.cloudant.sync.datastore.BasicDatastore.java
@Override public Map<String, Collection<String>> revsDiff(Multimap<String, String> revisions) { Preconditions.checkState(this.isOpen(), "Database is closed"); Preconditions.checkNotNull(revisions, "Input revisions must not be null"); Multimap<String, String> missingRevs = ArrayListMultimap.create(); // Break the potentially big multimap into small ones so for each map, // a single query can be use to check if the <id, revision> pairs in sqlDb or not List<Multimap<String, String>> batches = this.multiMapPartitions(revisions, SQLITE_QUERY_PLACEHOLDERS_LIMIT); for (Multimap<String, String> batch : batches) { this.revsDiffBatch(batch); missingRevs.putAll(batch); }/*from w w w . ja v a2 s .co m*/ return missingRevs.asMap(); }
From source file:elaborate.editor.publish.PublishTask.java
/**
 * Builds and deploys a "draft" publication of the given project: prepares a fresh
 * Solr index, exports per-entry JSON data plus annotation/thumbnail indexes,
 * packages everything into a WAR and deploys it.
 *
 * @param project the project to publish
 * @param ps      the project service used to load entries and metadata
 * @return the base URL the draft WAR was deployed to
 */
private String createRegularDraft(final Project project, final ProjectService ps) {
    prepareDirectories();
    status.addLogline("setting up new solr index");
    prepareSolr();
    // these 2 use transaction explicitly
    final List<String> projectEntryMetadataFields = getProjectEntryMetadataFields(ps);
    annotationDataMap = filterOnPublishableAnnotationTypes(ps.getAnnotationDataForProject(projectId),
            settings.getAnnotationTypeIds());
    // the rest don't (see TODO)
    ps.setEntityManager(entityManager);
    final Map<String, String> typographicalAnnotationMap = getTypographicalAnnotationMap(project);
    final Collection<String> multivaluedFacetNames = getFacetsToSplit(project);
    final List<ProjectEntry> projectEntriesInOrder = ps.getProjectEntriesInOrder(projectId);
    int entryNum = 1;
    final List<EntryData> entryData = Lists.newArrayList();
    final Map<Long, List<String>> thumbnails = Maps.newHashMap();
    final Multimap<String, AnnotationIndexData> annotationIndex = ArrayListMultimap.create();
    // Multivalued metadata fields are configured as a single ';'-separated string.
    final String value = project.getMetadataMap().get(ProjectMetadataFields.MULTIVALUED_METADATA_FIELDS);
    final String[] multivaluedMetadataFields = value != null ? value.split(";") : new String[] {};
    for (final ProjectEntry projectEntry : projectEntriesInOrder) {
        // Only publishable entries are exported and indexed.
        if (projectEntry.isPublishable()) {
            status.addLogline(MessageFormat.format("exporting entry {0,number,#}: \"{1}\"", entryNum,
                    projectEntry.getName()));
            final ExportedEntryData eed = exportEntryData(projectEntry, entryNum++, projectEntryMetadataFields,
                    typographicalAnnotationMap);
            final long id = projectEntry.getId();
            final Multimap<String, String> multivaluedFacetValues = getMultivaluedFacetValues(
                    multivaluedMetadataFields, projectEntry);
            // Each entry's data ends up in its own <id>.json file.
            final String datafile = id + ".json";
            entryData.add(new EntryData(id, projectEntry.getName(), projectEntry.getShortName(), datafile,
                    multivaluedFacetValues));
            thumbnails.put(id, eed.thumbnailUrls);
            annotationIndex.putAll(eed.annotationDataMap);
            indexEntry(projectEntry, multivaluedFacetNames);
        }
    }
    commitAndCloseSolr();
    // NOTE(review): method name has a typo ("Poject"); renaming would touch its
    // declaration elsewhere, so it is left as-is here.
    exportPojectData(entryData, thumbnails, annotationIndex);
    final String basename = getBasename(project);
    final String url = getBaseURL(project.getName());
    final List<String> facetableProjectEntryMetadataFields = getFacetableProjectEntryMetadataFields(ps);
    exportSearchConfig(project, facetableProjectEntryMetadataFields, multivaluedFacetNames, url);
    exportBuildDate();
    exportLoggingProperties(basename);
    // FIXME: fix, error with the "ystroom" project (translated from Dutch)
    if (entityManager.isOpen()) {
        entityManager.close();
    }
    status.addLogline("generating war file " + basename + ".war");
    final File war = new WarMaker(basename, distDir, rootDir).make();
    status.addLogline("deploying war to " + url);
    deploy(war);
    status.addLogline("cleaning up temporary directories");
    clearDirectories();
    return url;
}
From source file:com.cloudant.sync.datastore.DatastoreImpl.java
/** * Returns the subset of given the document id/revisions that are not stored in the database. * * The input revisions is a map, whose key is document id, and value is a list of revisions. * An example input could be (in json format): * * { "03ee06461a12f3c288bb865b22000170": * [//from w w w . j a v a 2 s . co m * "1-b2e54331db828310f3c772d6e042ac9c", * "2-3a24009a9525bde9e4bfa8a99046b00d" * ], * "82e04f650661c9bdb88c57e044000a4b": * [ * "3-bb39f8c740c6ffb8614c7031b46ac162" * ] * } * * The output is in same format. * * @see <a href="http://wiki.apache.org/couchdb/HttpPostRevsDiff">HttpPostRevsDiff documentation</a> * @param revisions a Multimap of document id revision id * @return a Map of document id collection of revision id: the subset of given the document * id/revisions that are not stored in the database */ public Map<String, Collection<String>> revsDiff(final Multimap<String, String> revisions) { Preconditions.checkState(this.isOpen(), "Database is closed"); Preconditions.checkNotNull(revisions, "Input revisions must not be null"); try { return queue.submit(new SQLQueueCallable<Map<String, Collection<String>>>() { @Override public Map<String, Collection<String>> call(SQLDatabase db) throws Exception { Multimap<String, String> missingRevs = ArrayListMultimap.create(); // Break the potentially big multimap into small ones so for each map, // a single query can be use to check if the <id, revision> pairs in sqlDb or not List<Multimap<String, String>> batches = multiMapPartitions(revisions, SQLITE_QUERY_PLACEHOLDERS_LIMIT); for (Multimap<String, String> batch : batches) { revsDiffBatch(db, batch); missingRevs.putAll(batch); } return missingRevs.asMap(); } }).get(); } catch (InterruptedException e) { logger.log(Level.SEVERE, "Failed to do revsdiff", e); } catch (ExecutionException e) { logger.log(Level.SEVERE, "Failed to do revsdiff", e); } return null; }
From source file:com.kolich.curacao.mappers.MapperTable.java
private final ImmutableMultimap<Class<?>, ControllerArgumentMapper<?>> buildArgumentMapperTable( final Set<Class<?>> mapperSet) { // Using a LinkedHashMultimap internally because insertion order is // very important in this case. final Multimap<Class<?>, ControllerArgumentMapper<?>> mappers = LinkedHashMultimap.create(); // Preserves order // Filter the incoming mapper set to only argument mappers. final Set<Class<?>> filtered = Sets.filter(mapperSet, Predicates.assignableFrom(ControllerArgumentMapper.class)); logger__.debug("Found {} argument mappers annotated with @{}", filtered.size(), MAPPER_ANNOTATION_SN); // For each discovered mapper class... for (final Class<?> mapper : filtered) { logger__.debug("Found @{}: argument mapper {}", MAPPER_ANNOTATION_SN, mapper.getCanonicalName()); try {/*w ww.java2 s. co m*/ // Locate a single constructor worthy of injecting with // components, if any. May be null. final Constructor<?> ctor = getInjectableConstructor(mapper); ControllerArgumentMapper<?> instance = null; if (ctor == null) { // Class.newInstance() is evil, so we do the ~right~ thing // here to instantiate a new instance of the mapper using // the preferred getConstructor() idiom. instance = (ControllerArgumentMapper<?>) mapper.getConstructor().newInstance(); } else { final Class<?>[] types = ctor.getParameterTypes(); final Object[] params = new Object[types.length]; for (int i = 0, l = types.length; i < l; i++) { params[i] = componentTable_.getComponentForType(types[i]); } instance = (ControllerArgumentMapper<?>) ctor.newInstance(params); } // Note the key in the map is the parameterized generic type // hanging off the mapper. mappers.put(getGenericType(mapper), instance); } catch (Exception e) { logger__.error("Failed to instantiate mapper instance: {}", mapper.getCanonicalName(), e); } } // Add the "default" mappers to the ~end~ of the immutable hash multi map. 
// This essentially means that default argument mappers (the ones // provided by this library) are found & called after any user registered // mappers. mappers.putAll(defaultArgMappers__); return ImmutableMultimap.copyOf(mappers); }
From source file:org.wso2.appcloud.core.DomainMappingManager.java
/** * Resolve CNAME and A records for the given {@code hostname}. * * @param domain hostname to be resolved. * @param environmentConfigs environment configuration * @return {@link com.google.common.collect.Multimap} of resolved dns entries. This {@link com.google.common.collect.Multimap} will contain the resolved * "CNAME" and "A" records from the given {@code hostname} * @throws AppCloudException if error occurred while the operation *//* ww w .j av a2s . com*/ public Multimap<String, String> resolveDNS(String domain, Hashtable<String, String> environmentConfigs) throws AppCloudException, NamingException { // result mutimap of dns records. Contains the cname and records resolved by the given hostname // ex: CNAME => foo.com,bar.com // A => 192.1.2.3 , 192.3.4.5 Multimap<String, String> dnsRecordsResult = ArrayListMultimap.create(); Attributes dnsRecords; boolean isARecordFound = false; boolean isCNAMEFound = false; try { if (log.isDebugEnabled()) { log.debug("DNS validation: resolving DNS for " + domain + " " + "(A/CNAME)"); } DirContext context = new InitialDirContext(environmentConfigs); String[] dnsRecordsToCheck = new String[] { DNS_A_RECORD, DNS_CNAME_RECORD }; dnsRecords = context.getAttributes(domain, dnsRecordsToCheck); } catch (NamingException e) { String msg = "DNS validation: DNS query failed for: " + domain + ". 
Error occurred while configuring " + "directory context."; log.error(msg, e); throw new AppCloudException(msg, e); } try { // looking for for A records Attribute aRecords = dnsRecords.get(DNS_A_RECORD); if (aRecords != null && aRecords.size() > 0) { // if an A record exists NamingEnumeration aRecordHosts = aRecords.getAll(); // get all resolved A entries String aHost; while (aRecordHosts.hasMore()) { isARecordFound = true; aHost = (String) aRecordHosts.next(); dnsRecordsResult.put(DNS_A_RECORD, aHost); if (log.isDebugEnabled()) { log.debug("DNS validation: A record found: " + aHost); } } } // looking for CNAME records Attribute cnameRecords = dnsRecords.get(DNS_CNAME_RECORD); if (cnameRecords != null && cnameRecords.size() > 0) { // if CNAME record exists NamingEnumeration cnameRecordHosts = cnameRecords.getAll(); // get all resolved CNAME entries for hostname String cnameHost; while (cnameRecordHosts.hasMore()) { isCNAMEFound = true; cnameHost = (String) cnameRecordHosts.next(); if (cnameHost.endsWith(".")) { // Since DNS records are end with "." we are removing it. // For example real dns entry for www.google.com is www.google.com. cnameHost = cnameHost.substring(0, cnameHost.lastIndexOf('.')); } dnsRecordsResult.put(DNS_CNAME_RECORD, cnameHost); if (log.isDebugEnabled()) { log.debug("DNS validation: recurring on CNAME record towards host " + cnameHost); } dnsRecordsResult.putAll(resolveDNS(cnameHost, environmentConfigs)); // recursively resolve cnameHost } } if (!isARecordFound && !isCNAMEFound && log.isDebugEnabled()) { log.debug("DNS validation: No CNAME or A record found for domain: '" + domain); } return dnsRecordsResult; } catch (NamingException ne) { String msg = "DNS validation: DNS query failed for: " + domain + ". Provided domain: " + domain + " might be a " + "non existing domain."; // we are logging this as warn messages since this is caused, due to an user error. 
For example if the // user entered a rubbish custom url(Or a url which is, CNAME record is not propagated at the // time of adding the url), then url validation will fail but it is not an system error log.warn(msg, ne); throw new NamingException(msg); } }
From source file:org.wso2.carbon.appfactory.s4.integration.DomainMappingManagementService.java
/**
 * Resolve CNAME and A records for the given {@code domain}.
 *
 * @param domain             hostname to be resolved
 * @param environmentConfigs environment configuration for the JNDI directory context
 * @return {@link com.google.common.collect.Multimap} of resolved DNS entries,
 *         containing the resolved "CNAME" and "A" records for the given domain
 * @throws AppFactoryException if the directory context could not be configured/queried
 * @throws DomainMappingVerificationException if the lookup of individual records fails
 */
public Multimap<String, String> resolveDNS(String domain, Hashtable<String, String> environmentConfigs)
        throws AppFactoryException, DomainMappingVerificationException {
    // Result multimap of DNS records: contains the CNAME and A records resolved
    // for the given hostname, e.g.
    //   CNAME => foo.com, bar.com
    //   A     => 192.1.2.3, 192.3.4.5
    Multimap<String, String> dnsRecordsResult = ArrayListMultimap.create();
    Attributes dnsRecords;
    boolean isARecordFound = false;
    boolean isCNAMEFound = false;
    try {
        if (log.isDebugEnabled()) {
            log.debug("DNS validation: resolving DNS for " + domain + " " + "(A/CNAME)");
        }
        DirContext context = new InitialDirContext(environmentConfigs);
        String[] dnsRecordsToCheck = new String[] { DNS_A_RECORD, DNS_CNAME_RECORD };
        dnsRecords = context.getAttributes(domain, dnsRecordsToCheck);
    } catch (NamingException e) {
        String msg = "DNS validation: DNS query failed for: " + domain + ". Error occurred while configuring "
                + "directory context.";
        log.error(msg, e);
        throw new AppFactoryException(msg, e);
    }
    try {
        // looking for A records
        Attribute aRecords = dnsRecords.get(DNS_A_RECORD);
        if (aRecords != null && aRecords.size() > 0) { // if an A record exists
            NamingEnumeration aRecordHosts = aRecords.getAll(); // get all resolved A entries
            String aHost;
            while (aRecordHosts.hasMore()) {
                isARecordFound = true;
                aHost = (String) aRecordHosts.next();
                dnsRecordsResult.put(DNS_A_RECORD, aHost);
                if (log.isDebugEnabled()) {
                    log.debug("DNS validation: A record found: " + aHost);
                }
            }
        }
        // looking for CNAME records
        Attribute cnameRecords = dnsRecords.get(DNS_CNAME_RECORD);
        if (cnameRecords != null && cnameRecords.size() > 0) { // if CNAME record exists
            NamingEnumeration cnameRecordHosts = cnameRecords.getAll(); // get all resolved CNAME entries
            String cnameHost;
            while (cnameRecordHosts.hasMore()) {
                isCNAMEFound = true;
                cnameHost = (String) cnameRecordHosts.next();
                if (cnameHost.endsWith(".")) {
                    // Since DNS records end with "." we remove it; e.g. the real DNS
                    // entry for www.google.com is "www.google.com."
                    cnameHost = cnameHost.substring(0, cnameHost.lastIndexOf('.'));
                }
                dnsRecordsResult.put(DNS_CNAME_RECORD, cnameHost);
                if (log.isDebugEnabled()) {
                    log.debug("DNS validation: recurring on CNAME record towards host " + cnameHost);
                }
                // NOTE(review): a CNAME cycle in DNS would make this recursion loop
                // until a lookup fails — consider adding a depth limit.
                dnsRecordsResult.putAll(resolveDNS(cnameHost, environmentConfigs)); // recursively resolve cnameHost
            }
        }
        if (!isARecordFound && !isCNAMEFound && log.isDebugEnabled()) {
            log.debug("DNS validation: No CNAME or A record found for domain: '" + domain);
        }
        return dnsRecordsResult;
    } catch (NamingException ne) {
        String msg = "DNS validation: DNS query failed for: " + domain + ". Provided domain: " + domain
                + " might be a " + "non existing domain.";
        // Logged as a warning since this is typically caused by user error: e.g. a
        // rubbish custom URL, or a URL whose CNAME record had not propagated at the
        // time the URL was added. URL validation fails, but it is not a system error.
        log.warn(msg, ne);
        throw new DomainMappingVerificationException(msg, ne);
    }
}
From source file:ch.ethz.bsse.quasirecomb.model.Preprocessing.java
/**
 * Entry point. Forwards invocations of the specified workflow: parses the input,
 * aligns the reads, optionally runs a bootstrap over the model selection, then
 * performs the final model selection and sampling.
 *
 * @param input path to the fasta file
 * @param Kmin  minimal amount of generators
 * @param Kmax  maximal amount of generators
 */
public static void workflow(String input, int Kmin, int Kmax) {
    Utils.mkdir(Globals.getINSTANCE().getSAVEPATH() + "support");
    // parse file
    StatusUpdate.getINSTANCE().print("Parsing");
    Read[] reads = Utils.parseInput(input);
    int L = fixAlignment(reads);
    // Long alignments get a fixed mu interpolation — presumably a heuristic for
    // amplicon length; TODO confirm intent.
    if (L > 300 && Globals.getINSTANCE().getINTERPOLATE_MU() > 0) {
        Globals.getINSTANCE().setINTERPOLATE_MU(.5);
    }
    int[][] alignment = computeAlignment(reads, L);
    StatusUpdate.getINSTANCE().println("Paired reads\t" + Globals.getINSTANCE().getPAIRED_COUNT());
    computeInsertDist(reads);
    StatusUpdate.getINSTANCE().println("Merged reads\t" + Globals.getINSTANCE().getMERGED() + "\n");
    printAlignment(reads);
    circos(L, alignment);
    if (Globals.getINSTANCE().isDEBUG()) {
        new File(Globals.getINSTANCE().getSAVEPATH() + "support/log/").mkdirs();
    }
    int n = countChars(reads);
    Globals.getINSTANCE().setTAU_OMEGA(reads, L);
    // N = total count of reads with a non-empty watson strand.
    int N = 0;
    for (Read r : reads) {
        if (r.getWatsonLength() > 0) {
            N += r.getCount();
        }
    }
    plot();
    if (Globals.getINSTANCE().isBOOTSTRAP()) {
        String[] truth = FastaParser.parseFarFile(Globals.getINSTANCE().getPRIOR());
        Multimap<Integer, Double> bics = ArrayListMultimap.create();
        // Expand the reads into one entry per copy so bootstrap resampling is uniform.
        List<Read> readList = Lists.newArrayListWithExpectedSize(N);
        int x = 0; // NOTE(review): unused variable, kept for byte-compatibility
        for (Read r : reads) {
            int count = r.getCount();
            for (int i = 0; i < count; i++) {
                readList.add(new Read(r));
            }
            // r.setCount(1);
        }
        Read[] readArray = readList.toArray(new Read[readList.size()]);
        // 100 bootstrap replicates: resample N reads with replacement, run model
        // selection on each replicate, and record frequencies / pi / BIC values.
        for (int i = 0; i < 100; i++) {
            StatusUpdate.getINSTANCE().println("Bootstrap " + i + "\n");
            Map<Integer, Read> hashed = new HashMap<>();
            Random rnd = new Random();
            for (int y = 0; y < N; y++) {
                Read r = readArray[rnd.nextInt(N)];
                // Reads are deduplicated via hashCode; colliding reads share a
                // counter — assumes Read.hashCode reflects read identity; verify.
                int hash = r.hashCode();
                if (hashed.containsKey(hash)) {
                    hashed.get(hash).incCount();
                } else {
                    hashed.put(r.hashCode(), r);
                }
            }
            Read[] rs = hashed.values().toArray(new Read[hashed.size()]);
            ModelSelection ms = new ModelSelection(rs, Kmin, Kmax, L, n);
            OptimalResult or = ms.getOptimalResult();
            // Collapse pi over all rows, then normalize by the grand total.
            double[] pi = new double[or.getPi()[0].length];
            double piSum = 0;
            for (int j = 0; j < or.getPi().length; j++) {
                for (int k = 0; k < or.getPi()[j].length; k++) {
                    pi[k] += or.getPi()[j][k];
                    piSum += or.getPi()[j][k];
                }
            }
            StringBuilder piSB = new StringBuilder();
            piSB.append(pi[0] / piSum);
            for (int k = 1; k < pi.length; k++) {
                piSB.append("\t");
                piSB.append(pi[k] / piSum);
            }
            new File(Globals.getINSTANCE().getSAVEPATH() + i).mkdirs();
            ModelSampling modelSampling = new ModelSampling(or,
                    Globals.getINSTANCE().getSAVEPATH() + i + File.separator);
            modelSampling.save();
            Map<String, Double> quasispecies = FastaParser.parseQuasispeciesFile(
                    Globals.getINSTANCE().getSAVEPATH() + i + File.separator + "quasispecies.fasta");
            // Assign each sampled haplotype's frequency to its closest truth sequence
            // (minimum Hamming distance).
            double[] frequencies = new double[truth.length];
            for (Map.Entry<String, Double> e : quasispecies.entrySet()) {
                int distance = Integer.MAX_VALUE;
                int index = -1;
                for (int t = 0; t < truth.length; t++) {
                    int hamming = DistanceUtils.calcHamming(e.getKey(), truth[t]);
                    if (hamming < distance) {
                        index = t;
                        distance = hamming;
                    }
                }
                frequencies[index] += e.getValue();
            }
            StringBuilder fSB = new StringBuilder();
            fSB.append(frequencies[0]);
            for (int f = 1; f < frequencies.length; f++) {
                fSB.append("\t");
                fSB.append(frequencies[f]);
            }
            Utils.appendFile(Globals.getINSTANCE().getSAVEPATH() + "bootstrap.txt", fSB.toString() + "\n");
            Utils.appendFile(Globals.getINSTANCE().getSAVEPATH() + "pi_bootstrap.txt", piSB.toString() + "\n");
            bics.putAll(ms.getMsTemp().getMaxBICs());
        }
        // NOTE(review): this exit makes everything below it in this branch (the BIC
        // summary table and the best-K selection) dead code. Looks like a leftover
        // debug shortcut — confirm before removing.
        System.exit(0);
        StringBuilder sb = new StringBuilder();
        int bootstraps = bics.asMap().values().iterator().next().size();
        Set<Integer> keySet = bics.keySet();
        for (int i : keySet) {
            sb.append(i).append("\t");
        }
        sb.setLength(sb.length() - 1);
        sb.append("\n");
        for (int l = 0; l < bootstraps; l++) {
            for (int i : keySet) {
                ArrayList arrayList = new ArrayList(bics.get(i));
                if (l < arrayList.size()) {
                    sb.append(arrayList.get(l));
                }
                sb.append("\t");
            }
            sb.setLength(sb.length() - 1);
            sb.append("\n");
        }
        Utils.saveFile(Globals.getINSTANCE().getSAVEPATH() + "support" + File.separator + "bics.txt",
                sb.toString());
        StatusUpdate.getINSTANCE().println("");
        ModelSelectionBootstrapStorage msbt = new ModelSelectionBootstrapStorage(bics);
        // Continue the main run with the K chosen by the bootstrap.
        Kmin = msbt.getBestK();
        Kmax = Kmin;
        Globals.getINSTANCE().setBOOTSTRAP(false);
    }
    ModelSelection ms = null;
    // NOTE(review): a large commented-out "SUBSAMPLE" workflow variant was removed
    // here for readability; recover it from version control if ever needed.
    ms = new ModelSelection(reads, Kmin, Kmax, L, n);
    if (!Globals.getINSTANCE().isNOSAMPLE()) {
        ModelSampling modelSampling = new ModelSampling(ms.getOptimalResult(),
                Globals.getINSTANCE().getSAVEPATH());
        modelSampling.save();
        System.out
                .println("\nQuasispecies saved: " + Globals.getINSTANCE().getSAVEPATH() + "quasispecies.fasta");
    }
    if (!Globals.getINSTANCE().isDEBUG()) {
        deleteDirectory(
                new File(Globals.getINSTANCE().getSAVEPATH() + "support" + File.separator + "snapshots"));
    }
    // errorCorrection(ms, reads);
}
From source file:org.apache.calcite.rel.rules.AbstractMaterializedViewRule.java
/**
 * It checks whether the query can be rewritten using the view even though the
 * view uses additional tables. In order to do that, we need to double-check
 * that every join that exists in the view and is not in the query is a
 * cardinality-preserving join, i.e., it only appends columns to the row
 * without changing its multiplicity. Thus, the join needs to be:
 * <ul>
 * <li> Equi-join </li>
 * <li> Between all columns in the keys </li>
 * <li> Foreign-key columns do not allow NULL values </li>
 * <li> Foreign-key </li>
 * <li> Unique-key </li>
 * </ul>
 *
 * <p>If it can be rewritten, it returns true and it inserts the missing equi-join
 * predicates in the input compensationEquiColumns multimap. Otherwise, it returns
 * false.
 */
private static boolean compensateQueryPartial(
        Multimap<RexTableInputRef, RexTableInputRef> compensationEquiColumns,
        Set<RelTableRef> viewTableRefs, EquivalenceClasses vEC, Set<RelTableRef> queryTableRefs) {
    // Create UK-FK graph with view tables
    final DirectedGraph<RelTableRef, Edge> graph = DefaultDirectedGraph.create(Edge.FACTORY);
    final Multimap<List<String>, RelTableRef> tableQNameToTableRefs = ArrayListMultimap.create();
    final Set<RelTableRef> extraTableRefs = new HashSet<>();
    for (RelTableRef tRef : viewTableRefs) {
        // Add tables in view as vertices
        graph.addVertex(tRef);
        tableQNameToTableRefs.put(tRef.getQualifiedName(), tRef);
        if (!queryTableRefs.contains(tRef)) {
            // Add to extra tables if table is not part of the query
            extraTableRefs.add(tRef);
        }
    }
    for (RelTableRef tRef : graph.vertexSet()) {
        // Add edges between tables
        List<RelReferentialConstraint> constraints = tRef.getTable().getReferentialConstraints();
        for (RelReferentialConstraint constraint : constraints) {
            Collection<RelTableRef> parentTableRefs = tableQNameToTableRefs
                    .get(constraint.getTargetQualifiedName());
            // NOTE(review): Multimap.get never returns null; the null check is
            // redundant but harmless.
            if (parentTableRefs == null || parentTableRefs.isEmpty()) {
                continue;
            }
            for (RelTableRef parentTRef : parentTableRefs) {
                boolean canBeRewritten = true;
                Multimap<RexTableInputRef, RexTableInputRef> equiColumns = ArrayListMultimap.create();
                // Every column pair of the constraint must hold: FK column is
                // NOT NULL and equated with the UK column in the view.
                for (int pos = 0; pos < constraint.getNumColumns(); pos++) {
                    int foreignKeyPos = constraint.getColumnPairs().get(pos).source;
                    RelDataType foreignKeyColumnType = tRef.getTable().getRowType().getFieldList()
                            .get(foreignKeyPos).getType();
                    RexTableInputRef foreignKeyColumnRef = RexTableInputRef.of(tRef, foreignKeyPos,
                            foreignKeyColumnType);
                    int uniqueKeyPos = constraint.getColumnPairs().get(pos).target;
                    RexTableInputRef uniqueKeyColumnRef = RexTableInputRef.of(parentTRef, uniqueKeyPos,
                            parentTRef.getTable().getRowType().getFieldList().get(uniqueKeyPos).getType());
                    if (!foreignKeyColumnType.isNullable() && vEC.getEquivalenceClassesMap()
                            .get(uniqueKeyColumnRef).contains(foreignKeyColumnRef)) {
                        equiColumns.put(foreignKeyColumnRef, uniqueKeyColumnRef);
                    } else {
                        canBeRewritten = false;
                        break;
                    }
                }
                if (canBeRewritten) {
                    // Add edge FK -> UK
                    Edge edge = graph.getEdge(tRef, parentTRef);
                    if (edge == null) {
                        edge = graph.addEdge(tRef, parentTRef);
                    }
                    edge.equiColumns.putAll(equiColumns);
                    break;
                }
            }
        }
    }
    // Try to eliminate tables from graph: if we can do it, it means extra tables in
    // view are cardinality-preserving joins. A vertex with exactly one incoming edge
    // and no outgoing edges is a leaf of a UK-FK join and can be peeled off.
    boolean done = false;
    do {
        List<RelTableRef> nodesToRemove = new ArrayList<>();
        for (RelTableRef tRef : graph.vertexSet()) {
            if (graph.getInwardEdges(tRef).size() == 1 && graph.getOutwardEdges(tRef).isEmpty()) {
                // UK-FK join
                nodesToRemove.add(tRef);
                if (extraTableRefs.contains(tRef)) {
                    // We need to add to compensation columns as the table is not
                    // present in the query
                    compensationEquiColumns.putAll(graph.getInwardEdges(tRef).get(0).equiColumns);
                }
            }
        }
        if (!nodesToRemove.isEmpty()) {
            graph.removeAllVertices(nodesToRemove);
        } else {
            done = true;
        }
    } while (!done);
    // After removing them, we check whether all the remaining tables in the graph
    // are tables present in the query: if they are, we can try to rewrite
    if (!Collections.disjoint(graph.vertexSet(), extraTableRefs)) {
        return false;
    }
    return true;
}
From source file:de.hzi.helmholtz.Compare.PathwayComparisonWithModules.java
/**
 * Matches genes between the query and target pathways whose domain-function
 * multisets are identical ("perfect" matches), scores each matched pair, and then
 * recurses on the remaining (unmatched) genes via {@code SubsetIdentification}.
 *
 * <p>Side effects: records per-pair scores in the instance maps
 * {@code ScoreFunctionMatchMisMatch}, {@code ScoreStatusMatchMisMatch} and
 * {@code ScoreSubstrateMatchMisMatch}, and prints the result to stdout.
 *
 * @param firstPathway  the query pathway
 * @param secondPathway the target pathway
 * @param newSourceGeneIdToPositionMap gene id to position for the query pathway
 * @param newTargetGeneIdToPositionMap gene id to position for the target pathway
 * @param Yes direction flag: 0 emits "query->target" pair ids, otherwise "target->query"
 * @return scores (descending) mapped to the "geneA->geneB" pair identifiers
 */
public Multimap<Double, String> SubsetsMatching(final PathwayWithModules firstPathway,
        final PathwayWithModules secondPathway, BiMap<Integer, Integer> newSourceGeneIdToPositionMap,
        BiMap<Integer, Integer> newTargetGeneIdToPositionMap, int Yes) {
    // TreeMultimap ordered by descending score, then natural order of pair ids.
    Multimap<Double, String> resultPerfect = TreeMultimap.create(Ordering.natural().reverse(),
            Ordering.natural());
    PathwayWithModules firstPathwayCopy = new PathwayWithModules(firstPathway); // Copy of the Query pathway
    PathwayWithModules secondPathwayCopy = new PathwayWithModules(secondPathway); // Copy of the Target pathway
    int currentQueryGene = 0;
    Iterator<ModuleGene> sourceGeneIt = firstPathway.moduleGeneIterator();
    List<Integer> QueryToRemove = new ArrayList<Integer>();
    List<Integer> TargetToRemove = new ArrayList<Integer>();
    while (sourceGeneIt.hasNext()) {
        currentQueryGene++;
        ModuleGene queryGene = sourceGeneIt.next();
        int currentTargetGene = 0;
        // Collect the query gene's domain functions (as multiset and ordered list),
        // activity statuses, and substrate sets.
        Multiset<String> qfunction = LinkedHashMultiset.create();
        List<String> qfunctionList = new ArrayList<String>();
        List<String> qactivity = new ArrayList<String>();
        List<Set<String>> qsubstrate = new ArrayList<Set<String>>();
        for (Module m : queryGene.getModule()) {
            for (Domain d : m.getDomains()) {
                qfunction.add(d.getDomainFunctionString());
                qfunctionList.add(d.getDomainFunctionString());
                qactivity.add(d.getStatus().toString());
                qsubstrate.add(d.getSubstrates());
            }
        }
        Iterator<ModuleGene> targetGeneIt = secondPathway.moduleGeneIterator();
        while (targetGeneIt.hasNext()) {
            currentTargetGene++;
            ModuleGene targetGene = targetGeneIt.next();
            // Same collection step for the target gene.
            Multiset<String> tfunction = LinkedHashMultiset.create();
            List<String> tfunctionList = new ArrayList<String>();
            List<String> tactivity = new ArrayList<String>();
            List<Set<String>> tsubstrate = new ArrayList<Set<String>>();
            for (Module m : targetGene.getModule()) {
                for (Domain d : m.getDomains()) {
                    tfunctionList.add(d.getDomainFunctionString());
                    tfunction.add(d.getDomainFunctionString());
                    tactivity.add(d.getStatus().toString());
                    tsubstrate.add(d.getSubstrates());
                }
            }
            // Perfect match requires the two function multisets to be identical
            // (the intersection has the size of both sides).
            Multiset<String> DomainsCovered = Multisets.intersection(qfunction, tfunction);
            if (DomainsCovered.size() == qfunction.size() && DomainsCovered.size() == tfunction.size()) {
                Multimap<Double, Multimap<String, Integer>> activityscores = myFunction.calculate(qactivity,
                        tactivity);
                Multimap<String, Integer> Functionscores = ArrayListMultimap.create();
                // Any transposition of domains collapses to a binary 0/1 penalty.
                int TranspositionDomains = LevenshteinDistance.computeLevenshteinDistance(qfunctionList,
                        tfunctionList);
                if (TranspositionDomains > 0) {
                    TranspositionDomains = 1;
                }
                Functionscores.put(qfunction.size() + "-0", TranspositionDomains);
                Multimap<Double, Multimap<String, Integer>> substratescore = myFunction
                        .calculate(getSubstrateList(qsubstrate), getSubstrateList(tsubstrate));
                Object activityScore = activityscores.asMap().keySet().toArray()[0];
                Object substrateScore = substratescore.asMap().keySet().toArray()[0];
                // Weighted combination (2.9 function + 0.05 activity + 0.05 substrate)/3,
                // rounded to 2 decimals — weights presumably tuned empirically; verify.
                double finalScore = Math
                        .round((((2.9 * 1.0) + (0.05 * Double.parseDouble(activityScore.toString().trim()))
                                + (0.05 * Double.parseDouble(substrateScore.toString().trim()))) / 3) * 100.0)
                        / 100.0;
                // Pair id direction depends on the Yes flag.
                String ConvertedGeneIDs = "";
                if (Yes == 0) {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentQueryGene),
                            newSourceGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentTargetGene),
                                    newTargetGeneIdToPositionMap);
                } else {
                    ConvertedGeneIDs = reconstructWithGeneId(Integer.toString(currentTargetGene),
                            newTargetGeneIdToPositionMap) + "->"
                            + reconstructWithGeneId(Integer.toString(currentQueryGene),
                                    newSourceGeneIdToPositionMap);
                }
                resultPerfect.put(finalScore, ConvertedGeneIDs);
                ScoreFunctionMatchMisMatch.put(ConvertedGeneIDs, Functionscores);
                ScoreStatusMatchMisMatch.putAll(ConvertedGeneIDs, activityscores.values());
                ScoreSubstrateMatchMisMatch.putAll(ConvertedGeneIDs, substratescore.values());
                TargetToRemove.add(currentTargetGene);
                QueryToRemove.add(currentQueryGene);
            }
        }
    }
    // Remove perfectly matched genes from the copies before recursing.
    for (int i : TargetToRemove) {
        secondPathwayCopy.removeGene(i);
    }
    for (int i : QueryToRemove) {
        firstPathwayCopy.removeGene(i);
    }
    if (firstPathwayCopy.size() > 0 && secondPathwayCopy.size() > 0) {
        // Re-construct the bimaps for the reduced pathways (positions are 1-based).
        newSourceGeneIdToPositionMap = HashBiMap.create();
        int temp = 0;
        for (ModuleGene e : firstPathwayCopy.getModulegenes()) {
            temp = temp + 1;
            newSourceGeneIdToPositionMap.put(e.getGeneId(), temp);
        }
        newTargetGeneIdToPositionMap = HashBiMap.create();
        temp = 0;
        for (ModuleGene e : secondPathwayCopy.getModulegenes()) {
            temp = temp + 1;
            newTargetGeneIdToPositionMap.put(e.getGeneId(), temp);
        }
        // Handle the remaining (non-perfect) genes with the subset identification pass.
        resultPerfect.putAll(SubsetIdentification(firstPathwayCopy, secondPathwayCopy,
                newSourceGeneIdToPositionMap, newTargetGeneIdToPositionMap, Yes));
    }
    System.out.println(resultPerfect);
    return resultPerfect;
}