List of usage examples for java.util LinkedHashSet addAll
boolean addAll(Collection<? extends E> c);
From source file:nl.systemsgenetics.genenetworkbackend.hpo.TestDiseaseGenePerformance.java
/**
 * Benchmarks HPO-based disease-gene prediction: for every OMIM disease-gene pair it scores all
 * genes against the pair's predictable HPO terms, ranks the true disease gene among all genes
 * (and among disease genes only), and writes one TSV row of rank/z-score/skewness/AUC statistics
 * per pair.
 *
 * NOTE(review): all input/output paths are hard-coded Windows paths, and the run-mode switches
 * (randomize, randomizeCustomBackground) are compile-time constants — this is a one-off analysis
 * driver, not a reusable tool.
 *
 * @param args the command line arguments (unused)
 * @throws java.lang.Exception if a known disease gene is absent from the prediction matrix, or
 *         if loading any of the input files fails
 */
public static void main(String[] args) throws Exception {
    // --- Input files: disease->gene->HPO annotations, ID mappings, prediction matrices. ---
    final File diseaseGeneHpoFile = new File(
            "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\HPO\\135\\ALL_SOURCES_ALL_FREQUENCIES_diseases_to_genes_to_phenotypes.txt");
    final File ncbiToEnsgMapFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\ensgNcbiId.txt");
    final File hgncToEnsgMapFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\ensgHgnc.txt");
    // Same file as hgncToEnsgMapFile; here only used for ENSG -> display-symbol lookup.
    final File ensgSymbolMappingFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\ensgHgnc.txt");
    final File predictionMatrixFile = new File(
            "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_zscores.txt.gz");
    final File predictionMatrixCorrelationFile = new File(
            "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_pathwayCorrelation.txt");
    final File significantTermsFile = new File(
            "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_bonSigTerms.txt");
    // Multiple-testing-corrected p-value cutoff used when selecting predictable HPO terms.
    final double correctedPCutoff = 0.05;
    final File hpoOboFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\HPO\\135\\hp.obo");
    final File hpoPredictionInfoFile = new File(
            "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\hpo_predictions_auc_bonferroni.txt");
    final File hposToExcludeFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\hpoToExclude.txt");
    final File skewnessFile = new File(
            "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\predictions\\skewnessSummary.txt");
    // When true the disease-gene assignments are permuted to produce a null benchmark.
    final boolean randomize = true;
    final File annotationMatrixFile = new File(
            "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\PathwayMatrix\\ALL_SOURCES_ALL_FREQUENCIES_phenotype_to_genes.txt_matrix.txt.gz");
    final File backgroundForRandomize = new File(
            "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\PathwayMatrix\\Ensembl2Reactome_All_Levels.txt_genesInPathways.txt");
    //final File backgroundForRandomize = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\expressedReactomeGenes.txt");
    final boolean randomizeCustomBackground = true;

    Map<String, String> ensgSymbolMapping = loadEnsgToHgnc(ensgSymbolMappingFile);

    // Output location and permutation background depend on the run mode chosen above.
    final File outputFile;
    final ArrayList<String> backgroundGenes;
    if (randomize) {
        if (randomizeCustomBackground) {
            // Custom-background permutation is deliberately disabled: the ranking list does not
            // yet contain every gene of the background list, so results would be invalid.
            System.err.println("First need to fix so ranking list contains all genes in background list");
            return;
            // backgroundGenes = loadBackgroundGenes(backgroundForRandomize);
            // outputFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\hpoDiseaseBenchmarkRandomizedCustomBackground.txt");
        } else {
            backgroundGenes = null;
            outputFile = new File(
                    "C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\hpoDiseaseBenchmarkRandomizedExtraNorm.txt");
        }
    } else {
        backgroundGenes = null;
        outputFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\hpoDiseaseBenchmarkExtraNorm.txt");
    }

    // --- Load mappings, exclusions and matrices. ---
    final HashMap<String, ArrayList<String>> ncbiToEnsgMap = loadNcbiToEnsgMap(ncbiToEnsgMapFile);
    final HashMap<String, ArrayList<String>> hgncToEnsgMap = loadHgncToEnsgMap(hgncToEnsgMapFile);
    final HashSet<String> exludedHpo = loadHpoExclude(hposToExcludeFile);
    final SkewnessInfo skewnessInfo = new SkewnessInfo(skewnessFile);
    LinkedHashSet<String> significantTerms = loadSignificantTerms(significantTermsFile);
    DoubleMatrixDataset<String, String> predictionMatrix = DoubleMatrixDataset
            .loadDoubleData(predictionMatrixFile.getAbsolutePath());
    // Restrict the prediction matrix to the Bonferroni-significant HPO terms only.
    DoubleMatrixDataset<String, String> predictionMatrixSignificant = predictionMatrix
            .viewColSelection(significantTerms);
    // NOTE(review): loaded but not referenced anywhere below in this method.
    DoubleMatrixDataset<String, String> predictionMatrixSignificantCorrelationMatrix = DoubleMatrixDataset
            .loadDoubleData(predictionMatrixCorrelationFile.getAbsolutePath());
    DiseaseGeneHpoData diseaseGeneHpoData = new DiseaseGeneHpoData(diseaseGeneHpoFile, ncbiToEnsgMap,
            hgncToEnsgMap, exludedHpo, new HashSet(predictionMatrix.getHashRows().keySet()), "OMIM");
    //NOTE if one would use a different background this needs to be updated
    HashSet<String> diseaseGenes = new HashSet<>(diseaseGeneHpoData.getDiseaseGenes());

    if (randomize) {
        // Permute disease-gene assignments (seed 1) to create the null distribution.
        diseaseGeneHpoData = diseaseGeneHpoData.getPermutation(1, backgroundGenes);
    }

    // Sanity check: every disease gene must have a row in the (term-filtered) prediction matrix.
    for (String gene : diseaseGenes) {
        if (!predictionMatrixSignificant.containsRow(gene)) {
            throw new Exception("Error: " + gene);
        }
    }

    // Map each matrix row index to a compact index over disease genes only (-1 = not a disease gene),
    // so ranks among disease genes can be computed from a dense array.
    int[] mapGeneIndexToDiseaseGeneIndex = new int[predictionMatrix.rows()];
    ArrayList<String> predictedGenes = predictionMatrix.getRowObjects();
    int g2 = 0;
    for (int g = 0; g < predictedGenes.size(); ++g) {
        mapGeneIndexToDiseaseGeneIndex[g] = diseaseGenes.contains(predictedGenes.get(g)) ? g2++ : -1;
    }

    DoubleMatrixDataset<String, String> annotationnMatrix = DoubleMatrixDataset
            .loadDoubleData(annotationMatrixFile.getAbsolutePath());
    DoubleMatrixDataset<String, String> annotationMatrixSignificant = annotationnMatrix
            .viewColSelection(significantTerms);
    // Per-HPO mean/SD of prediction z-scores over annotated genes, used for outlier scoring below.
    HashMap<String, MeanSd> hpoMeanSds = calculatePathayMeansOfAnnotatedGenes(predictionMatrixSignificant,
            annotationMatrixSignificant);
    Map<String, PredictionInfo> predictionInfo = HpoFinder.loadPredictionInfo(hpoPredictionInfoFile);
    Ontology hpoOntology = HpoFinder.loadHpoOntology(hpoOboFile);
    HpoFinder hpoFinder = new HpoFinder(hpoOntology, predictionInfo);

    final int totalGenes = predictionMatrixSignificant.rows();
    final int totalDiseaseGenes = diseaseGenes.size();
    // Reused per disease-gene pair: scores for all genes, and for disease genes only.
    final double[] geneScores = new double[totalGenes];
    final double[] geneScoresDiseaseGenes = new double[totalDiseaseGenes];
    final NaturalRanking naturalRanking = new NaturalRanking(NaNStrategy.FAILED, TiesStrategy.MAXIMUM);

    // Tab-separated output with no quoting/escaping.
    CSVWriter writer = new CSVWriter(new FileWriter(outputFile), '\t', '\0', '\0', "\n");
    String[] outputLine = new String[16];
    int c = 0;
    outputLine[c++] = "Disease";
    outputLine[c++] = "Gene";
    outputLine[c++] = "Hgnc";
    outputLine[c++] = "Rank";
    outputLine[c++] = "RankAmongDiseaseGenes";
    outputLine[c++] = "Z-score";
    outputLine[c++] = "HPO_skewness";
    outputLine[c++] = "Other_mean_skewness";
    outputLine[c++] = "Other_max_skewness";
    outputLine[c++] = "HPO_phenotypic_match_score";
    outputLine[c++] = "HPO_count";
    outputLine[c++] = "HPO_sum_auc";
    outputLine[c++] = "HPO_mean_auc";
    outputLine[c++] = "HPO_median_auc";
    outputLine[c++] = "HPO_terms";
    outputLine[c++] = "HPO_terms_match_score";
    writer.writeNext(outputLine);

    // Only used by the commented-out single-random-HPO experiment below; kept for reproducibility.
    Random random = new Random(1);
    Mean meanCalculator = new Mean();
    Median medianCalculator = new Median();

    // --- One output row per (disease, gene) pair. ---
    for (DiseaseGeneHpoData.DiseaseGene diseaseGene : diseaseGeneHpoData.getDiseaseGeneHpos()) {
        String gene = diseaseGene.getGene();
        String disease = diseaseGene.getDisease();
        if (!predictionMatrixSignificant.containsRow(gene)) {
            continue;
        }
        Set<String> geneHpos = diseaseGeneHpoData.getDiseaseEnsgHpos(diseaseGene);
        // Expand each annotated HPO term to the predictable terms (at the p cutoff) it maps to.
        LinkedHashSet<String> geneHposPredictable = new LinkedHashSet<>();
        for (String hpo : geneHpos) {
            geneHposPredictable
                    .addAll(hpoFinder.getTermsToNames(hpoFinder.getPredictableTerms(hpo, correctedPCutoff)));
        }
        if (geneHposPredictable.isEmpty()) {
            continue;
        }
        // if(geneHposPredictable.size() > 1){
        // String hpoSelected = geneHposPredictable.toArray(new String[geneHposPredictable.size()])[random.nextInt(geneHposPredictable.size())];
        // geneHposPredictable = new LinkedHashSet<>(1);
        // geneHposPredictable.add(hpoSelected);
        // }

        // Gene score = sum of the gene's z-scores over the pair's HPO terms / sqrt(#terms).
        DoubleMatrixDataset<String, String> predictionCaseTerms = predictionMatrixSignificant
                .viewColSelection(geneHposPredictable);
        DoubleMatrix2D predictionCaseTermsMatrix = predictionCaseTerms.getMatrix();
        double denominator = Math.sqrt(geneHposPredictable.size());
        for (int g = 0; g < totalGenes; ++g) {
            geneScores[g] = predictionCaseTermsMatrix.viewRow(g).zSum() / denominator;
            if (Double.isNaN(geneScores[g])) {
                geneScores[g] = 0;
            }
            // Mirror the score into the compact disease-genes-only array where applicable.
            g2 = mapGeneIndexToDiseaseGeneIndex[g];
            if (g2 >= 0) {
                geneScoresDiseaseGenes[g2] = geneScores[g];
            }
        }

        // Rank the true disease gene among all genes and among disease genes only
        // (1 = best, i.e. highest score; ties take the maximum rank).
        double[] geneRanks = naturalRanking.rank(geneScores);
        int diseaseGeneIndex = predictionMatrixSignificant.getRowIndex(gene);
        double[] geneRanksDiseaseGenes = naturalRanking.rank(geneScoresDiseaseGenes);
        int diseaseGeneIndexInDiseaseGenesOnly = mapGeneIndexToDiseaseGeneIndex[diseaseGeneIndex];
        double zscore = geneScores[diseaseGeneIndex];
        double rank = (totalGenes - geneRanks[diseaseGeneIndex]) + 1;
        double rankAmongDiseaseGenes = (totalDiseaseGenes
                - geneRanksDiseaseGenes[diseaseGeneIndexInDiseaseGenesOnly]) + 1;

        // Phenotypic match score: mean per-term outlier score ((z - term mean) / term SD);
        // also collect per-term scores (";"-joined) and AUC statistics.
        double hpoPhenotypicMatchScore = 0;
        StringBuilder individualMatchScore = new StringBuilder();
        boolean notFirst = false;
        int usedHpos = 0;
        double[] aucs = new double[geneHposPredictable.size()];
        double sumAucs = 0;
        int i = 0;
        for (String hpo : geneHposPredictable) {
            usedHpos++;
            MeanSd hpoMeanSd = hpoMeanSds.get(hpo);
            double hpoPredictionZ = predictionMatrixSignificant.getElement(gene, hpo);
            double hpoPredictionOutlierScore = ((hpoPredictionZ - hpoMeanSd.getMean()) / hpoMeanSd.getSd());
            if (notFirst) {
                individualMatchScore.append(';');
            }
            notFirst = true;
            individualMatchScore.append(hpoPredictionOutlierScore);
            hpoPhenotypicMatchScore += hpoPredictionOutlierScore;
            aucs[i++] = predictionInfo.get(hpo).getAuc();
            sumAucs += predictionInfo.get(hpo).getAuc();
        }
        double meanAuc = meanCalculator.evaluate(aucs);
        double medianAuc = medianCalculator.evaluate(aucs);
        if (usedHpos == 0) {
            hpoPhenotypicMatchScore = Double.NaN;
        } else {
            hpoPhenotypicMatchScore = hpoPhenotypicMatchScore / usedHpos;
        }

        String symbol = ensgSymbolMapping.get(gene);
        if (symbol == null) {
            symbol = "";
        }

        // Emit the row in the same column order as the header written above.
        c = 0;
        outputLine[c++] = disease;
        outputLine[c++] = gene;
        outputLine[c++] = symbol;
        outputLine[c++] = String.valueOf(rank);
        outputLine[c++] = String.valueOf(rankAmongDiseaseGenes);
        outputLine[c++] = String.valueOf(zscore);
        outputLine[c++] = String.valueOf(skewnessInfo.getHpoSkewness(gene));
        outputLine[c++] = String.valueOf(skewnessInfo.getMeanSkewnessExHpo(gene));
        outputLine[c++] = String.valueOf(skewnessInfo.getMaxSkewnessExHpo(gene));
        outputLine[c++] = String.valueOf(hpoPhenotypicMatchScore);
        outputLine[c++] = String.valueOf(geneHposPredictable.size());
        outputLine[c++] = String.valueOf(sumAucs);
        outputLine[c++] = String.valueOf(meanAuc);
        outputLine[c++] = String.valueOf(medianAuc);
        outputLine[c++] = String.join(";", geneHposPredictable);
        outputLine[c++] = individualMatchScore.toString();
        writer.writeNext(outputLine);
    }
    writer.close();
}
From source file:Main.java
public static <ELEMENT> LinkedHashSet<ELEMENT> newLinkedHashSet(Collection<ELEMENT> elements) { final LinkedHashSet<ELEMENT> set = newLinkedHashSetSized(elements.size()); set.addAll(elements); return set;// ww w. ja va2 s. co m }
From source file:com.gs.obevo.db.impl.platforms.db2.Db2SqlExecutor.java
/** * Package-private for unit testing only. *///from w w w.j av a2 s .c om static String getCurrentPathSql(Connection conn, JdbcHelper jdbc, ImmutableSet<PhysicalSchema> physicalSchemas) { String path = jdbc.query(conn, "select current path from sysibm.sysdummy1", new ScalarHandler<String>()); MutableList<String> currentSchemaPathList = Lists.mutable.of(path.split(",")) .collect(new Function<String, String>() { @Override public String valueOf(String object) { if (object.startsWith("\"") && object.endsWith("\"")) { return object.substring(1, object.length() - 1); } else { return object; } } }); // Rules on constructing this "set path" command: // 1) The existing default path must come first (respecting the existing connection), followed by the // schemas in our environment. The default path must take precedence. // 2) We cannot have duplicate schemas listed in the "set path" call; i.e. in case the schemas in our // environment config are already in the default schema. // // Given these two requirements, we use a LinkedHashSet LinkedHashSet<String> currentSchemaPaths = new LinkedHashSet(currentSchemaPathList); currentSchemaPaths.addAll(physicalSchemas.collect(PhysicalSchema.TO_PHYSICAL_NAME).castToSet()); // This is needed to work w/ stored procedures // Ideally, we'd use "set current path current path, " + physicalSchemaList // However, we can't set this multiple times in a connection, as we can't have dupes in "current path" // Ideally, we could use BasicDataSource.initConnectionSqls, but this does not interoperate w/ the LDAP // datasource for JNDI-JDBC return "set path " + CollectionAdapter.adapt(currentSchemaPaths).makeString(","); }
From source file:com.ery.ertc.estorm.util.DNS.java
/** * Returns all the IPs associated with the provided interface, if any, in textual form. * /*w ww. j a v a 2 s . co m*/ * @param strInterface * The name of the network interface or subinterface to query (eg eth0 or eth0:0) or the string "default" * @param returnSubinterfaces * Whether to return IPs associated with subinterfaces of the given interface * @return A string vector of all the IPs associated with the provided interface * @throws UnknownHostException * If an UnknownHostException is encountered in querying the default interface or the given interface can not be found * */ public static String[] getIPs(String strInterface, boolean returnSubinterfaces) throws UnknownHostException { if ("default".equals(strInterface)) { return new String[] { InetAddress.getLocalHost().getHostAddress() }; } NetworkInterface netIf; try { netIf = NetworkInterface.getByName(strInterface); if (netIf == null) { netIf = getSubinterface(strInterface); if (netIf == null) { throw new UnknownHostException("Unknown interface " + strInterface); } } } catch (SocketException e) { LOG.warn("Unable to get IP for interface " + strInterface, e); return new String[] { InetAddress.getLocalHost().getHostAddress() }; } // NB: Using a LinkedHashSet to preserve the order for callers // that depend on a particular element being 1st in the array. // For example, getDefaultIP always returns the first element. LinkedHashSet<InetAddress> allAddrs = new LinkedHashSet<InetAddress>(); allAddrs.addAll(Collections.list(netIf.getInetAddresses())); if (!returnSubinterfaces) { allAddrs.removeAll(getSubinterfaceInetAddrs(netIf)); } String ips[] = new String[allAddrs.size()]; int i = 0; for (InetAddress addr : allAddrs) { ips[i++] = addr.getHostAddress(); } return ips; }
From source file:com.offbynull.portmapper.common.NetworkUtils.java
/** * Attempts to put together a list of gateway addresses using pre-set values and running OS-specific processes. * @return a list of possible addresses for gateway device * @throws InterruptedException if interrupted *//* ww w . j a v a 2 s. c om*/ public static Set<InetAddress> getPotentialGatewayAddresses() throws InterruptedException { // Ask OS for gateway address String netstatOutput = ""; try { netstatOutput = ProcessUtils.runProcessAndDumpOutput(5000L, "netstat", "-rn"); } catch (IOException ioe) { // NOPMD // do nothing if (Thread.currentThread().isInterrupted()) { throw new InterruptedException(); } } LinkedHashSet<String> strAddresses = new LinkedHashSet<>(RegexUtils.findAllIpv4Addresses(netstatOutput)); // Push in defaults strAddresses.addAll(PRESET_IPV4_GATEWAY_ADDRESSES); LinkedHashSet<InetAddress> addresses = new LinkedHashSet<>(); for (String strAddress : strAddresses) { try { InetAddress addr = InetAddress.getByName(strAddress); if (!addr.isAnyLocalAddress()) { addresses.add(addr); } } catch (UnknownHostException uhe) { // NOPMD // do nothing } } return addresses; }
From source file:com.buaa.cfs.net.DNS.java
/** * Returns all the IPs associated with the provided interface, if any, in textual form. * * @param strInterface The name of the network interface or sub-interface to query (eg eth0 or eth0:0) or the * string "default" * @param returnSubinterfaces Whether to return IPs associated with subinterfaces of the given interface * * @return A string vector of all the IPs associated with the provided interface. The local host IP is returned if * the interface name "default" is specified or there is an I/O error looking for the given interface. * * @throws UnknownHostException If the given interface is invalid */// www . j a v a2 s . c o m public static String[] getIPs(String strInterface, boolean returnSubinterfaces) throws UnknownHostException { if ("default".equals(strInterface)) { return new String[] { cachedHostAddress }; } NetworkInterface netIf; try { netIf = NetworkInterface.getByName(strInterface); if (netIf == null) { netIf = getSubinterface(strInterface); } } catch (SocketException e) { LOG.warn("I/O error finding interface " + strInterface + ": " + e.getMessage()); return new String[] { cachedHostAddress }; } if (netIf == null) { throw new UnknownHostException("No such interface " + strInterface); } // NB: Using a LinkedHashSet to preserve the order for callers // that depend on a particular element being 1st in the array. // For example, getDefaultIP always returns the first element. LinkedHashSet<InetAddress> allAddrs = new LinkedHashSet<InetAddress>(); allAddrs.addAll(Collections.list(netIf.getInetAddresses())); if (!returnSubinterfaces) { allAddrs.removeAll(getSubinterfaceInetAddrs(netIf)); } String ips[] = new String[allAddrs.size()]; int i = 0; for (InetAddress addr : allAddrs) { ips[i++] = addr.getHostAddress(); } return ips; }
From source file:com.github.dozermapper.core.util.MappingUtils.java
public static List<Class<?>> getSuperClassesAndInterfaces(Class<?> srcClass, BeanContainer beanContainer) { List<Class<?>> superClasses = new ArrayList<>(); Class<?> realClass = getRealClass(srcClass, beanContainer); // Add all super classes first Class<?> superClass = getRealClass(realClass, beanContainer).getSuperclass(); while (!isBaseClass(superClass)) { superClasses.add(superClass);// w ww .j a v a 2 s .c om superClass = superClass.getSuperclass(); } // Now add all interfaces of the passed in class and all it's super classes // Linked hash set so duplicated are not added but insertion order is kept LinkedHashSet<Class<?>> interfaces = new LinkedHashSet<>(); interfaces.addAll(getInterfaceHierarchy(realClass, beanContainer)); for (Class<?> clazz : superClasses) { interfaces.addAll(getInterfaceHierarchy(clazz, beanContainer)); } superClasses.addAll(interfaces); return superClasses; }
From source file:org.dozer.util.MappingUtils.java
public static List<Class<?>> getSuperClassesAndInterfaces(Class<?> srcClass) { List<Class<?>> superClasses = new ArrayList<Class<?>>(); Class<?> realClass = getRealClass(srcClass); // Add all super classes first Class<?> superClass = getRealClass(realClass).getSuperclass(); while (!isBaseClass(superClass)) { superClasses.add(superClass);// www . jav a 2 s. co m superClass = superClass.getSuperclass(); } // Now add all interfaces of the passed in class and all it's super classes // Linked hash set so duplicated are not added but insertion order is kept LinkedHashSet<Class<?>> interfaces = new LinkedHashSet<Class<?>>(); interfaces.addAll(getInterfaceHierarchy(realClass)); for (Class<?> clazz : superClasses) { interfaces.addAll(getInterfaceHierarchy(clazz)); } superClasses.addAll(interfaces); return superClasses; }
From source file:org.apache.hadoop.net.DNS.java
/** * Returns all the IPs associated with the provided interface, if any, as * a list of InetAddress objects./* ww w. j a v a 2 s . c om*/ * * @param strInterface * The name of the network interface or sub-interface to query * (eg eth0 or eth0:0) or the string "default" * @param returnSubinterfaces * Whether to return IPs associated with subinterfaces of * the given interface * @return A list of all the IPs associated with the provided * interface. The local host IP is returned if the interface * name "default" is specified or there is an I/O error looking * for the given interface. * @throws UnknownHostException * If the given interface is invalid * */ public static List<InetAddress> getIPsAsInetAddressList(String strInterface, boolean returnSubinterfaces) throws UnknownHostException { if ("default".equals(strInterface)) { return Arrays.asList(InetAddress.getByName(cachedHostAddress)); } NetworkInterface netIf; try { netIf = NetworkInterface.getByName(strInterface); if (netIf == null) { netIf = getSubinterface(strInterface); } } catch (SocketException e) { LOG.warn("I/O error finding interface " + strInterface + ": " + e.getMessage()); return Arrays.asList(InetAddress.getByName(cachedHostAddress)); } if (netIf == null) { throw new UnknownHostException("No such interface " + strInterface); } // NB: Using a LinkedHashSet to preserve the order for callers // that depend on a particular element being 1st in the array. // For example, getDefaultIP always returns the first element. LinkedHashSet<InetAddress> allAddrs = new LinkedHashSet<InetAddress>(); allAddrs.addAll(Collections.list(netIf.getInetAddresses())); if (!returnSubinterfaces) { allAddrs.removeAll(getSubinterfaceInetAddrs(netIf)); } return new Vector<InetAddress>(allAddrs); }
From source file:password.pwm.util.java.JavaHelper.java
public static Collection<Method> getAllMethodsForClass(final Class clazz) { final LinkedHashSet<Method> methods = new LinkedHashSet<>(); // add local methods; methods.addAll(Arrays.asList(clazz.getDeclaredMethods())); final Class superClass = clazz.getSuperclass(); if (superClass != null) { methods.addAll(getAllMethodsForClass(superClass)); }//ww w . ja va 2 s . co m return Collections.unmodifiableSet(methods); }