Example usage for java.lang Math log10

List of usage examples for java.lang Math log10

Introduction

On this page you can find example usages for java.lang Math log10.

Prototype

@HotSpotIntrinsicCandidate
public static double log10(double a) 

Document

Returns the base 10 logarithm of a double value.
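
For reference, a minimal standalone sketch of the method's behavior, including its special cases (class name is illustrative):

public class Log10Demo {
    public static void main(String[] args) {
        System.out.println(Math.log10(100.0)); // 2.0 -- exact for powers of ten
        System.out.println(Math.log10(2.0));   // 0.3010299956639812
        System.out.println(Math.log10(0.0));   // -Infinity
        System.out.println(Math.log10(-1.0));  // NaN: negative arguments have no real logarithm
        System.out.println(Math.log10(Double.NaN)); // NaN
    }
}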

Usage

From source file:edu.cornell.med.icb.goby.modes.WithinGroupVariabilityMode.java

private void processTranscriptAlignment(final String basename) throws IOException {
    final AlignmentReaderImpl reader = new AlignmentReaderImpl(basename);
    PrintWriter outputWriter = null;
    try {
        outputWriter = new PrintWriter(new FileWriter(outputFile));

        // outputWriter.write("# One line per reference id. Count indicates the number of times a query \n" +
        //         "# partially overlaps a target, given the various quality filters used to create the alignment.\n");
        outputWriter.write("sampleId\treferenceId\tcount\tlog10(count+1)\tcumulativeBasesAligned\n");

        reader.readHeader();

        final int numberOfReferences = reader.getNumberOfTargets();
        final int[] numberOfReadsPerReference = new int[numberOfReferences];
        final int[] cumulativeBasesPerReference = new int[numberOfReferences];

        System.out.printf("Scanning alignment %s%n", basename);
        for (final Alignments.AlignmentEntry alignmentEntry : reader) {
            ++numberOfReadsPerReference[alignmentEntry.getTargetIndex()];

            cumulativeBasesPerReference[alignmentEntry.getTargetIndex()] += Math
                    .min(alignmentEntry.getQueryAlignedLength(), alignmentEntry.getTargetAlignedLength());
        }
        final IndexedIdentifier targetIds = reader.getTargetIdentifiers();

        final DoubleIndexedIdentifier targetIdBackward = new DoubleIndexedIdentifier(targetIds);

        final String sampleId = FilenameUtils.getBaseName(basename);
        deCalculator.reserve(numberOfReferences, inputFiles.length);
        int numAlignedReadsInSample = 0;
        // define elements that will be tested for differential expression:
        for (int referenceIndex = 0; referenceIndex < numberOfReferences; ++referenceIndex) {

            final String transcriptId = targetIdBackward.getId(referenceIndex).toString();
            final int index = deCalculator.defineElement(transcriptId,
                    DifferentialExpressionCalculator.ElementType.TRANSCRIPT);

            deCalculator.defineElementLength(index, reader.getTargetLength(referenceIndex));
        }

        // observe elements:
        for (int referenceIndex = 0; referenceIndex < numberOfReferences; ++referenceIndex) {

            outputWriter.printf("%s\t%s\t%d\t%g\t%d%n", basename, targetIdBackward.getId(referenceIndex),
                    numberOfReadsPerReference[referenceIndex],
                    Math.log10(numberOfReadsPerReference[referenceIndex] + 1),
                    cumulativeBasesPerReference[referenceIndex]);

            final String transcriptId = targetIdBackward.getId(referenceIndex).toString();

            deCalculator.observe(sampleId, transcriptId, numberOfReadsPerReference[referenceIndex]);
            numAlignedReadsInSample += numberOfReadsPerReference[referenceIndex];
        }
        deCalculator.setNumAlignedInSample(sampleId, numAlignedReadsInSample);
        outputWriter.flush();

    } finally {
        IOUtils.closeQuietly(outputWriter);
        reader.close();
    }
}
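
The log10(count + 1) column written above is a standard way to put raw read counts on a log scale without losing zero counts, since Math.log10(0) is -Infinity. A minimal sketch of the transform (helper name is illustrative):

// Shifted log transform: a count of 0 maps to 0.0 and every value stays finite.
static double log10CountPlusOne(int count) {
    return Math.log10(count + 1);
}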

From source file:msi.gaml.operators.Maths.java

@operator(value = "log", can_be_const = true, category = { IOperatorCategory.ARITHMETIC })
@doc(value = "returns the logarithm (base 10) of the operand.", examples = @example(value = "log(1)", equals = "0.0"))
public static Double log(final Integer x) {
    if (x <= 0) {
        throw GamaRuntimeException.warning("The log operator cannot accept negative or null inputs");
        // return Double.MAX_VALUE; // A compromise...
    }
    return Math.log10(x);
}
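
The explicit guard is needed because Math.log10 itself never throws for out-of-range input; a quick sketch of the unguarded behavior:

double zeroLog = Math.log10(0);      // -Infinity, not an exception
double negativeLog = Math.log10(-3); // NaN, not an exception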

From source file:no.sintef.ict.splcatool.CoveringArrayAlgICPL.java

private void generate3(int coverLimit, Integer sizelimit)
        throws TimeoutException, org.sat4j.specs.TimeoutException {
    // Get a list of vars
    List<BooleanVariableInterface> vars = new ArrayList<BooleanVariableInterface>(cnf.getFocusVariables());

    // Get invalid 2-tuples
    generate2(100, sizelimit);

    // 3-wise
    System.out.println("--- 3-wise ---");

    // Set of invalid 3-tuples
    invalid3w = new HashSet<Pair3>();

    // Solutions
    List<List<Integer>> solutions = new ArrayList<List<Integer>>(initial);
    int coveredInitially = 0;

    /* Calculate uncovered tuples */
    long invalid = 0;
    long ignored = 0;
    List<Pair3> uncovered = new ArrayList<Pair3>();

    {
        int f = vars.size();
        long total = MathUtils.binomialCoefficient(f, 3);
        if (coverOnlyOnes) {
        } else if (!coverZerosOnly) {
            total *= (2 * 2 * 2 - 1);
        } else if (firstHalfOnly || secondHalfOnly) {
            total *= 4;
        } else if (firstFourthOnly || thirdFourthOnly) {
            total *= 2;
        } else if (coverEight != 0) {
        } else {
            total *= 2 * 2 * 2;
        }
        int threads = Runtime.getRuntime().availableProcessors();
        List<CalcUncovered3Thread> cuts = new ArrayList<CalcUncovered3Thread>();
        for (int i = 0; i < threads; i++) {
            int begin = i * vars.size() / threads;
            int end = ((i + 1) * vars.size() / threads);

            CalcUncovered3Thread cut = new CalcUncovered3Thread(begin, end, vars, coverOnlyOnes, coverZerosOnly,
                    invalid2w, idnr, solutions, new HashSet<Pair3>(), firstHalfOnly, secondHalfOnly,
                    firstFourthOnly, thirdFourthOnly, coverEight);
            cuts.add(cut);
        }
        List<Thread> cutts = new ArrayList<Thread>();
        for (int i = 0; i < threads; i++) {
            cutts.add(new Thread(cuts.get(i)));
        }

        // Start threads
        for (int i = 0; i < threads; i++) {
            cutts.get(i).start();
        }

        // Monitor progress
        List<ProgressReporter> xprs = new ArrayList<ProgressReporter>();
        xprs.addAll(cuts);
        ProgressThread xpt = new ProgressThread("Calculate uncovered triples", xprs, total);
        Thread xptt = new Thread(xpt);
        xptt.start();

        // Wait
        for (int i = 0; i < threads; i++) {
            try {
                cutts.get(i).join();
            } catch (InterruptedException e1) {
            }
        }

        // Stop monitoring
        xpt.stop();

        // Gather
        for (int i = 0; i < threads; i++) {
            invalid += cuts.get(i).getInvalidCount();
            uncovered.addAll(cuts.get(i).getUncovered());
            invalid3w.addAll(cuts.get(i).getInvalid());
        }
    }

    // Done
    System.out.println(
            "Uncovered triples left: " + uncovered.size() + " invalid: " + invalid + " ignored: " + ignored);

    // Cover
    long grandTotal = uncovered.size() + invalid;
    boolean invalidRemoved = false;
    int oldcovered = uncovered.size();
    while (!uncovered.isEmpty()) {
        List<List<Integer>> sols = new ArrayList<List<Integer>>();
        int uncTotal = coveredInitially + uncovered.size();

        // Start threads
        {
            List<Pair3> uncSplit = new ArrayList<Pair3>();
            for (int i = 0; i < uncovered.size(); i++) {
                uncSplit.add(uncovered.get(i));
            }
            uncovered.clear();

            C3SplitThread fmt = new C3SplitThread(cnf, uncSplit, idnr);
            Thread t = new Thread(fmt);

            t.start();

            // Start monitoring thread
            List<C3SplitThread> fmts = new ArrayList<C3SplitThread>();
            fmts.add(fmt);
            List<ProgressReporter> prs = new ArrayList<ProgressReporter>(fmts);
            ProgressThread pt = new ProgressThread("Cover triples", prs, uncTotal);
            Thread ptt = new Thread(pt);
            ptt.start();

            // Wait for thread to finish
            try {
                t.join();
            } catch (InterruptedException e) {
            }

            // Stop monitoring
            pt.stop();

            // Round complete
            System.out.println("Round complete");
            uncovered.addAll(fmt.getUncovered());
            sols.addAll(fmt.getSolutions());

            if (saveAfterEachRound) {
                try {
                    solutions.addAll(sols);
                    result = solutions;
                    writeToFile(tmp_save_filename, Type.horizontal, tmpSave_hideUnderscoreVariables);
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
        }

        // Remove covered
        int newcovered;
        Set<Pair3> cov;
        {
            cov = getCovInv3(sols, uncovered);
            System.out.println("Additionally covered " + cov.size());
            newcovered = uncovered.size();
            Set<Pair3> uncovSet = new HashSet<Pair3>(uncovered);
            uncovered.clear();
            uncovSet.removeAll(cov);
            uncovered.addAll(uncovSet);
            uncovSet.clear();
        }

        newcovered = newcovered - uncovered.size();

        // Remove invalid at some round
        if (!invalidRemoved) {
            if ((int) Math.log10(cov.size()) <= (int) Math.log10(cnf.getFocusVariables().size())) {
                System.out.println("Removing invalid");
                int diff = uncovered.size();
                uncovered = getInvalid3(coveredInitially, uncovered);
                diff -= uncovered.size();
                uncTotal -= diff;
                System.out.println("Invalid: " + diff);
                invalidRemoved = true;
            }
        }

        // Store
        solutions.addAll(sols);

        // Report progress
        System.out.println("Uncovered: " + uncovered.size() + ", progress: "
                + (grandTotal - uncovered.size()) * 100 / grandTotal + "% with solutions: " + solutions.size());

        // Stop at limit
        if (coverLimit <= (grandTotal - uncovered.size()) * 100 / grandTotal)
            break;

        // Stop at limit
        if (solutions.size() >= sizelimit)
            break;

        // Done if no more covered
        if (oldcovered == uncovered.size()) {
            System.out.println("Unable to cover valid tuples: " + uncovered.size());
            System.exit(-1);
        }
        oldcovered = uncovered.size();
    }

    result = solutions;
}
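
The invalid-tuple cleanup above is deferred until (int) Math.log10(cov.size()) falls to the order of magnitude of the variable count, i.e. until the two values have roughly the same number of decimal digits. A sketch of the comparison, with one edge case worth knowing (argument names are illustrative):

// Order-of-magnitude comparison via truncated log10. Edge case: Math.log10(0)
// is -Infinity, and (int) Double.NEGATIVE_INFINITY is Integer.MIN_VALUE, so a
// count of 0 always passes the check.
static boolean sameOrSmallerMagnitude(int count, int reference) {
    return (int) Math.log10(count) <= (int) Math.log10(reference);
}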

From source file:org.apache.hadoop.hive.metastore.MetaStoreDirectSql.java

/** Should be called with the list short enough to not trip up Oracle/etc. */
private List<Partition> getPartitionsFromPartitionIds(String dbName, String tblName, Boolean isView,
        List<Object> partIdList) throws MetaException {
    boolean doTrace = LOG.isDebugEnabled();
    int idStringWidth = (int) Math.ceil(Math.log10(partIdList.size())) + 1; // 1 for comma
    int sbCapacity = partIdList.size() * idStringWidth;
    // Prepare StringBuilder for "PART_ID in (...)" to use in future queries.
    StringBuilder partSb = new StringBuilder(sbCapacity);
    for (Object partitionId : partIdList) {
        partSb.append(extractSqlLong(partitionId)).append(",");
    }
    String partIds = trimCommaList(partSb);

    // Get most of the fields for the IDs provided.
    // Assume db and table names are the same for all partitions, as provided in arguments.
    String queryText = "select " + PARTITIONS + ".\"PART_ID\", " + SDS + ".\"SD_ID\", " + SDS + ".\"CD_ID\","
            + " " + SERDES + ".\"SERDE_ID\", " + PARTITIONS + ".\"CREATE_TIME\"," + " " + PARTITIONS
            + ".\"LAST_ACCESS_TIME\", " + SDS + ".\"INPUT_FORMAT\", " + SDS + ".\"IS_COMPRESSED\"," + " " + SDS
            + ".\"IS_STOREDASSUBDIRECTORIES\", " + SDS + ".\"LOCATION\", " + SDS + ".\"NUM_BUCKETS\"," + " "
            + SDS + ".\"OUTPUT_FORMAT\", " + SERDES + ".\"NAME\", " + SERDES + ".\"SLIB\" " + "from "
            + PARTITIONS + "" + "  left outer join " + SDS + " on " + PARTITIONS + ".\"SD_ID\" = " + SDS
            + ".\"SD_ID\" " + "  left outer join " + SERDES + " on " + SDS + ".\"SERDE_ID\" = " + SERDES
            + ".\"SERDE_ID\" " + "where \"PART_ID\" in (" + partIds + ") order by \"PART_NAME\" asc";
    long start = doTrace ? System.nanoTime() : 0;
    Query query = pm.newQuery("javax.jdo.query.SQL", queryText);
    List<Object[]> sqlResult = executeWithArray(query, null, queryText);
    long queryTime = doTrace ? System.nanoTime() : 0;
    Deadline.checkTimeout();

    // Read all the fields and create partitions, SDs and serdes.
    TreeMap<Long, Partition> partitions = new TreeMap<Long, Partition>();
    TreeMap<Long, StorageDescriptor> sds = new TreeMap<Long, StorageDescriptor>();
    TreeMap<Long, SerDeInfo> serdes = new TreeMap<Long, SerDeInfo>();
    TreeMap<Long, List<FieldSchema>> colss = new TreeMap<Long, List<FieldSchema>>();
    // Keep order by name, consistent with JDO.
    ArrayList<Partition> orderedResult = new ArrayList<Partition>(partIdList.size());

    // Prepare StringBuilder-s for "in (...)" lists to use in one-to-many queries.
    StringBuilder sdSb = new StringBuilder(sbCapacity), serdeSb = new StringBuilder(sbCapacity);
    StringBuilder colsSb = new StringBuilder(7); // We expect that there's only one field schema.
    tblName = tblName.toLowerCase();
    dbName = dbName.toLowerCase();
    for (Object[] fields : sqlResult) {
        // Here comes the ugly part...
        long partitionId = extractSqlLong(fields[0]);
        Long sdId = extractSqlLong(fields[1]);
        Long colId = extractSqlLong(fields[2]);
        Long serdeId = extractSqlLong(fields[3]);
        // A partition must have at least sdId and serdeId set, or nothing set if it's a view.
        if (sdId == null || serdeId == null) {
            if (isView == null) {
                isView = isViewTable(dbName, tblName);
            }
            if ((sdId != null || colId != null || serdeId != null) || !isView) {
                throw new MetaException("Unexpected null for one of the IDs, SD " + sdId + ", serde " + serdeId
                        + " for a " + (isView ? "" : "non-") + " view");
            }
        }

        Partition part = new Partition();
        orderedResult.add(part);
        // Set the collection fields; some code might not check presence before accessing them.
        part.setParameters(new HashMap<String, String>());
        part.setValues(new ArrayList<String>());
        part.setDbName(dbName);
        part.setTableName(tblName);
        if (fields[4] != null)
            part.setCreateTime(extractSqlInt(fields[4]));
        if (fields[5] != null)
            part.setLastAccessTime(extractSqlInt(fields[5]));
        partitions.put(partitionId, part);

        if (sdId == null)
            continue; // Probably a view.
        assert serdeId != null;

        // We assume each partition has a unique SD.
        StorageDescriptor sd = new StorageDescriptor();
        StorageDescriptor oldSd = sds.put(sdId, sd);
        if (oldSd != null) {
            throw new MetaException("Partitions reuse SDs; we don't expect that");
        }
        // Set the collection fields; some code might not check presence before accessing them.
        sd.setSortCols(new ArrayList<Order>());
        sd.setBucketCols(new ArrayList<String>());
        sd.setParameters(new HashMap<String, String>());
        sd.setSkewedInfo(new SkewedInfo(new ArrayList<String>(), new ArrayList<List<String>>(),
                new HashMap<List<String>, String>()));
        sd.setInputFormat((String) fields[6]);
        Boolean tmpBoolean = extractSqlBoolean(fields[7]);
        if (tmpBoolean != null)
            sd.setCompressed(tmpBoolean);
        tmpBoolean = extractSqlBoolean(fields[8]);
        if (tmpBoolean != null)
            sd.setStoredAsSubDirectories(tmpBoolean);
        sd.setLocation((String) fields[9]);
        if (fields[10] != null)
            sd.setNumBuckets(extractSqlInt(fields[10]));
        sd.setOutputFormat((String) fields[11]);
        sdSb.append(sdId).append(",");
        part.setSd(sd);

        if (colId != null) {
            List<FieldSchema> cols = colss.get(colId);
            // We expect that colId will be the same for all (or many) SDs.
            if (cols == null) {
                cols = new ArrayList<FieldSchema>();
                colss.put(colId, cols);
                colsSb.append(colId).append(",");
            }
            sd.setCols(cols);
        }

        // We assume each SD has a unique serde.
        SerDeInfo serde = new SerDeInfo();
        SerDeInfo oldSerde = serdes.put(serdeId, serde);
        if (oldSerde != null) {
            throw new MetaException("SDs reuse serdes; we don't expect that");
        }
        serde.setParameters(new HashMap<String, String>());
        serde.setName((String) fields[12]);
        serde.setSerializationLib((String) fields[13]);
        serdeSb.append(serdeId).append(",");
        sd.setSerdeInfo(serde);
        Deadline.checkTimeout();
    }
    query.closeAll();
    timingTrace(doTrace, queryText, start, queryTime);

    // Now get all the one-to-many things. Start with partitions.
    queryText = "select \"PART_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + PARTITION_PARAMS + ""
            + " where \"PART_ID\" in (" + partIds + ") and \"PARAM_KEY\" is not null"
            + " order by \"PART_ID\" asc";
    loopJoinOrderedResult(partitions, queryText, 0, new ApplyFunc<Partition>() {
        @Override
        public void apply(Partition t, Object[] fields) {
            t.putToParameters((String) fields[1], (String) fields[2]);
        }
    });
    // Perform conversion of null map values
    for (Partition t : partitions.values()) {
        t.setParameters(MetaStoreUtils.trimMapNulls(t.getParameters(), convertMapNullsToEmptyStrings));
    }

    queryText = "select \"PART_ID\", \"PART_KEY_VAL\" from " + PARTITION_KEY_VALS + ""
            + " where \"PART_ID\" in (" + partIds + ") and \"INTEGER_IDX\" >= 0"
            + " order by \"PART_ID\" asc, \"INTEGER_IDX\" asc";
    loopJoinOrderedResult(partitions, queryText, 0, new ApplyFunc<Partition>() {
        @Override
        public void apply(Partition t, Object[] fields) {
            t.addToValues((String) fields[1]);
        }
    });

    // Prepare IN (blah) lists for the following queries. Cut off the final ','s.
    if (sdSb.length() == 0) {
        assert serdeSb.length() == 0 && colsSb.length() == 0;
        return orderedResult; // No SDs, probably a view.
    }

    String sdIds = trimCommaList(sdSb);
    String serdeIds = trimCommaList(serdeSb);
    String colIds = trimCommaList(colsSb);

    // Get all the stuff for SD. Don't do an empty-list check - we expect partitions to have SDs.
    queryText = "select \"SD_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + SD_PARAMS + ""
            + " where \"SD_ID\" in (" + sdIds + ") and \"PARAM_KEY\" is not null" + " order by \"SD_ID\" asc";
    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
        @Override
        public void apply(StorageDescriptor t, Object[] fields) {
            t.putToParameters((String) fields[1], extractSqlClob(fields[2]));
        }
    });
    // Perform conversion of null map values
    for (StorageDescriptor t : sds.values()) {
        t.setParameters(MetaStoreUtils.trimMapNulls(t.getParameters(), convertMapNullsToEmptyStrings));
    }

    queryText = "select \"SD_ID\", \"COLUMN_NAME\", " + SORT_COLS + ".\"ORDER\"" + " from " + SORT_COLS + ""
            + " where \"SD_ID\" in (" + sdIds + ") and \"INTEGER_IDX\" >= 0"
            + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
        @Override
        public void apply(StorageDescriptor t, Object[] fields) {
            if (fields[2] == null)
                return;
            t.addToSortCols(new Order((String) fields[1], extractSqlInt(fields[2])));
        }
    });

    queryText = "select \"SD_ID\", \"BUCKET_COL_NAME\" from " + BUCKETING_COLS + "" + " where \"SD_ID\" in ("
            + sdIds + ") and \"INTEGER_IDX\" >= 0" + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
    loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
        @Override
        public void apply(StorageDescriptor t, Object[] fields) {
            t.addToBucketCols((String) fields[1]);
        }
    });

    // Skewed columns stuff.
    queryText = "select \"SD_ID\", \"SKEWED_COL_NAME\" from " + SKEWED_COL_NAMES + "" + " where \"SD_ID\" in ("
            + sdIds + ") and \"INTEGER_IDX\" >= 0" + " order by \"SD_ID\" asc, \"INTEGER_IDX\" asc";
    boolean hasSkewedColumns = loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
        @Override
        public void apply(StorageDescriptor t, Object[] fields) {
            if (!t.isSetSkewedInfo())
                t.setSkewedInfo(new SkewedInfo());
            t.getSkewedInfo().addToSkewedColNames((String) fields[1]);
        }
    }) > 0;

    // Assume we don't need to fetch the rest of the skewed column data if we have no columns.
    if (hasSkewedColumns) {
        // We are skipping the SKEWED_STRING_LIST table here, as it seems to be totally useless.
        queryText = "select " + SKEWED_VALUES + ".\"SD_ID_OID\"," + "  " + SKEWED_STRING_LIST_VALUES
                + ".\"STRING_LIST_ID\"," + "  " + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_VALUE\" "
                + "from " + SKEWED_VALUES + " " + "  left outer join " + SKEWED_STRING_LIST_VALUES + " on "
                + SKEWED_VALUES + "." + "\"STRING_LIST_ID_EID\" = " + SKEWED_STRING_LIST_VALUES
                + ".\"STRING_LIST_ID\" " + "where " + SKEWED_VALUES + ".\"SD_ID_OID\" in (" + sdIds + ") "
                + "  and " + SKEWED_VALUES + ".\"STRING_LIST_ID_EID\" is not null " + "  and " + SKEWED_VALUES
                + ".\"INTEGER_IDX\" >= 0 " + "order by " + SKEWED_VALUES + ".\"SD_ID_OID\" asc, "
                + SKEWED_VALUES + ".\"INTEGER_IDX\" asc," + "  " + SKEWED_STRING_LIST_VALUES
                + ".\"INTEGER_IDX\" asc";
        loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
            private Long currentListId;
            private List<String> currentList;

            @Override
            public void apply(StorageDescriptor t, Object[] fields) throws MetaException {
                if (!t.isSetSkewedInfo())
                    t.setSkewedInfo(new SkewedInfo());
                // Note that this is not a typical list accumulator - there's no call to finalize
                // the last list. Instead we add list to SD first, as well as locally to add elements.
                if (fields[1] == null) {
                    currentList = null; // left outer join produced a list with no values
                    currentListId = null;
                    t.getSkewedInfo().addToSkewedColValues(Collections.<String>emptyList());
                } else {
                    long fieldsListId = extractSqlLong(fields[1]);
                    if (currentListId == null || fieldsListId != currentListId) {
                        currentList = new ArrayList<String>();
                        currentListId = fieldsListId;
                        t.getSkewedInfo().addToSkewedColValues(currentList);
                    }
                    currentList.add((String) fields[2]);
                }
            }
        });

        // We are skipping the SKEWED_STRING_LIST table here, as it seems to be totally useless.
        queryText = "select " + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\"," + " " + SKEWED_STRING_LIST_VALUES
                + ".STRING_LIST_ID," + " " + SKEWED_COL_VALUE_LOC_MAP + ".\"LOCATION\"," + " "
                + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_VALUE\" " + "from " + SKEWED_COL_VALUE_LOC_MAP
                + "" + "  left outer join " + SKEWED_STRING_LIST_VALUES + " on " + SKEWED_COL_VALUE_LOC_MAP
                + "." + "\"STRING_LIST_ID_KID\" = " + SKEWED_STRING_LIST_VALUES + ".\"STRING_LIST_ID\" "
                + "where " + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\" in (" + sdIds + ")" + "  and "
                + SKEWED_COL_VALUE_LOC_MAP + ".\"STRING_LIST_ID_KID\" is not null " + "order by "
                + SKEWED_COL_VALUE_LOC_MAP + ".\"SD_ID\" asc," + "  " + SKEWED_STRING_LIST_VALUES
                + ".\"STRING_LIST_ID\" asc," + "  " + SKEWED_STRING_LIST_VALUES + ".\"INTEGER_IDX\" asc";

        loopJoinOrderedResult(sds, queryText, 0, new ApplyFunc<StorageDescriptor>() {
            private Long currentListId;
            private List<String> currentList;

            @Override
            public void apply(StorageDescriptor t, Object[] fields) throws MetaException {
                if (!t.isSetSkewedInfo()) {
                    SkewedInfo skewedInfo = new SkewedInfo();
                    skewedInfo.setSkewedColValueLocationMaps(new HashMap<List<String>, String>());
                    t.setSkewedInfo(skewedInfo);
                }
                Map<List<String>, String> skewMap = t.getSkewedInfo().getSkewedColValueLocationMaps();
                // Note that this is not a typical list accumulator - there's no call to finalize
                // the last list. Instead we add list to SD first, as well as locally to add elements.
                if (fields[1] == null) {
                    currentList = new ArrayList<String>(); // left outer join produced a list with no values
                    currentListId = null;
                } else {
                    long fieldsListId = extractSqlLong(fields[1]);
                    if (currentListId == null || fieldsListId != currentListId) {
                        currentList = new ArrayList<String>();
                        currentListId = fieldsListId;
                    } else {
                        skewMap.remove(currentList); // value based compare.. remove first
                    }
                    currentList.add((String) fields[3]);
                }
                skewMap.put(currentList, (String) fields[2]);
            }
        });
    } // if (hasSkewedColumns)

    // Get FieldSchema stuff if any.
    if (!colss.isEmpty()) {
        // We are skipping the CDS table here, as it seems to be totally useless.
        queryText = "select \"CD_ID\", \"COMMENT\", \"COLUMN_NAME\", \"TYPE_NAME\"" + " from " + COLUMNS_V2
                + " where \"CD_ID\" in (" + colIds + ") and \"INTEGER_IDX\" >= 0"
                + " order by \"CD_ID\" asc, \"INTEGER_IDX\" asc";
        loopJoinOrderedResult(colss, queryText, 0, new ApplyFunc<List<FieldSchema>>() {
            @Override
            public void apply(List<FieldSchema> t, Object[] fields) {
                t.add(new FieldSchema((String) fields[2], extractSqlClob(fields[3]), (String) fields[1]));
            }
        });
    }

    // Finally, get all the stuff for serdes - just the params.
    queryText = "select \"SERDE_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + SERDE_PARAMS + ""
            + " where \"SERDE_ID\" in (" + serdeIds + ") and \"PARAM_KEY\" is not null"
            + " order by \"SERDE_ID\" asc";
    loopJoinOrderedResult(serdes, queryText, 0, new ApplyFunc<SerDeInfo>() {
        @Override
        public void apply(SerDeInfo t, Object[] fields) {
            t.putToParameters((String) fields[1], extractSqlClob(fields[2]));
        }
    });
    // Perform conversion of null map values
    for (SerDeInfo t : serdes.values()) {
        t.setParameters(MetaStoreUtils.trimMapNulls(t.getParameters(), convertMapNullsToEmptyStrings));
    }

    return orderedResult;
}
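
The capacity estimate at the top of the method relies on the digit-count idiom: a positive integer n has floor(log10(n)) + 1 decimal digits. A sketch of the idiom in isolation (helper name is illustrative; Math.log10 is specified to return exactly n for arguments of the form 10^n, so the truncating cast is safe at powers of ten):

// Decimal digit count for n >= 1, e.g. digitCount(999) == 3, digitCount(1000) == 4.
static int digitCount(long n) {
    return (int) Math.log10(n) + 1;
}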

From source file:eu.clarin.cmdi.vlo.importer.MetadataImporter.java

/**
 * Updates documents in Solr with their hierarchy weight and lists of
 * related resources (hasPart & isPartOf)
 *
 * @throws SolrServerException
 * @throws MalformedURLException
 */
private void updateDocumentHierarchy() throws SolrServerException, MalformedURLException, IOException {
    LOG.info(ResourceStructureGraph.printStatistics(0));
    Boolean updatedDocs = false;
    List<SolrInputDocument> updateDocs = new ArrayList<>();
    Iterator<CmdiVertex> vertexIter = ResourceStructureGraph.getFoundVertices().iterator();
    while (vertexIter.hasNext()) {
        CmdiVertex vertex = vertexIter.next();
        List<String> incomingVertexNames = ResourceStructureGraph.getIncomingVertexNames(vertex);
        List<String> outgoingVertexNames = ResourceStructureGraph.getOutgoingVertexNames(vertex);

        // update vertex if changes are necessary (necessary if non-default weight or edges to other resources)
        if (vertex.getHierarchyWeight() != 0 || !incomingVertexNames.isEmpty()
                || !outgoingVertexNames.isEmpty()) {
            updatedDocs = true;
            SolrInputDocument doc = new SolrInputDocument();
            doc.setField(FacetConstants.FIELD_ID, Arrays.asList(vertex.getId()));

            if (vertex.getHierarchyWeight() != 0) {
                Map<String, Integer> partialUpdateMap = new HashMap<>();
                partialUpdateMap.put("set", Math.abs(vertex.getHierarchyWeight()));
                doc.setField(FacetConstants.FIELD_HIERARCHY_WEIGHT, partialUpdateMap);
            }

            // remove vertices that were not imported
            Iterator<String> incomingVertexIter = incomingVertexNames.iterator();
            while (incomingVertexIter.hasNext()) {
                String vertexId = incomingVertexIter.next();
                if (ResourceStructureGraph.getVertex(vertexId) == null
                        || !ResourceStructureGraph.getVertex(vertexId).getWasImported()) {
                    incomingVertexIter.remove();
                }
            }
            Iterator<String> outgoingVertexIter = outgoingVertexNames.iterator();
            while (outgoingVertexIter.hasNext()) {
                String vertexId = outgoingVertexIter.next();
                if (ResourceStructureGraph.getVertex(vertexId) == null
                        || !ResourceStructureGraph.getVertex(vertexId).getWasImported()) {
                    outgoingVertexIter.remove();
                }
            }

            if (!incomingVertexNames.isEmpty()) {
                Map<String, List<String>> partialUpdateMap = new HashMap<>();
                partialUpdateMap.put("set", incomingVertexNames);
                doc.setField(FacetConstants.FIELD_HAS_PART, partialUpdateMap);

                Map<String, Integer> partialUpdateMapCount = new HashMap<>();
                partialUpdateMapCount.put("set", incomingVertexNames.size());
                doc.setField(FacetConstants.FIELD_HAS_PART_COUNT, partialUpdateMapCount);

                // add hasPartCount weight
                Double hasPartCountWeight = Math.log10(1 + Math.min(50, incomingVertexNames.size()));
                Map<String, Double> partialUpdateMapCountWeight = new HashMap<>();
                partialUpdateMapCountWeight.put("set", hasPartCountWeight);
                doc.setField(FacetConstants.FIELD_HAS_PART_COUNT_WEIGHT, partialUpdateMapCountWeight);
            }

            if (!outgoingVertexNames.isEmpty()) {
                Map<String, List<String>> partialUpdateMap = new HashMap<>();
                partialUpdateMap.put("set", outgoingVertexNames);
                doc.setField(FacetConstants.FIELD_IS_PART_OF, partialUpdateMap);
            }
            updateDocs.add(doc);
        }

        if (updateDocs.size() == config.getMaxDocsInList()) {
            solrServer.add(updateDocs);
            if (serverError != null) {
                throw new SolrServerException(serverError);
            }
            updateDocs = new ArrayList<>();
        }
    }
    if (!updateDocs.isEmpty()) {
        solrServer.add(updateDocs);
        if (serverError != null) {
            throw new SolrServerException(serverError);
        }
    }

    if (updatedDocs) {
        solrServer.commit();
    }

    ResourceStructureGraph.clearResourceGraph();
}
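
The hasPartCount weight computed above is a capped, dampened boost: the count is clipped at 50 and passed through log10(1 + n), so the weight grows quickly for the first few parts and then flattens, topping out at log10(51), roughly 1.71. A sketch of the curve (helper name is illustrative):

// Capped log boost: 0 parts -> 0.0, 9 parts -> 1.0, 50 or more -> ~1.71.
static double hasPartCountWeight(int partCount) {
    return Math.log10(1 + Math.min(50, partCount));
}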

From source file:de.tudarmstadt.lt.lm.app.StartLM.java

void listNgrams() {
    if (_providerService == null) {
        System.out.println("LM Server is not runnning.");
        return;
    }

    try {
        LanguageModel<String> lm = _providerService.getLanguageModel();
        Iterator<List<String>> iter = lm.getNgramIterator();
        if (!iter.hasNext()) {
            System.out.println("No ngrams in Language Model.");
            return;
        }

        double log10_prob, prob10, log2_prob;
        int i = 0;
        for (List<String> ngram = null; iter.hasNext();) {
            ngram = iter.next();
            if (++i % 30 == 0)
                if (":q".equals(readInput(String.format(
                        "press <enter> to show next 30 ngrams, type ':q' if you want to quit showing ngrams: %n%s $> ",
                        _name))))
                    break;

            log10_prob = _providerService.getNgramLog10Probability(ngram);
            prob10 = Math.pow(10, log10_prob);
            log2_prob = log10_prob / Math.log10(2);

            System.out.format("%-50.50s [%g (log10=%g, log2=%g)] %n",
                    StringUtils.abbreviate(StringUtils.join(ngram, ' '), 50), prob10, log10_prob, log2_prob);
        }

    } catch (Exception e) {
        LOG.warn(e.getMessage());
    }

}
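
The log2 value above uses the change-of-base identity log_b(x) = log10(x) / log10(b), the usual way to obtain logarithms in bases the JDK does not provide directly. A minimal sketch:

// Change of base via log10; e.g. logBase(2, 8.0) == 3.0 up to floating-point rounding.
static double logBase(double base, double x) {
    return Math.log10(x) / Math.log10(base);
}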

From source file:com.act.reachables.LoadAct.java

private void setXrefs(Long node, Chemical c) {
    for (REFS typ : Chemical.REFS.values()) {
        if (c.getRef(typ) != null) {
            Double valuation = c.getRefMetric(typ);
            Node.setAttribute(node, typ.name(), c.getRef(typ).toString());
            Node.setAttribute(node, "metric" + typ.name(), valuation == null ? -999999999.0 : valuation);
            Node.setAttribute(node, "log10metric" + typ.name(),
                    valuation == null ? -99.0 : Math.log10(valuation));
            Node.setAttribute(node, "has" + typ.name(), true);
    } else {
            Node.setAttribute(node, "has" + typ.name(), false);
        }
    }
}

From source file:us.mn.state.health.lims.reports.action.implementation.HaitiPatientReport.java

private String getAugmentedResult(HaitiClinicalPatientData data, Result result) {
    String resultValue = data.getResult();
    if (TestIdentityService.isTestNumericViralLoad(reportAnalysis.getTest())) {
        try {
            resultValue += " (" + twoDecimalFormat.format(Math.log10(Double.parseDouble(resultValue)))
                    + ")log ";
        } catch (NumberFormatException e) {
            // no-op: non-numeric result values are left unaugmented
        }
    }

    return resultValue + (augmentResultWithFlag() ? getResultFlag(result, null) : "");
}

From source file:ubc.pavlab.gotrack.beans.TrackView.java

private <T extends Number> LineChartModel createChart(
        GoChart<Edition, Map<GeneOntologyTerm, Set<EvidenceReference>>> goChart, GoChart<Edition, T> staticData,
        boolean logAxis) {
    LineChartModel dateModel = new LineChartModel();

    if (staticData != null) {
        for (Entry<String, LinkedHashMap<Edition, T>> es : staticData.getSeries().entrySet()) {
            String label = es.getKey();
            Map<Edition, T> sData = es.getValue();

            LineChartSeries series = new LineChartSeries();
            series.setLabel(label);
            series.setShowMarker(false);

            for (Entry<Edition, T> dataPoint : sData.entrySet()) {
                String date = dataPoint.getKey().getDate().toString();
                // doubleValue() avoids a ClassCastException when the series data is not Double
                Number val = logAxis ? Math.log10(dataPoint.getValue().doubleValue()) : dataPoint.getValue();
                series.set(date, val);
            }

            dateModel.addSeries(series);
        }
    }

    for (Entry<String, LinkedHashMap<Edition, Map<GeneOntologyTerm, Set<EvidenceReference>>>> es : goChart
            .getSeries().entrySet()) {
        String primary = es.getKey();
        Map<Edition, Map<GeneOntologyTerm, Set<EvidenceReference>>> sData = es.getValue();

        LineChartSeries series = new LineChartSeries();
        series.setLabel(primary);
        series.setMarkerStyle("filledDiamond");

        for (Entry<Edition, Map<GeneOntologyTerm, Set<EvidenceReference>>> dataPoint : sData.entrySet()) {
            String date = dataPoint.getKey().getDate().toString();
            Integer count = dataPoint.getValue().size();
            series.set(date, logAxis ? Math.log10(count) : count);
        }

        dateModel.addSeries(series);
    }

    dateModel.setTitle(goChart.getTitle());
    dateModel.setZoom(true);

    dateModel.setLegendPosition("nw");
    // dateModel.setAnimate( true );
    dateModel.setLegendRows(8);
    dateModel.setMouseoverHighlight(true);
    dateModel.setExtender("chartExtender");

    dateModel.getAxis(AxisType.Y).setLabel((logAxis ? "Log of " : "") + goChart.getyLabel());

    if (goChart.getMin() != null) {
        dateModel.getAxis(AxisType.Y).setMin(goChart.getMin());
    }

    if (goChart.getMax() != null) {
        dateModel.getAxis(AxisType.Y).setMax(goChart.getMax());
    }

    DateAxis axis = new DateAxis(goChart.getxLabel());
    // CategoryAxis axis = new CategoryAxis( "Editions" );
    axis.setTickAngle(-50);
    // axis.setMax( currentEdition.getDate());
    axis.setTickFormat("%b %#d, %y");

    dateModel.getAxes().put(AxisType.X, axis);
    return dateModel;
}

From source file:org.broadinstitute.gatk.utils.MathUtilsUnitTest.java

@Test
public void testLog10sumLog10() {
    final double requiredPrecision = 1E-14;

    final double log3 = 0.477121254719662;
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, 0.0, 0.0 }), log3, requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, 0.0, 0.0 }, 0), log3, requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, 0.0, 0.0 }, 0, 3), log3, requiredPrecision);

    final double log2 = 0.301029995663981;
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, 0.0, 0.0 }, 0, 2), log2, requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, 0.0, 0.0 }, 0, 1), 0.0, requiredPrecision);

    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0 }), 0.0, requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -5.15 }), -5.15, requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 130.0 }), 130.0, requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -0.145 }), -0.145, requiredPrecision);

    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, 0.0 }),
            Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -1.0, 0.0 }),
            Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, -1.0 }),
            Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -2.2, -3.5 }),
            Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -1.0, -7.1 }),
            Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 5.0, 6.2 }),
            Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 38.1, 16.2 }),
            Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -38.1, 6.2 }),
            Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -19.1, -37.1 }),
            Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -29.1, -27.6 }),
            Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -0.12345, -0.23456 }),
            Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -15.7654, -17.0101 }),
            Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101)), requiredPrecision);

    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, 0.0, 0.0 }),
            Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -1.0, 0.0, 0.0 }),
            Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, 0.0) + Math.pow(10.0, 0.0)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 0.0, -1.0, -2.5 }),
            Math.log10(Math.pow(10.0, 0.0) + Math.pow(10.0, -1.0) + Math.pow(10.0, -2.5)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -2.2, -3.5, -1.1 }),
            Math.log10(Math.pow(10.0, -2.2) + Math.pow(10.0, -3.5) + Math.pow(10.0, -1.1)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -1.0, -7.1, 0.5 }),
            Math.log10(Math.pow(10.0, -1.0) + Math.pow(10.0, -7.1) + Math.pow(10.0, 0.5)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 5.0, 6.2, 1.3 }),
            Math.log10(Math.pow(10.0, 5.0) + Math.pow(10.0, 6.2) + Math.pow(10.0, 1.3)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { 38.1, 16.2, 18.1 }),
            Math.log10(Math.pow(10.0, 38.1) + Math.pow(10.0, 16.2) + Math.pow(10.0, 18.1)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -38.1, 6.2, 26.6 }),
            Math.log10(Math.pow(10.0, -38.1) + Math.pow(10.0, 6.2) + Math.pow(10.0, 26.6)), requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -19.1, -37.1, -45.1 }),
            Math.log10(Math.pow(10.0, -19.1) + Math.pow(10.0, -37.1) + Math.pow(10.0, -45.1)),
            requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -29.1, -27.6, -26.2 }),
            Math.log10(Math.pow(10.0, -29.1) + Math.pow(10.0, -27.6) + Math.pow(10.0, -26.2)),
            requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -0.12345, -0.23456, -0.34567 }),
            Math.log10(Math.pow(10.0, -0.12345) + Math.pow(10.0, -0.23456) + Math.pow(10.0, -0.34567)),
            requiredPrecision);
    Assert.assertEquals(MathUtils.log10sumLog10(new double[] { -15.7654, -17.0101, -17.9341 }),
            Math.log10(Math.pow(10.0, -15.7654) + Math.pow(10.0, -17.0101) + Math.pow(10.0, -17.9341)),
            requiredPrecision);

    // magnitude of the sum doesn't matter, so we can combinatorially test this via partitions of unity
    double[] mult_partitionFactor = new double[] { 0.999, 0.98, 0.95, 0.90, 0.8, 0.5, 0.3, 0.1, 0.05, 0.001 };
    int[] n_partitions = new int[] { 2, 4, 8, 16, 32, 64, 128, 256, 512, 1028 };
    for (double alpha : mult_partitionFactor) {
        double log_alpha = Math.log10(alpha);
        double log_oneMinusAlpha = Math.log10(1 - alpha);
        for (int npart : n_partitions) {
            double[] multiplicative = new double[npart];
            double[] equal = new double[npart];
            double remaining_log = 0.0; // realspace = 1
            for (int i = 0; i < npart - 1; i++) {
                equal[i] = -Math.log10(npart);
                double piece = remaining_log + log_alpha; // take a*remaining, leaving remaining-a*remaining = (1-a)*remaining
                multiplicative[i] = piece;
                remaining_log = remaining_log + log_oneMinusAlpha;
            }
            equal[npart - 1] = -Math.log10(npart);
            multiplicative[npart - 1] = remaining_log;
            Assert.assertEquals(MathUtils.log10sumLog10(equal), 0.0, requiredPrecision);
            Assert.assertEquals(MathUtils.log10sumLog10(multiplicative), 0.0, requiredPrecision,
                    String.format("Did not sum to one: nPartitions=%d, alpha=%f", npart, alpha));
        }
    }
}
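
The method under test is the base-10 analogue of log-sum-exp. A minimal, numerically stable sketch (not the GATK implementation, just the standard max-factoring trick):

// log10(sum_i 10^v_i), computed by factoring out the maximum so that no
// intermediate Math.pow call can overflow.
static double log10SumLog10(double[] log10Values) {
    double max = Double.NEGATIVE_INFINITY;
    for (double v : log10Values)
        max = Math.max(max, v);
    if (max == Double.NEGATIVE_INFINITY)
        return Double.NEGATIVE_INFINITY; // empty input or all -Infinity
    double sum = 0.0;
    for (double v : log10Values)
        sum += Math.pow(10.0, v - max);
    return max + Math.log10(sum);
}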