Example usage for java.util BitSet set

List of usage examples for java.util BitSet set

Introduction

On this page you can find usage examples for java.util.BitSet.set.

Prototype

public void set(int bitIndex) 

Document

Sets the bit at the specified index to true.
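
Example

Before the real-world usages below, here is a minimal, self-contained sketch of set(int bitIndex) together with the related BitSet calls (get, cardinality, nextClearBit) that those usages rely on. The class name and index values are made up purely for illustration:

import java.util.BitSet;

// Hypothetical demo class, not taken from any of the source files below.
public class BitSetSetExample {
    public static void main(String[] args) {
        // Track which of 8 slots have been observed.
        BitSet seen = new BitSet(8);

        seen.set(3); // mark index 3 as true
        seen.set(5); // mark index 5 as true

        System.out.println(seen.get(3));          // true
        System.out.println(seen.get(4));          // false
        System.out.println(seen.cardinality());   // 2 (number of bits set)
        System.out.println(seen.nextClearBit(0)); // 0 (first index still false)
        System.out.println(seen);                 // {3, 5}
    }
}

This mirrors the dominant pattern in the examples that follow: mark indices as they are seen, then use cardinality() or nextClearBit() to check coverage or completeness.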

Usage

From source file: com.turn.griffin.data.GriffinUploadTask.java

private BitSet getAvailableBitmap(FileInfo fileInfo) {

    String filename = fileInfo.getFilename();
    long fileVersion = fileInfo.getVersion();
    long blockCount = fileInfo.getBlockCount();

    Optional<GriffinConsumer> consumer = Optional.absent();
    BitSet availableBlockBitmap = new BitSet((int) blockCount);
    try {
        BlockingQueue<byte[]> dataQueue = new ArrayBlockingQueue<>(
                GriffinDownloadTask.DOWNLOAD_CONSUMER_QUEUE_SIZE);
        Properties properties = new Properties();
        properties.put("auto.offset.reset", "smallest");

        /* The groupId should be unique to avoid conflict with other consumers running on this machine */
        String consumerGroupId = GriffinKafkaTopicNameUtil.getDataTopicConsumerGroupId(filename, fileVersion,
                new String[] { dataManager.getMyServerId(), this.getClass().getSimpleName(),
                        UUID.randomUUID().toString() });
        String dataTopicNameForConsumer = GriffinKafkaTopicNameUtil.getDataTopicNameForConsumer(filename,
                fileVersion);

        consumer = Optional.fromNullable(new GriffinConsumer(GriffinModule.ZOOKEEPER, consumerGroupId,
                dataTopicNameForConsumer, GriffinDownloadTask.DOWNLOAD_THREAD_COUNT, properties, dataQueue));

        /* TODO: Change this to a better bitmap (Check out RoaringBitmap) */
        while (availableBlockBitmap.nextClearBit(0) != blockCount) {
            Optional<byte[]> message = Optional.fromNullable(dataQueue
                    .poll(GriffinLeaderSelectionTask.LEADER_SELECTION_PERIOD_MS, TimeUnit.MILLISECONDS));
            if (!message.isPresent()) {
                /* We know how much of the file is available in Kafka */
                break;
            }
            DataMessage dataMessage = DataMessage.parseFrom(message.get());
            availableBlockBitmap.set((int) dataMessage.getBlockSeqNo());
        }
    } catch (Exception e) {
        logger.warn(String.format("Unable to download file %s to get available bitmap ", filename), e);
        /* Work with whatever information we have gathered till now */
    } finally {
        if (consumer.isPresent()) {
            consumer.get().shutdown(true);
        }
    }

    return availableBlockBitmap;
}

From source file: org.apache.hadoop.mapreduce.lib.input.TestMRKeyValueTextInputFormat.java

@Test
public void testSplitableCodecs() throws Exception {
    final Job job = Job.getInstance(defaultConf);
    final Configuration conf = job.getConfiguration();

    // Create the codec
    CompressionCodec codec = null;
    try {
        codec = (CompressionCodec) ReflectionUtils
                .newInstance(conf.getClassByName("org.apache.hadoop.io.compress.BZip2Codec"), conf);
    } catch (ClassNotFoundException cnfe) {
        throw new IOException("Illegal codec!");
    }
    Path file = new Path(workDir, "test" + codec.getDefaultExtension());

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int MAX_LENGTH = 500000;
    FileInputFormat.setMaxInputSplitSize(job, MAX_LENGTH / 20);
    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 4) + 1) {

        LOG.info("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(codec.createOutputStream(localFs.create(file)));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        assertTrue("KVTIF claims not splittable", format.isSplitable(job, file));
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 2000) + 1;
            LOG.info("splitting: requesting = " + numSplits);
            List<InputSplit> splits = format.getSplits(job);
            LOG.info("splitting: got =        " + splits.size());

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.size(); j++) {
                LOG.debug("split[" + j + "]= " + splits.get(j));
                TaskAttemptContext context = MapReduceTestUtil
                        .createDummyMapTaskAttemptContext(job.getConfiguration());
                RecordReader<Text, Text> reader = format.createRecordReader(splits.get(j), context);
                Class<?> clazz = reader.getClass();
                MapContext<Text, Text, Text, Text> mcontext = new MapContextImpl<Text, Text, Text, Text>(
                        job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), splits.get(j));
                reader.initialize(splits.get(j), mcontext);

                Text key = null;
                Text value = null;
                try {
                    int count = 0;
                    while (reader.nextKeyValue()) {
                        key = reader.getCurrentKey();
                        value = reader.getCurrentValue();
                        final int k = Integer.parseInt(key.toString());
                        final int v = Integer.parseInt(value.toString());
                        assertEquals("Bad key", 0, k % 2);
                        assertEquals("Mismatched key/value", k / 2, v);
                        LOG.debug("read " + k + "," + v);
                        assertFalse(k + "," + v + " in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    if (count > 0) {
                        LOG.info("splits[" + j + "]=" + splits.get(j) + " count=" + count);
                    } else {
                        LOG.debug("splits[" + j + "]=" + splits.get(j) + " count=" + count);
                    }
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }

    }
}

From source file: org.apache.carbondata.core.scan.filter.FilterUtil.java

/**
 * Below method will be used to get all the include filter values in case of range filters when
 * blocklet is encoded with local dictionary
 * @param expression
 * filter expression
 * @param dictionary
 * dictionary
 * @return include filter bitset
 * @throws FilterUnsupportedException
 */
private static BitSet getIncludeDictFilterValuesForRange(Expression expression, CarbonDictionary dictionary)
        throws FilterUnsupportedException {
    ConditionalExpression conExp = (ConditionalExpression) expression;
    ColumnExpression columnExpression = conExp.getColumnList().get(0);
    BitSet includeFilterBitSet = new BitSet();
    for (int i = 2; i < dictionary.getDictionarySize(); i++) {
        if (null == dictionary.getDictionaryValue(i)) {
            continue;
        }
        try {
            RowIntf row = new RowImpl();
            String stringValue = new String(dictionary.getDictionaryValue(i),
                    Charset.forName(CarbonCommonConstants.DEFAULT_CHARSET));
            row.setValues(new Object[] { DataTypeUtil.getDataBasedOnDataType(stringValue,
                    columnExpression.getCarbonColumn().getDataType()) });
            Boolean rslt = expression.evaluate(row).getBoolean();
            if (null != rslt) {
                if (rslt) {
                    includeFilterBitSet.set(i);
                }
            }
        } catch (FilterIllegalMemberException e) {
            LOGGER.debug(e.getMessage());
        }
    }
    return includeFilterBitSet;
}

From source file: com.joliciel.jochre.graphics.SegmenterImplTest.java

public void testSplitShape(@NonStrict final LetterGuesserService letterGuesserService,
        @NonStrict final SourceImage sourceImage, @NonStrict final Shape shape) throws Exception {
    GraphicsServiceImpl graphicsService = new GraphicsServiceImpl();
    graphicsService.setLetterGuesserService(letterGuesserService);

    final int threshold = 100;
    final int width = 12;
    final int height = 9;
    final int maxBridgeWidth = 2;
    final int minLetterWeight = 12;
    final int maxOverlap = 2;
    final BitSet bitset = new BitSet(width * height);
    final int left = 10;
    final int top = 10;

    new NonStrictExpectations() {
        {
            shape.getHeight();
            returns(height);
            shape.getWidth();
            returns(width);
            shape.getLeft();
            returns(left);
            shape.getTop();
            returns(top);
            shape.getRight();
            returns(left + width - 1);
            shape.getBottom();
            returns(top + height - 1);

            sourceImage.getSeparationThreshold();
            returns(threshold);
            int[] pixels = { 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, // row 0
                    0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, // row 1
                    0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, // row 2
                    0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, // row 3
                    0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, // row 4
                    0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, // row 5
                    0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, // row 6
                    1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, // row 7
                    0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, // row 8
            };

            for (int x = -1; x <= width; x++)
                for (int y = -1; y <= height; y++) {
                    shape.isPixelBlack(x, y, threshold);
                    if (x >= 0 && x < width && y >= 0 && y < height) {
                        returns(pixels[y * width + x] == 1);
                        if (pixels[y * width + x] == 1) {
                            bitset.set(y * width + x);
                        }
                    } else
                        returns(false);
                }

            shape.getBlackAndWhiteBitSet(threshold, 0);
            returns(bitset);
        }
    };

    SegmenterImpl segmenter = new SegmenterImpl(sourceImage);
    segmenter.setGraphicsService(graphicsService);

    List<Shape> shapes = segmenter.splitShape(shape, sourceImage, maxBridgeWidth, minLetterWeight, maxOverlap);

    for (Shape splitShape : shapes) {
        LOG.debug("Split shape:  " + splitShape);
    }
    assertEquals(2, shapes.size());

    Shape leftShape = shapes.get(0);
    assertEquals(left, leftShape.getLeft());
    assertEquals(left + 5, leftShape.getRight());
    assertEquals(top, leftShape.getTop());
    assertEquals(top + 7, leftShape.getBottom());
    Shape rightShape = shapes.get(1);
    assertEquals(left + 6, rightShape.getLeft());
    assertEquals(top + 11, rightShape.getRight());
    assertEquals(top, rightShape.getTop());
    assertEquals(top + 8, rightShape.getBottom());
}

From source file: org.apache.kylin.cube.model.CubeDesc.java

private void initMeasureReferenceToColumnFamily() {
    if (measures == null || measures.size() == 0)
        return;

    Map<String, MeasureDesc> measureLookup = new HashMap<String, MeasureDesc>();
    for (MeasureDesc m : measures)
        measureLookup.put(m.getName(), m);
    Map<String, Integer> measureIndexLookup = new HashMap<String, Integer>();
    for (int i = 0; i < measures.size(); i++)
        measureIndexLookup.put(measures.get(i).getName(), i);

    BitSet checkEachMeasureExist = new BitSet();
    for (HBaseColumnFamilyDesc cf : getHbaseMapping().getColumnFamily()) {
        for (HBaseColumnDesc c : cf.getColumns()) {
            String[] colMeasureRefs = c.getMeasureRefs();
            MeasureDesc[] measureDescs = new MeasureDesc[colMeasureRefs.length];
            int[] measureIndex = new int[colMeasureRefs.length];
            for (int i = 0; i < colMeasureRefs.length; i++) {
                measureDescs[i] = measureLookup.get(colMeasureRefs[i]);
                checkState(measureDescs[i] != null, "measure desc at (%s) is null", i);
                measureIndex[i] = measureIndexLookup.get(colMeasureRefs[i]);
                checkState(measureIndex[i] >= 0, "measure index at (%s) not positive", i);

                checkEachMeasureExist.set(measureIndex[i]);
            }
            c.setMeasures(measureDescs);
            c.setMeasureIndex(measureIndex);
            c.setColumnFamilyName(cf.getName());
        }
    }

    for (int i = 0; i < measures.size(); i++) {
        checkState(checkEachMeasureExist.get(i),
                "measure (%s) does not exist in column family or measure duplicates", measures.get(i));
    }
}

From source file: com.oltpbenchmark.benchmarks.seats.SEATSWorker.java

/**
 * Execute the FindOpenSeat procedure
 * @throws SQLException
 */
private boolean executeFindOpenSeats(FindOpenSeats proc) throws SQLException {
    final FlightId search_flight = this.profile.getRandomFlightId();
    assert (search_flight != null);
    Long airport_depart_id = search_flight.getDepartAirportId();

    if (LOG.isTraceEnabled())
        LOG.trace("Calling " + proc);
    Object[][] results = proc.run(conn, search_flight.encode());
    conn.commit();

    int rowCount = results.length;
    assert (rowCount <= SEATSConstants.FLIGHTS_NUM_SEATS) : String
            .format("Unexpected %d open seats returned for %s", rowCount, search_flight);

    // there is some tiny probability of an empty flight .. maybe 1/(20**150)
    // if you hit this assert (with valid code), play the lottery!
    if (rowCount == 0)
        return (true);

    LinkedList<Reservation> cache = CACHE_RESERVATIONS.get(CacheType.PENDING_INSERTS);
    assert (cache != null) : "Unexpected " + CacheType.PENDING_INSERTS;

    // Store pending reservations in our queue for a later transaction            
    BitSet seats = getSeatsBitSet(search_flight);
    tmp_reservations.clear();

    for (Object row[] : results) {
        if (row == null)
            continue; //  || rng.nextInt(100) < 75) continue; // HACK
        Integer seatnum = (Integer) row[1];

        // We first try to get a CustomerId based at this departure airport
        if (LOG.isTraceEnabled())
            LOG.trace("Looking for a random customer to fly on " + search_flight);
        CustomerId customer_id = profile.getRandomCustomerId(airport_depart_id);

        // We will go for a random one if:
        //  (1) The Customer is already booked on this Flight
        //  (2) We already made a new Reservation just now for this Customer
        int tries = SEATSConstants.FLIGHTS_NUM_SEATS;
        while (tries-- > 0 && (customer_id == null)) { //  || isCustomerBookedOnFlight(customer_id, flight_id))) {
            customer_id = profile.getRandomCustomerId();
            if (LOG.isTraceEnabled())
                LOG.trace("RANDOM CUSTOMER: " + customer_id);
        } // WHILE
        assert (customer_id != null) : String.format(
                "Failed to find a unique Customer to reserve for seat #%d on %s", seatnum, search_flight);

        Reservation r = new Reservation(profile.getNextReservationId(getId()), search_flight, customer_id,
                seatnum.intValue());
        seats.set(seatnum);
        tmp_reservations.add(r);
        if (LOG.isTraceEnabled())
            LOG.trace(
                    "QUEUED INSERT: " + search_flight + " / " + search_flight.encode() + " -> " + customer_id);
    } // WHILE

    if (tmp_reservations.isEmpty() == false) {
        Collections.shuffle(tmp_reservations);
        cache.addAll(tmp_reservations);
        while (cache.size() > SEATSConstants.CACHE_LIMIT_PENDING_INSERTS) {
            cache.remove();
        } // WHILE
        if (LOG.isDebugEnabled())
            LOG.debug(String.format("Stored %d pending inserts for %s [totalPendingInserts=%d]",
                    tmp_reservations.size(), search_flight, cache.size()));
    }
    return (true);
}

From source file: org.jahia.services.search.facets.SimpleJahiaJcrFacets.java

OpenBitSet getPositiveDocSet(Query q, final String locale) throws IOException {
    OpenBitSet answer;

    //      if (filterCache != null) {
    //        answer = filterCache.get(q);
    //        if (answer!=null) return answer;
    //      }
    final BitSet bitset = new BitSet();
    searcher.search(q, new AbstractHitCollector() {
        @Override
        public void collect(int docId, float scorer) {
            if (locale != null) {
                try {
                    int docMainDocId = getMainDocIdForTranslations(
                            searcher.getIndexReader().document(docId, TRANSLATION_FIELDS), locale);
                    if (docMainDocId != -1) {
                        bitset.set(docMainDocId);
                    }
                } catch (Exception e) {
                    logger.warn("Error getting index document while faceting", e);
                }
            }
            bitset.set(docId);
        }

        @Override
        public boolean acceptsDocsOutOfOrder() {
            return true;
        }
    });
    answer = new OpenBitSetDISI(new DocIdBitSet(bitset).iterator(), bitset.size());
    //      answer = getDocSetNC(q,null);
    //      if (filterCache != null) filterCache.put(q,answer);
    return answer;
}

From source file: org.jahia.services.search.facets.SimpleJahiaJcrFacets.java

private OpenBitSet getDocIdSet(Query query, final String locale) {
    OpenBitSet docIds = null;
    try {
        final BitSet bitset = new BitSet();
        searcher.search(query, new AbstractHitCollector() {
            @Override
            public void collect(int docId, float scorer) {
                if (locale != null) {
                    try {
                        int docMainDocId = getMainDocIdForTranslations(
                                searcher.getIndexReader().document(docId, TRANSLATION_FIELDS), locale);
                        if (docMainDocId != -1) {
                            bitset.set(docMainDocId);
                        }
                    } catch (Exception e) {
                        logger.warn("Error getting index document while faceting", e);
                    }
                }
                bitset.set(docId);
            }

            @Override
            public boolean acceptsDocsOutOfOrder() {
                return true;
            }
        });

        docIds = new OpenBitSetDISI(new DocIdBitSet(bitset).iterator(), bitset.size());
    } catch (IOException e) {
        logger.debug("Can't retrieve bitset from hits", e);
    }
    return docIds;
}

From source file: org.apache.hadoop.mapreduce.lib.input.TestMRKeyValueTextInputFormat.java

@Test
public void testFormat() throws Exception {
    Job job = Job.getInstance(new Configuration(defaultConf));
    Path file = new Path(workDir, "test.txt");

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int MAX_LENGTH = 10000;
    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {

        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i * 2));
                writer.write("\t");
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        KeyValueTextInputFormat format = new KeyValueTextInputFormat();
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 20) + 1;
            LOG.debug("splitting: requesting = " + numSplits);
            List<InputSplit> splits = format.getSplits(job);
            LOG.debug("splitting: got =        " + splits.size());

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.size(); j++) {
                LOG.debug("split[" + j + "]= " + splits.get(j));
                TaskAttemptContext context = MapReduceTestUtil
                        .createDummyMapTaskAttemptContext(job.getConfiguration());
                RecordReader<Text, Text> reader = format.createRecordReader(splits.get(j), context);
                Class<?> clazz = reader.getClass();
                assertEquals("reader class is KeyValueLineRecordReader.", KeyValueLineRecordReader.class,
                        clazz);
                MapContext<Text, Text, Text, Text> mcontext = new MapContextImpl<Text, Text, Text, Text>(
                        job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                        MapReduceTestUtil.createDummyReporter(), splits.get(j));
                reader.initialize(splits.get(j), mcontext);

                Text key = null;
                Text value = null;
                try {
                    int count = 0;
                    while (reader.nextKeyValue()) {
                        key = reader.getCurrentKey();
                        clazz = key.getClass();
                        assertEquals("Key class is Text.", Text.class, clazz);
                        value = reader.getCurrentValue();
                        clazz = value.getClass();
                        assertEquals("Value class is Text.", Text.class, clazz);
                        final int k = Integer.parseInt(key.toString());
                        final int v = Integer.parseInt(value.toString());
                        assertEquals("Bad key", 0, k % 2);
                        assertEquals("Mismatched key/value", k / 2, v);
                        LOG.debug("read " + v);
                        assertFalse("Key in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    LOG.debug("splits[" + j + "]=" + splits.get(j) + " count=" + count);
                } finally {
                    reader.close();
                }
            }
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }

    }
}

From source file: juicebox.data.MatrixZoomData.java

/**
 * Dump the O/E or Pearsons matrix to standard out in ascii format.
 *
 * @param df   Density function (expected values)
 * @param type will be "oe", "pearsons", or "expected"
 * @param les  output stream
 * @param pw   Text output stream
 * @throws java.io.IOException If fails to write
 */
public void dumpOE(ExpectedValueFunction df, String type, NormalizationType no, LittleEndianOutputStream les,
        PrintWriter pw) throws IOException {
    if (les == null && pw == null) {
        pw = new PrintWriter(System.out);
    }

    if (type.equals("oe")) {
        int nBins;

        if (zoom.getUnit() == HiC.Unit.BP) {
            nBins = chr1.getLength() / zoom.getBinSize() + 1;
        } else {
            nBins = ((DatasetReaderV2) reader).getFragCount(chr1) / zoom.getBinSize() + 1;
        }

        BasicMatrix matrix = new InMemoryMatrix(nBins);
        BitSet bitSet = new BitSet(nBins);

        List<Integer> blockNumbers = reader.getBlockNumbers(this);

        for (int blockNumber : blockNumbers) {
            Block b = null;
            try {
                b = reader.readNormalizedBlock(blockNumber, this, df.getNormalizationType());
                if (b != null) {
                    for (ContactRecord rec : b.getContactRecords()) {
                        int x = rec.getBinX();
                        int y = rec.getBinY();

                        int dist = Math.abs(x - y);
                        double expected = 0;
                        try {
                            expected = df.getExpectedValue(chr1.getIndex(), dist);
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                        double observed = rec.getCounts(); // Observed is already normalized
                        double normCounts = observed / expected;
                        // The apache library doesn't seem to play nice with NaNs
                        if (!Double.isNaN(normCounts)) {
                            matrix.setEntry(x, y, (float) normCounts);
                            if (x != y) {
                                matrix.setEntry(y, x, (float) normCounts);
                            }
                            bitSet.set(x);
                            bitSet.set(y);
                        }
                    }
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        if (les != null)
            les.writeInt(nBins);

        for (int i = 0; i < nBins; i++) {
            for (int j = 0; j < nBins; j++) {
                float output;
                if (!bitSet.get(i) && !bitSet.get(j)) {
                    output = Float.NaN;
                } else
                    output = matrix.getEntry(i, j);
                if (les != null)
                    les.writeFloat(output);
                else
                    pw.print(output + " ");
            }
            if (les == null)
                pw.println();
        }
        if (les == null) {
            pw.println();
            pw.flush();
        }
    } else {
        BasicMatrix pearsons = getPearsons(df);
        if (pearsons != null) {
            int dim = pearsons.getRowDimension();
            for (int i = 0; i < dim; i++) {
                for (int j = 0; j < dim; j++) {
                    float output = pearsons.getEntry(i, j);
                    if (les != null)
                        les.writeFloat(output);
                    else
                        pw.print(output + " ");
                }
                if (les == null)
                    pw.println();
            }
            pw.flush();
        } else {
            log.error("Pearson's not available at zoom " + zoom);
        }
    }
}