Example usage for java.util BitSet cardinality

List of usage examples for java.util BitSet cardinality

Introduction

On this page you can find usage examples for java.util.BitSet.cardinality().

Prototype

public int cardinality() 

Document

Returns the number of bits set to true in this BitSet.
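
A minimal, self-contained sketch of what cardinality() reports (the class name CardinalityDemo is invented for illustration):

import java.util.BitSet;

public class CardinalityDemo {
    public static void main(String[] args) {
        BitSet bits = new BitSet(16);
        bits.set(1);
        bits.set(3);
        bits.set(3); // re-setting an already-set bit does not change the count
        bits.set(10);
        // cardinality() counts the true bits, regardless of the BitSet's allocated size
        System.out.println(bits.cardinality()); // prints 3
        bits.clear(1);
        System.out.println(bits.cardinality()); // prints 2
    }
}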

Usage

From source file:org.apache.hadoop.mapred.TestTextInputFormat.java

public void testFormat() throws Exception {
    JobConf job = new JobConf();
    Path file = new Path(workDir, "test.txt");

    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;

    int seed = new Random().nextInt();
    LOG.info("seed = " + seed);
    Random random = new Random(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    // for a variety of lengths
    for (int length = 0; length < MAX_LENGTH; length += random.nextInt(MAX_LENGTH / 10) + 1) {

        LOG.debug("creating; entries = " + length);

        // create a file with length entries
        Writer writer = new OutputStreamWriter(localFs.create(file));
        try {
            for (int i = 0; i < length; i++) {
                writer.write(Integer.toString(i));
                writer.write("\n");
            }
        } finally {
            writer.close();
        }

        // try splitting the file in a variety of sizes
        TextInputFormat format = new TextInputFormat();
        format.configure(job);
        LongWritable key = new LongWritable();
        Text value = new Text();
        for (int i = 0; i < 3; i++) {
            int numSplits = random.nextInt(MAX_LENGTH / 20) + 1;
            LOG.debug("splitting: requesting = " + numSplits);
            InputSplit[] splits = format.getSplits(job, numSplits);
            LOG.debug("splitting: got =        " + splits.length);

            if (length == 0) {
                assertEquals("Files of length 0 are not returned from FileInputFormat.getSplits().", 1,
                        splits.length);
                assertEquals("Empty file length == 0", 0, splits[0].getLength());
            }

            // check each split
            BitSet bits = new BitSet(length);
            for (int j = 0; j < splits.length; j++) {
                LOG.debug("split[" + j + "]= " + splits[j]);
                RecordReader<LongWritable, Text> reader = format.getRecordReader(splits[j], job, reporter);
                try {
                    int count = 0;
                    while (reader.next(key, value)) {
                        int v = Integer.parseInt(value.toString());
                        LOG.debug("read " + v);
                        if (bits.get(v)) {
                            LOG.warn("conflict with " + v + " in split " + j + " at position "
                                    + reader.getPos());
                        }
                        assertFalse("Key in multiple partitions.", bits.get(v));
                        bits.set(v);
                        count++;
                    }
                    LOG.debug("splits[" + j + "]=" + splits[j] + " count=" + count);
                } finally {
                    reader.close();
                }
            }
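            // every record set exactly one bit, so cardinality() == length proves full coverage with no gaps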
            assertEquals("Some keys in no partition.", length, bits.cardinality());
        }

    }
}

From source file:org.apache.hyracks.control.cc.executor.ActivityClusterPlanner.java

private void assignConnectorPolicy(ActivityCluster ac, Map<ActivityId, ActivityPlan> taskMap) {
    Map<ConnectorDescriptorId, IConnectorPolicy> cPolicyMap = new HashMap<>();
    Set<ActivityId> activities = ac.getActivityMap().keySet();
    BitSet targetBitmap = new BitSet();
    for (ActivityId a1 : activities) {
        Task[] ac1TaskStates = taskMap.get(a1).getTasks();
        int nProducers = ac1TaskStates.length;
        List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(a1);
        if (outputConns == null) {
            continue;
        }
        for (IConnectorDescriptor c : outputConns) {
            ConnectorDescriptorId cdId = c.getConnectorId();
            ActivityId a2 = ac.getConsumerActivity(cdId);
            Task[] ac2TaskStates = taskMap.get(a2).getTasks();
            int nConsumers = ac2TaskStates.length;

            int[] fanouts = new int[nProducers];
            if (c.allProducersToAllConsumers()) {
                for (int i = 0; i < nProducers; ++i) {
                    fanouts[i] = nConsumers;
                }
            } else {
                for (int i = 0; i < nProducers; ++i) {
                    c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
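                    // the bits set for producer i mark its target partitions, so cardinality() is its fan-out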
                    fanouts[i] = targetBitmap.cardinality();
                }
            }
            IConnectorPolicy cp = assignConnectorPolicy(ac, c, nProducers, nConsumers, fanouts);
            cPolicyMap.put(cdId, cp);
        }
    }
    executor.getJobRun().getConnectorPolicyMap().putAll(cPolicyMap);
}

From source file:edu.uci.ics.hyracks.control.cc.scheduler.ActivityClusterPlanner.java

private void assignConnectorPolicy(ActivityCluster ac, Map<ActivityId, ActivityPlan> taskMap) {
    Map<ConnectorDescriptorId, IConnectorPolicy> cPolicyMap = new HashMap<ConnectorDescriptorId, IConnectorPolicy>();
    Set<ActivityId> activities = ac.getActivityMap().keySet();
    BitSet targetBitmap = new BitSet();
    for (ActivityId a1 : activities) {
        Task[] ac1TaskStates = taskMap.get(a1).getTasks();
        int nProducers = ac1TaskStates.length;
        List<IConnectorDescriptor> outputConns = ac.getActivityOutputMap().get(a1);
        if (outputConns != null) {
            for (IConnectorDescriptor c : outputConns) {
                ConnectorDescriptorId cdId = c.getConnectorId();
                ActivityId a2 = ac.getConsumerActivity(cdId);
                Task[] ac2TaskStates = taskMap.get(a2).getTasks();
                int nConsumers = ac2TaskStates.length;

                int[] fanouts = new int[nProducers];
                if (c.allProducersToAllConsumers()) {
                    for (int i = 0; i < nProducers; ++i) {
                        fanouts[i] = nConsumers;
                    }
                } else {
                    for (int i = 0; i < nProducers; ++i) {
                        c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
                        fanouts[i] = targetBitmap.cardinality();
                    }
                }
                IConnectorPolicy cp = assignConnectorPolicy(ac, c, nProducers, nConsumers, fanouts);
                cPolicyMap.put(cdId, cp);
            }
        }
    }
    scheduler.getJobRun().getConnectorPolicyMap().putAll(cPolicyMap);
}

From source file:AnalysisModule.DataAnalysis.java

protected void bitmapAnalyse(List<Scenario> lstScenario) throws Exception {
    BitMap bitMap;
    String bmpDir;
    int traffic;
    int numberOfFiles;
    FileInputStream fin = null;
    ObjectInputStream ois = null;

    for (Scenario scenario : lstScenario) {
        for (Topology topology : scenario.lstTopology) {
            for (Instance instance : topology.getLstInstance()) {
                instance.trafficMatrix = new double[topology.getNumberOfSwitches()][topology
                        .getNumberOfSwitches()];
            }
        }
    }

    for (Scenario scenario : lstScenario) {
        for (Topology topology : scenario.lstTopology) {
            System.out.println("Analisando Topologia: " + topology.getIdTopology());
            for (Instance instance : topology.getLstInstance()) {
                System.out.println("           Instancia: " + instance.getId());
                for (int i = 0; i < topology.getNumberOfSwitches(); i++) {
                    for (int j = 0; j < topology.getNumberOfSwitches(); j++) {
                        switch (instance.type) {
                        case BITMAP: {
                            if (j <= i) {
                                break;
                            }
                            TreeMap<Long, BitMap> sourceBitMapTree = new TreeMap<>();
                            TreeMap<Long, BitMap> destinationBitMapTree = new TreeMap<>();

                            bmpDir = getSrcDir(instance, i);
                            numberOfFiles = new File(bmpDir).listFiles().length - 1;
                            int firstBitmap = -2;

                            for (int numberOfBmp = 0; numberOfBmp < numberOfFiles; numberOfBmp++) {

                                fin = new FileInputStream(bmpDir + "BitMap" + numberOfBmp + ".bmp");
                                ois = new ObjectInputStream(fin);
                                bitMap = (BitMap) ois.readObject();

                                if ((bitMap.getStartEpoch() - 21600000000L > scenario.startTime)
                                        && (bitMap.getStartEpoch() - 21600000000L < scenario.endTime)) {
                                    if (firstBitmap == -2) {
                                        firstBitmap = numberOfBmp - 1;
                                    }
                                    sourceBitMapTree.put(bitMap.getStartEpoch() - 21600000000L, bitMap);

                                }

                                ois.close();
                                fin.close();
                            }

                            //Add the first bitmap in the Measurement Interval
                            if (firstBitmap >= 0) {
                                fin = new FileInputStream(bmpDir + "BitMap" + firstBitmap + ".bmp");
                                ois = new ObjectInputStream(fin);
                                bitMap = (BitMap) ois.readObject();

                                sourceBitMapTree.put(bitMap.getStartEpoch() - 21600000000L, bitMap);
                                ois.close();
                                fin.close();

                            }

                            bmpDir = getDestDir(instance, j);
                            numberOfFiles = new File(bmpDir).listFiles().length - 1;
                            firstBitmap = -2;

                            for (int numberOfBmp = 0; numberOfBmp < numberOfFiles; numberOfBmp++) {

                                fin = new FileInputStream(bmpDir + "BitMap" + numberOfBmp + ".bmp");
                                ois = new ObjectInputStream(fin);
                                bitMap = (BitMap) ois.readObject();

                                if ((bitMap.getStartEpoch() - 21600000000L > scenario.startTime)
                                        && (bitMap.getStartEpoch() - 21600000000L < scenario.endTime)) {
                                    if (firstBitmap == -2) {
                                        firstBitmap = numberOfBmp - 1;
                                    }
                                    destinationBitMapTree.put(bitMap.getStartEpoch() - 21600000000L, bitMap);
                                }
                                ois.close();
                                fin.close();
                            }

                            //Add the first bitmap in the Measurement Interval
                            if (firstBitmap >= 0) {
                                fin = new FileInputStream(bmpDir + "BitMap" + firstBitmap + ".bmp");
                                ois = new ObjectInputStream(fin);
                                bitMap = (BitMap) ois.readObject();

                                destinationBitMapTree.put(bitMap.getStartEpoch() - 21600000000L, bitMap);

                                ois.close();
                                fin.close();

                            }

                            //Estimation
                            int k1 = sourceBitMapTree.size();
                            int k2 = destinationBitMapTree.size();

                            Collection sourceEntrySet = sourceBitMapTree.entrySet();
                            Iterator sourceEntries = sourceEntrySet.iterator();

                            for (int q = 0; q < k1; q++) {
                                Map.Entry entrySrc = (Map.Entry) sourceEntries.next();
                                BitMap bmpSrc = (BitMap) entrySrc.getValue();

                                Collection destinationEntrySet = destinationBitMapTree.entrySet();
                                Iterator destinationEntries = destinationEntrySet.iterator();

                                for (int r = 0; r < k2; r++) {
                                    Map.Entry entryDst = (Map.Entry) destinationEntries.next();
                                    BitMap bmpDst = (BitMap) entryDst.getValue();

                                    boolean overlap = bmpSrc.getStartEpoch() <= bmpDst.getEndEpoch()
                                            && bmpSrc.getEndEpoch() >= bmpDst.getStartEpoch();

                                    if (overlap) {
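                                        // m * ln(m / (m - occupied)) is a linear-counting style
                                        // estimate of distinct items; src + dst - or then
                                        // approximates the shared traffic by inclusion-exclusion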
                                        double sourceDTr = instance.getBitMapSize()
                                                * Math.log(((double) instance.getBitMapSize())
                                                        / (instance.getBitMapSize() - bmpSrc.occupancy()));

                                        double destinationDTr = instance.getBitMapSize()
                                                * Math.log(((double) instance.getBitMapSize())
                                                        / (instance.getBitMapSize() - bmpDst.occupancy()));

                                        BitSet orSrcDst = (BitSet) bmpSrc.getBitSet().clone();
                                        //BitSet andSrcDst = (BitSet) bmpSrc.getBitSet().clone();

                                        orSrcDst.or(bmpDst.getBitSet());
                                        //andSrcDst.and(bmpDst.getBitSet());
                                        double orDTr = instance.getBitMapSize()
                                                * Math.log(((double) instance.getBitMapSize())
                                                        / (instance.getBitMapSize() - orSrcDst.cardinality()));
                                        //double andDTr = instance.getBitMapSize() * Math.log(((double) instance.getBitMapSize()) / (instance.getBitMapSize() - andSrcDst.cardinality()));

                                        double estimation = 0D;
                                        if (Double.isFinite(orDTr)) {
                                            estimation = sourceDTr + destinationDTr - orDTr;
                                            //estimation = (bmpSrc.getNumberOfPackets()/sourceDTr) * estimation;
                                            //estimation = andDTr;
                                        }

                                        instance.trafficMatrix[i][j] += estimation;
                                    }
                                }
                            }
                            break;
                        }

                        case COUNTER_ARRAY: {
                            traffic = 0;
                            traffic += instance.networkSwitch.get(i).arrayCounter[i][j];
                            //traffic += instance.networkSwitch.get(i).arrayCounter[j][i];
                            instance.doCalculateMatrixElem(i, j, topology, traffic);
                            break;
                        }

                        case OPT_COUNTER_ARRAY: {
                            traffic = 0;
                            for (Integer node : topology.getPathNodes(i, j)) {
                                if (instance.networkSwitch.get(node).isObserver) {
                                    traffic += instance.networkSwitch.get(node).arrayCounter[i][j];
                                    //traffic += instance.networkSwitch.get(node).arrayCounter[j][i];
                                    break;
                                }
                            }
                            instance.doCalculateMatrixElem(i, j, topology, traffic);
                            break;
                        }
                        }
                    }
                }
            }
        }
    }
}

From source file:org.apache.carbondata.core.scan.filter.FilterUtil.java

/**
 * Below method is used to get a filter executor instance for range filters
 * when a local dictionary is present in the blocklet.
 * @param rawColumnChunk
 * raw column chunk
 * @param exp
 * filter expression
 * @param isNaturalSorted
 * whether the data was already sorted
 * @return
 */
public static FilterExecuter getFilterExecutorForRangeFilters(DimensionRawColumnChunk rawColumnChunk,
        Expression exp, boolean isNaturalSorted) {
    BitSet includeDictionaryValues;
    try {
        includeDictionaryValues = FilterUtil.getIncludeDictFilterValuesForRange(exp,
                rawColumnChunk.getLocalDictionary());
    } catch (FilterUnsupportedException e) {
        throw new RuntimeException(e);
    }
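    // the number of dictionary values matched (the cardinality) decides whether
    // an exclude filter is cheaper than an include filter for this range expression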
    boolean isExclude = includeDictionaryValues.cardinality() > 1 && FilterUtil.isExcludeFilterNeedsToApply(
            rawColumnChunk.getLocalDictionary().getDictionaryActualSize(),
            includeDictionaryValues.cardinality());
    byte[][] encodedFilterValues = FilterUtil.getEncodedFilterValuesForRange(includeDictionaryValues,
            rawColumnChunk.getLocalDictionary(), isExclude);
    FilterExecuter filterExecuter;
    if (!isExclude) {
        filterExecuter = new IncludeFilterExecuterImpl(encodedFilterValues, isNaturalSorted);
    } else {
        filterExecuter = new ExcludeFilterExecuterImpl(encodedFilterValues, isNaturalSorted);
    }
    return filterExecuter;
}

From source file:edu.umich.flowfence.service.SandboxManager.java

private void dumpSandboxes() {
    if (localLOGV) {
        BitSet seenSandboxes = new BitSet(SANDBOX_COUNT);
        Log.v(TAG, ">>> Dumping current sandbox state:");
        Log.v(TAG, "Running: " + mRunningSandboxes.size() + " sandboxes");
        for (Sandbox sb : mRunningSandboxes) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Idle: " + mIdleSandboxes.size() + " sandboxes (LRU order)");
        for (Sandbox sb : mIdleSandboxes.keySet()) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Stopped: " + mStoppedSandboxes.size() + " sandboxes");
        for (Sandbox sb : mStoppedSandboxes) {
            dumpSandbox(sb, seenSandboxes);
        }
        Log.v(TAG, "Hot spares: " + mHotSpares.size() + " sandboxes");
        for (Sandbox sb : mHotSpares) {
            dumpSandbox(sb, seenSandboxes);
        }
        seenSandboxes.flip(0, SANDBOX_COUNT); // true = unseen
        if (!seenSandboxes.isEmpty()) {
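            // after the flip, each set bit is a sandbox that was never dumped above, i.e. a leak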
            Log.w(TAG, "WARNING: leaked " + seenSandboxes.cardinality() + " sandboxes");
            int leaked = -1;
            while ((leaked = seenSandboxes.nextSetBit(leaked + 1)) >= 0) {
                dumpSandbox(Sandbox.get(leaked), null);
            }
        } else {
            Log.v(TAG, "No leaks detected");
        }
        Log.v(TAG, "<<< End of state dump");
    }
}

From source file:org.springframework.kafka.listener.ConcurrentMessageListenerContainerTests.java

@Test
public void testManualCommitSyncExisting() throws Exception {
    this.logger.info("Start MANUAL_IMMEDIATE with Existing");
    Map<String, Object> senderProps = KafkaTestUtils.producerProps(embeddedKafka);
    ProducerFactory<Integer, String> pf = new DefaultKafkaProducerFactory<Integer, String>(senderProps);
    KafkaTemplate<Integer, String> template = new KafkaTemplate<>(pf);
    template.setDefaultTopic(topic8);
    template.sendDefault(0, "foo");
    template.sendDefault(2, "bar");
    template.sendDefault(0, "baz");
    template.sendDefault(2, "qux");
    template.flush();
    Map<String, Object> props = KafkaTestUtils.consumerProps("testManualExistingSync", "false", embeddedKafka);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    DefaultKafkaConsumerFactory<Integer, String> cf = new DefaultKafkaConsumerFactory<Integer, String>(props);
    ContainerProperties containerProps = new ContainerProperties(topic8);
    containerProps.setSyncCommits(true);
    final CountDownLatch latch = new CountDownLatch(8);
    final BitSet bitSet = new BitSet(8);
    containerProps.setMessageListener((AcknowledgingMessageListener<Integer, String>) (message, ack) -> {
        ConcurrentMessageListenerContainerTests.this.logger.info("manualExisting: " + message);
        ack.acknowledge();
        bitSet.set((int) (message.partition() * 4 + message.offset()));
        latch.countDown();
    });
    containerProps.setAckMode(AckMode.MANUAL_IMMEDIATE);

    ConcurrentMessageListenerContainer<Integer, String> container = new ConcurrentMessageListenerContainer<>(cf,
            containerProps);
    container.setConcurrency(1);
    container.setBeanName("testManualExisting");
    container.start();
    ContainerTestUtils.waitForAssignment(container, embeddedKafka.getPartitionsPerTopic());
    template.sendDefault(0, "fooo");
    template.sendDefault(2, "barr");
    template.sendDefault(0, "bazz");
    template.sendDefault(2, "quxx");
    template.flush();
    assertThat(latch.await(60, TimeUnit.SECONDS)).isTrue();
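    // each of the 8 records sets a distinct partition/offset bit, so 8 set bits means none were missed or duplicated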
    assertThat(bitSet.cardinality()).isEqualTo(8);
    container.stop();
    this.logger.info("Stop MANUAL_IMMEDIATE with Existing");
}

From source file:org.apache.hadoop.mapred.split.TestGroupedSplits.java

@Test(timeout = 10000)
public void testFormat() throws Exception {
    JobConf job = new JobConf(defaultConf);

    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    createFiles(length, numFiles, random);

    // create a combined split for the files
    TextInputFormat wrappedFormat = new TextInputFormat();
    wrappedFormat.configure(job);
    TezGroupedSplitsInputFormat<LongWritable, Text> format = new TezGroupedSplitsInputFormat<LongWritable, Text>();
    format.setConf(job);
    format.setDesiredNumberOfSplits(1);
    format.setInputFormat(wrappedFormat);
    LongWritable key = new LongWritable();
    Text value = new Text();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        InputSplit[] splits = format.getSplits(job, numSplits);
        LOG.info("splitting: got =        " + splits.length);

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.length);
        InputSplit split = splits[0];
        assertEquals("It should be TezGroupedSplit", TezGroupedSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        RecordReader<LongWritable, Text> reader = format.getRecordReader(split, job, voidReporter);
        try {
            int count = 0;
            while (reader.next(key, value)) {
                int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                if (bits.get(v)) {
                    LOG.warn("conflict with " + v + " at position " + reader.getPos());
                }
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.info("splits=" + split + " count=" + count);
        } finally {
            reader.close();
        }
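        // every key must have been read exactly once across the grouped split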
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}

From source file:org.apache.hadoop.mapreduce.lib.input.TestCombineTextInputFormat.java

@Test(timeout = 10000)
public void testFormat() throws Exception {
    Job job = Job.getInstance(new Configuration(defaultConf));

    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);

    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;

    // create files with various lengths
    createFiles(length, numFiles, random);

    // create a combined split for the files
    CombineTextInputFormat format = new CombineTextInputFormat();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        List<InputSplit> splits = format.getSplits(job);
        LOG.info("splitting: got =        " + splits.size());

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.size());
        InputSplit split = splits.get(0);
        assertEquals("It should be CombineFileSplit", CombineFileSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        TaskAttemptContext context = MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
        RecordReader<LongWritable, Text> reader = format.createRecordReader(split, context);
        assertEquals("reader class is CombineFileRecordReader.", CombineFileRecordReader.class,
                reader.getClass());
        MapContext<LongWritable, Text, LongWritable, Text> mcontext = new MapContextImpl<LongWritable, Text, LongWritable, Text>(
                job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
                MapReduceTestUtil.createDummyReporter(), split);
        reader.initialize(split, mcontext);

        try {
            int count = 0;
            while (reader.nextKeyValue()) {
                LongWritable key = reader.getCurrentKey();
                assertNotNull("Key should not be null.", key);
                Text value = reader.getCurrentValue();
                final int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.debug("split=" + split + " count=" + count);
        } finally {
            reader.close();
        }
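        // cardinality() == length confirms the combined split yielded every key exactly once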
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}

From source file:org.caleydo.core.util.impute.KNNImpute.java

/**
 * Splits the neighborhood into two groups based on 2-means clustering.
 *
 * @param neighborhood
 * @return
 */
private Pair<List<Gene>, List<Gene>> twoMeanClusterSplit(List<Gene> neighborhood) {
    final int n = neighborhood.size();

    final int maxit = desc.getMaxit();
    final double eps = desc.getEps();

    int a_start = r.nextInt(n);
    int b_start = r.nextInt(n);
    Gene a_center = new Gene(1, -1, Arrays.copyOf(neighborhood.get(a_start).data, samples));
    Gene b_center = new Gene(1, -1, Arrays.copyOf(neighborhood.get(b_start).data, samples));
    float[] a_center_pong = new float[samples];
    Arrays.fill(a_center_pong, Float.NaN);
    float[] b_center_pong = new float[samples];
    Arrays.fill(b_center_pong, Float.NaN);

    float[] tmp;
    BitSet partOf_a = new BitSet(n);

    double d_old = 0;
    for (int i = 0; i < maxit; ++i) {
        int j = 0;
        int changed = 0;
        double d_new = 0;
        for (Gene gene : neighborhood) {
            final double a_distance = distance(a_center, gene);
            final double b_distance = distance(b_center, gene);
            final boolean in_a = a_distance < b_distance;
            if (partOf_a.get(j) != in_a) {
                changed++;
                partOf_a.set(j, in_a);
            }
            d_new += in_a ? a_distance : b_distance;
            tmp = in_a ? a_center_pong : b_center_pong;
            // shift new center
            for (int k = 0; k < samples; ++k) {
                if (!gene.isNaN(k)) {
                    if (Float.isNaN(tmp[k]))
                        tmp[k] = gene.get(k);
                    else
                        tmp[k] += gene.get(k);
                }
            }
            j++;
        }
        if (changed == 0 || d_new == 0)
            break;
        final double ratio = Math.abs(d_new - d_old) / d_old;
        if (i > 0 && ratio < eps)
            break;
        d_old = d_new;
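        // set bits are the genes currently assigned to center a; the rest belong to center b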
        int a_n = partOf_a.cardinality();
        int b_n = n - a_n;
        if (a_n == 0 || b_n == 0) {
            // FIXME
        }
        updateCenter(a_center, a_center_pong, a_n);
        updateCenter(b_center, b_center_pong, b_n);
    }

    return split(neighborhood, partOf_a);
}