Example usage for java.util.BitSet BitSet()

List of usage examples for java.util.BitSet BitSet()

Introduction

On this page you can find usage examples for the java.util.BitSet no-argument constructor, BitSet().

Prototype

public BitSet() 

Document

Creates a new bit set. All bits are initially false.
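
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the sources below) of what the no-argument constructor gives you: an empty bit set that grows automatically as higher indexes are set.

import java.util.BitSet;

public class BitSetConstructorExample {
    public static void main(String[] args) {
        BitSet bits = new BitSet();   // all bits start out false
        bits.set(3);                  // turn on bit 3
        bits.set(64);                 // indexes beyond the initial capacity are handled transparently
        System.out.println(bits);                // {3, 64}
        System.out.println(bits.cardinality());  // 2
        System.out.println(bits.get(5));         // false
    }
}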

Usage

From source file: dendroscope.autumn.hybridnumber.ComputeHybridNumber.java

/**
 * run the algorithm. This can be reentered by rerootings of the same two trees
 *
 * @param tree1
 * @param tree2
 * @return reduced trees
 */
int run(PhyloTree tree1, PhyloTree tree2, Taxa allTaxa) throws IOException, CanceledException {
    if (!initialized) {
        initialized = true;
        progressListener.setMaximum(20);
        progressListener.setProgress(0);
        startTime = System.currentTimeMillis();
        nextTime = this.startTime + waitTime;
    }

    if (bestScore.get() == LARGE) { // no upper bound given, use cluster network
        System.err.print("Computing upper bound using cluster network: ");
        int upperBound = Utilities.getNumberOfReticulationsInClusterNetwork(tree1, tree2, progressListener);
        System.err.println(upperBound);
        bestScore.set(upperBound);
    }

    Pair<Root, Root> roots = PreProcess.apply(tree1, tree2, allTaxa);
    Root root1 = roots.getFirst();
    Root root2 = roots.getSecond();

    BitSet onlyTree1 = Cluster.setminus(root1.getTaxa(), root2.getTaxa());
    BitSet onlyTree2 = Cluster.setminus(root2.getTaxa(), root1.getTaxa());

    if (root1.getTaxa().cardinality() == onlyTree1.cardinality())
        throw new IOException("None of the taxa in tree2 are contained in tree1");
    if (root2.getTaxa().cardinality() == onlyTree2.cardinality())
        throw new IOException("None of the taxa in tree1 are contained in tree2");

    if (onlyTree1.cardinality() > 0) {
        if (!silent)
            System.err.println("Killing all taxa only present in tree1: " + onlyTree1.cardinality());
        for (int t = onlyTree1.nextSetBit(0); t != -1; t = onlyTree1.nextSetBit(t + 1)) {
            BitSet one = new BitSet();
            one.set(t);
            root1 = CopyWithTaxaRemoved.apply(root1, one);
        }
    }

    if (onlyTree2.cardinality() > 0) {
        if (!silent)
            System.err.println("Killing all taxa only present in tree2: " + onlyTree2.cardinality());
        for (int t = onlyTree2.nextSetBit(0); t != -1; t = onlyTree2.nextSetBit(t + 1)) {
            BitSet one = new BitSet();
            one.set(t);
            root2 = CopyWithTaxaRemoved.apply(root2, one);
        }
    }

    if (!root1.getTaxa().equals(root2.getTaxa()))
        throw new IOException("Trees have unequal taxon sets (even after killing)");

    // run the refine algorithm
    if (!silent)
        System.err.println("Computing common refinement of both trees");
    Refine.apply(root1, root2);

    if (true) {
        System.err.println(root1.toStringTree());
        System.err.println(root2.toStringTree());
    }

    if (tree1.getRoot() == null || tree2.getRoot() == null) {
        throw new IOException("Can't compute hybrid number, at least one of the trees is empty or unrooted");
    }

    // we maintain both trees in lexicographic order for ease of comparison
    root1.reorderSubTree();
    root2.reorderSubTree();

    if (!silent)
        System.err.println("Computing hybridization number using Autumn algorithm...");
    if (!silent)
        System.err.println("(Number of worker threads: " + (additionalThreads + 1) + ")");

    int result = computeHybridNumberRec(root1, root2, false, null, null, true, 0, new ValuesList());
    if (!silent)
        System.err.println("(Result: " + result + ")");
    if (!silent)
        System.err.println("Hybridization number: " + bestScore.get());
    if (bestScore.get() > result)
        throw new IOException("bestScore > result: " + bestScore.get() + " " + result);

    return bestScore.get();
}
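
The loops above build a fresh single-bit BitSet for each taxon to be removed. A minimal sketch of the BitSet idioms involved (set difference, nextSetBit iteration, one-bit sets), with the Dendroscope-specific classes (Cluster, Root, CopyWithTaxaRemoved) left out:

import java.util.BitSet;

public class SetMinusAndIteration {
    public static void main(String[] args) {
        BitSet taxa1 = new BitSet();
        taxa1.set(0); taxa1.set(1); taxa1.set(2);
        BitSet taxa2 = new BitSet();
        taxa2.set(1); taxa2.set(3);

        // standard-library set difference; Cluster.setminus presumably computes something similar
        BitSet onlyTree1 = (BitSet) taxa1.clone();
        onlyTree1.andNot(taxa2);                     // {0, 2}

        // walk the set bits and build a one-bit BitSet per taxon, as in the removal loop above
        for (int t = onlyTree1.nextSetBit(0); t != -1; t = onlyTree1.nextSetBit(t + 1)) {
            BitSet one = new BitSet();
            one.set(t);
            System.out.println("would remove taxon set " + one);
        }
    }
}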

From source file: hivemall.ftvec.ranking.ItemPairsSamplingUDTF.java

@Override
public void process(Object[] args) throws HiveException {
    final int numPosItems;
    final BitSet bits;
    if (bitsetInput) {
        if (_rand == null) {
            this._rand = new Random(43);
        }
        long[] longs = HiveUtils.asLongArray(args[0], listOI, listElemOI);
        bits = BitSet.valueOf(longs);
        numPosItems = bits.cardinality();
    } else {
        if (_bitset == null) {
            bits = new BitSet();
            this._bitset = bits;
            this._rand = new Random(43);
        } else {
            bits = _bitset;
            bits.clear();
        }
        numPosItems = HiveUtils.setBits(args[0], listOI, listElemOI, bits);
    }

    if (numPosItems == 0) {
        return;
    }
    final int numNegItems = maxItemId + 1 - numPosItems;
    if (numNegItems == 0) {
        return;
    } else if (numNegItems < 0) {
        throw new UDFArgumentException(
                "maxItemId + 1 - numPosItems = " + maxItemId + " + 1 - " + numPosItems + " = " + numNegItems);
    }

    if (withReplacement) {
        sampleWithReplacement(numPosItems, numNegItems, bits);
    } else {
        sampleWithoutReplacement(numPosItems, numNegItems, bits);
    }
}
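
The branch above either wraps an existing long[] word array with BitSet.valueOf or lazily creates one BitSet and clears it for reuse on later calls. A small standalone sketch of those two construction paths, with the Hive-specific helpers (HiveUtils, the object inspectors) left out:

import java.util.BitSet;

public class ValueOfVersusReuse {
    private static BitSet cached;   // stands in for the _bitset field reused across calls

    public static void main(String[] args) {
        // path 1: wrap a long[] word array directly, no per-bit copying
        long[] words = { 0b1011L };                     // bits 0, 1 and 3
        BitSet fromWords = BitSet.valueOf(words);
        System.out.println(fromWords.cardinality());    // 3

        // path 2: create the BitSet once, then clear and refill it on later calls
        if (cached == null) {
            cached = new BitSet();
        } else {
            cached.clear();
        }
        cached.set(7);
        System.out.println(cached);                     // {7}
    }
}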

From source file: org.apache.tez.runtime.library.output.UnorderedKVOutput.java

@Override
public synchronized List<Event> close() throws Exception {
    boolean outputGenerated = this.kvWriter.close();

    DataMovementEventPayloadProto.Builder payloadBuilder = DataMovementEventPayloadProto.newBuilder();

    LOG.info("Closing KVOutput: RawLength: " + this.kvWriter.getRawLength() + ", CompressedLength: "
            + this.kvWriter.getCompressedLength());

    if (dataViaEventsEnabled && outputGenerated
            && this.kvWriter.getCompressedLength() <= dataViaEventsMaxSize) {
        LOG.info("Serialzing actual data into DataMovementEvent, dataSize: "
                + this.kvWriter.getCompressedLength());
        byte[] data = this.kvWriter.getData();
        DataProto.Builder dataProtoBuilder = DataProto.newBuilder();
        dataProtoBuilder.setData(ByteString.copyFrom(data));
        dataProtoBuilder.setRawLength((int) this.kvWriter.getRawLength());
        dataProtoBuilder.setCompressedLength((int) this.kvWriter.getCompressedLength());
        payloadBuilder.setData(dataProtoBuilder.build());
    }

    // Set the list of empty partitions - single partition on this case.
    if (!outputGenerated) {
        LOG.info("No output was generated");
        BitSet emptyPartitions = new BitSet();
        emptyPartitions.set(0);
        ByteString emptyPartitionsBytesString = TezCommonUtils
                .compressByteArrayToByteString(TezUtilsInternal.toByteArray(emptyPartitions));
        payloadBuilder.setEmptyPartitions(emptyPartitionsBytesString);
    }
    if (outputGenerated) {
        String host = getHost();
        ByteBuffer shuffleMetadata = getContext()
                .getServiceProviderMetaData(ShuffleUtils.SHUFFLE_HANDLER_SERVICE_ID);
        int shufflePort = ShuffleUtils.deserializeShuffleProviderMetaData(shuffleMetadata);
        payloadBuilder.setHost(host);
        payloadBuilder.setPort(shufflePort);
        payloadBuilder.setPathComponent(getContext().getUniqueIdentifier());
    }
    DataMovementEventPayloadProto payloadProto = payloadBuilder.build();

    DataMovementEvent dmEvent = DataMovementEvent.create(0, payloadProto.toByteString().asReadOnlyByteBuffer());
    List<Event> events = Lists.newArrayListWithCapacity(1);
    events.add(dmEvent);
    return events;
}
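
When no output is generated, the method above marks partition 0 in a BitSet and ships it inside the event payload via Tez helpers (TezUtilsInternal, TezCommonUtils, plus compression). A minimal sketch of a comparable round trip using only the standard BitSet byte-array methods; the exact Tez encoding is not reproduced here:

import java.util.Arrays;
import java.util.BitSet;

public class EmptyPartitionsSketch {
    public static void main(String[] args) {
        // mark partition 0 as empty, as in close() above
        BitSet emptyPartitions = new BitSet();
        emptyPartitions.set(0);

        // standard-library serialization to bytes and back
        byte[] wire = emptyPartitions.toByteArray();
        System.out.println(Arrays.toString(wire));       // [1]

        BitSet decoded = BitSet.valueOf(wire);
        System.out.println(decoded.get(0));              // true
    }
}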

From source file: mastodon.algorithms.SALinearAlgorithm.java

protected void tryPruning() {
    //choose the number of species in list to perturb based on a Poisson distributions with rate equal to variable "mean" above
    int numberToSet = 0;
    int numberToClear = 0;

    while (numberToSet < 1 || numberToSet > currPrunedSpeciesCount) {
        numberToSet = pd.sample() + 1;
    }

    if (numberToSet > (bts.getTaxaCount() - currPrunedSpeciesCount)) {
        numberToSet = bts.getTaxaCount() - currPrunedSpeciesCount;
    }

    //if we are pruning by one more species now, clear one species less from the pruning list this time
    if (currPruning.cardinality() < currPrunedSpeciesCount) {
        numberToClear = numberToSet - 1;
    } else {
        numberToClear = numberToSet;
    }

    BitSet bitsToSet = new BitSet();
    BitSet bitsToClear = new BitSet();

    for (int e = 0; e < numberToSet; e++) {
        int choice = 0;
        while (true) {
            choice = (int) (Random.nextDouble() * bts.getTaxaCount());
            if (!currPruning.get(choice) && !bitsToSet.get(choice)) {
                break;
            }
        }
        bitsToSet.set(choice);
    }

    for (int e = 0; e < numberToClear; e++) {
        int choice = 0;
        while (true) {
            choice = (int) (Random.nextDouble() * bts.getTaxaCount());
            if (currPruning.get(choice) && !bitsToClear.get(choice)) {
                break;
            }
        }
        bitsToClear.set(choice);
    }

    currPruning.or(bitsToSet);
    currPruning.xor(bitsToClear);

    currScore = bts.pruneFast(currPruning);
    bts.unPrune();
}
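
The last lines above apply the perturbation: or() adds the newly chosen taxa to currPruning, and xor() with bits that are all currently set clears exactly those bits. A small standalone illustration of that update, without the Mastodon-specific objects (bts, pd):

import java.util.BitSet;

public class OrXorUpdate {
    public static void main(String[] args) {
        BitSet currPruning = new BitSet();
        currPruning.set(1); currPruning.set(4);     // currently pruned taxa {1, 4}

        BitSet bitsToSet = new BitSet();
        bitsToSet.set(7);                           // taxon to add

        BitSet bitsToClear = new BitSet();
        bitsToClear.set(4);                         // taxon to drop (already in currPruning)

        currPruning.or(bitsToSet);                  // {1, 4, 7}
        currPruning.xor(bitsToClear);               // xor with a subset flips those bits off
        System.out.println(currPruning);            // {1, 7}
    }
}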

From source file: org.apache.tez.dag.utils.TaskSpecificLaunchCmdOption.java

/**
 * Get the set of tasks that need additional launch command options within a vertex
 *
 * @param tasksInVertex
 * @return Set<Integer> containing the task indexes to be profiled
 */
private BitSet parseTasks(String tasksInVertex) {
    BitSet taskSet = new BitSet();
    if (Strings.isNullOrEmpty(tasksInVertex)) {
        return taskSet;
    }
    Iterable<String> tasks = Splitter.on(",").omitEmptyStrings().trimResults().split(tasksInVertex);
    for (String task : tasks) {
        /**
         * TODO: this is horrible way to check the ranges.
         * Should use RangeSet when guava is upgraded.  Also, need to support partial
         * ranges like "1:", ":50".  With current implementation partial ranges are not
         * allowed.
         */
        if (task.endsWith(":") || task.startsWith(":")) {
            //invalid range. e.g :20, 6: are not supported.
            LOG.warn("Partial range is considered as an invalid option");
            return null;
        }
        Matcher taskMatcher = RANGE_REGEX.matcher(task);
        if (taskMatcher.find()) {
            int start = Integer.parseInt((taskMatcher.group(1).trim()));
            int end = Integer.parseInt((taskMatcher.group(2).trim()));
            for (int i = Math.min(start, end); i <= Math.max(start, end); i++) {
                taskSet.set(i);
            }
        } else {
            taskSet.set(Integer.parseInt(task.trim()));
        }
    }
    return taskSet;
}
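
The parser above sets one bit per task index and expands ranges like "1-5" with an explicit loop. A short sketch of the same result; BitSet also offers a ranged set(fromIndex, toIndex) with an exclusive upper bound, which could replace the loop:

import java.util.BitSet;

public class TaskRangeSketch {
    public static void main(String[] args) {
        BitSet taskSet = new BitSet();

        // single task index, as in the else branch above
        taskSet.set(5);

        // inclusive range "2-4"; the loop above does this index by index,
        // while set(from, to) takes an exclusive upper bound
        int start = 2, end = 4;
        taskSet.set(Math.min(start, end), Math.max(start, end) + 1);

        System.out.println(taskSet);   // {2, 3, 4, 5}
    }
}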

From source file: mastodon.algorithms.MHLinearAlgorithm.java

protected void tryPruning() {
    //choose the number of species in list to perturb based on a Poisson distributions with rate equal to variable "mean" above
    int numberToSet = 0;
    int numberToClear = 0;

    while (numberToSet < 1 || numberToSet > currPrunedSpeciesCount) {
        numberToSet = pd.sample() + 1;
    }

    if (numberToSet > (bts.getTaxaCount() - currPrunedSpeciesCount)) {
        numberToSet = bts.getTaxaCount() - currPrunedSpeciesCount;
    }

    //if we are pruning by one more species now, clear one species less from the pruning list this time
    if (currPruning.cardinality() < currPrunedSpeciesCount) {
        numberToClear = numberToSet - 1;
    } else {
        numberToClear = numberToSet;
    }

    BitSet bitsToSet = new BitSet();
    BitSet bitsToClear = new BitSet();

    for (int e = 0; e < numberToSet; e++) {
        int choice = 0;
        while (true) {
            choice = (int) (Random.nextDouble() * bts.getTaxaCount());
            if (!currPruning.get(choice) && !bitsToSet.get(choice)) {
                break;
            }
        }
        bitsToSet.set(choice);
    }

    for (int e = 0; e < numberToClear; e++) {
        int choice = 0;
        while (true) {
            choice = (int) (Random.nextDouble() * bts.getTaxaCount());
            if (currPruning.get(choice) && !bitsToClear.get(choice)) {
                break;
            }
        }
        bitsToClear.set(choice);
    }

    currPruning.or(bitsToSet);
    currPruning.xor(bitsToClear);

    currScore = bts.pruneFast(currPruning);
    bts.unPrune();

}

From source file: com.joliciel.jochre.graphics.VectorizerImplTest.java

@Test
public void testArrayListize(@NonStrict final Shape shape, @NonStrict final JochreImage image) {
    final int threshold = 100;
    final int whiteGapFillFactor = 5;

    new NonStrictExpectations() {
        {
            shape.getHeight();
            returns(8);
            shape.getWidth();
            returns(8);
            shape.getJochreImage();
            returns(image);
            image.getBlackThreshold();
            returns(threshold);

            int[] pixels = { 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1,
                    0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1,
                    1, 1, 0, 0, 0 };

            for (int x = -1; x <= 8; x++)
                for (int y = -1; y <= 8; y++) {
                    shape.isPixelBlack(x, y, threshold, whiteGapFillFactor);
                    if (x >= 0 && x < 8 && y >= 0 && y < 8)
                        returns(pixels[y * 8 + x] == 1);
                    else
                        returns(false);
                }

            int[] outlinePixels = { 0, 1, 1, 0, 0, 1, 1, 1, // row 0
                    0, 1, 0, 1, 0, 1, 0, 1, // row 1
                    0, 0, 1, 1, 0, 0, 1, 1, // row 2
                    0, 0, 1, 1, 0, 1, 1, 0, // row 3
                    0, 0, 0, 1, 1, 0, 1, 0, // row 4
                    0, 0, 0, 1, 0, 1, 0, 0, // row 5
                    0, 0, 1, 0, 1, 0, 0, 0, // row 6
                    1, 1, 1, 1, 1, 0, 0, 0, // row 7
            };

            BitSet outline = new BitSet();
            for (int i = 0; i < 8 * 8; i++)
                outline.set(i, outlinePixels[i] == 1);

            shape.getOutline(threshold);
            returns(outline);

        }
    };

    VectorizerImpl vectorizer = new VectorizerImpl();
    GraphicsServiceInternal graphicsService = new GraphicsServiceImpl();
    vectorizer.setGraphicsService(graphicsService);
    vectorizer.setWhiteGapFillFactor(whiteGapFillFactor);
    List<LineSegment> lines = vectorizer.vectorize(shape);
    int i = 0;
    for (LineSegment lineSegment : lines) {
        double slope = (double) (lineSegment.getEndY() - lineSegment.getStartY())
                / (double) (lineSegment.getEndX() - lineSegment.getStartX());
        LOG.debug("Line " + i++ + "(" + lineSegment.getStartX() + "," + lineSegment.getStartY() + ") " + "("
                + lineSegment.getEndX() + "," + lineSegment.getEndY() + "). Length = " + lineSegment.getLength()
                + ", Slope = " + slope);
    }

}
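
The mocked shape.getOutline above packs an 8x8 pixel grid into a BitSet with the boolean-valued set(index, value), using y * width + x as the index. A standalone sketch of that bitmap encoding, with the Jochre classes (Shape, JochreImage, VectorizerImpl) omitted:

import java.util.BitSet;

public class OutlineBitmapSketch {
    public static void main(String[] args) {
        int width = 8, height = 8;
        int[] outlinePixels = new int[width * height];
        outlinePixels[0 * width + 1] = 1;     // pixel at (x=1, y=0)
        outlinePixels[7 * width + 0] = 1;     // pixel at (x=0, y=7)

        // pack the grid into a BitSet with the boolean-valued set(index, value)
        BitSet outline = new BitSet();
        for (int i = 0; i < width * height; i++)
            outline.set(i, outlinePixels[i] == 1);

        // read a pixel back with the same y * width + x indexing
        System.out.println(outline.get(0 * width + 1));   // true
        System.out.println(outline.get(3 * width + 3));   // false
    }
}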

From source file: org.rdfhdt.hdt.impl.BufferedCompressedStreaming.java

public BufferedCompressedStreaming(OutputStream out, CSSpecification spec, PrefixDictionary prefixes,
        Map<String, Boolean> predicateDiscrete) {

    // Import prefixes
    this.prefixes = prefixes;

    if (this.prefixes.size() > 0)
        usePrefixes = true;

    // Import predicateDiscrete
    this.predicateDiscrete = predicateDiscrete;

    // load max size dictionary
    String specMaxSizeDictionary = spec.get(CSVocabulary.SPEC_MAX_SIZE_DICTIONARY);
    max_size_dictionary = CSVocabulary.DEFAULT_MAX_SIZE_DICTIONARY;
    if (specMaxSizeDictionary != null && !"".equals(specMaxSizeDictionary)) {
        try {
            max_size_dictionary = Integer.parseInt(specMaxSizeDictionary);
        } catch (NumberFormatException e) {
            max_size_dictionary = CSVocabulary.DEFAULT_MAX_SIZE_DICTIONARY;
        }
    }
    // load block size if present
    String specBlockSize = spec.get(CSVocabulary.SPEC_BLOCK_SIZE);
    block_size = CSVocabulary.DEFAULT_BLOCK_SIZE;
    if (specBlockSize != null && !"".equals(specBlockSize)) {
        try {
            block_size = Integer.parseInt(specBlockSize);
        } catch (NumberFormatException e) {
            block_size = CSVocabulary.DEFAULT_BLOCK_SIZE;
        }
    }

    // load if the dictionary of subject must be stored
    String specStoreSubjectDictionary = spec.get(CSVocabulary.STORE_SUBJ_DICTIONARY);

    store_subj_dictionary = CSVocabulary.DEFAULT_STORE_SUBJ_DICTIONARY;
    if (specStoreSubjectDictionary != null && !"".equals(specStoreSubjectDictionary)) {

        store_subj_dictionary = Boolean.parseBoolean(specStoreSubjectDictionary);

    }

    // load if the predicates are not related with the same types and/or tags
    String specDisableLiteralTags = spec.get(CSVocabulary.DISABLE_CONSISTENT_PREDICATES);

    disable_consistent_predicates = CSVocabulary.DEFAULT_CONSISTENT_PREDICATES;
    if (specDisableLiteralTags != null && !"".equals(specDisableLiteralTags)) {

        disable_consistent_predicates = Boolean.parseBoolean(specDisableLiteralTags);

    }

    offset_tag = 1; // auxiliary offset by default for the last quote "
    if (disable_consistent_predicates)
        offset_tag = 0;

    if (store_subj_dictionary)
        subjects = new LRUDictionary<String, Integer>(max_size_dictionary);

    predicates = new HashMap<String, Integer>();
    // Consider ConcurrentHashMap<String, Integer>(); for parallel creation
    literalTagsByPredicate = new ArrayList<String>();
    sizeofTags = new ArrayList<Integer>();
    predicateLiterals = new BitSet();
    objects = new ArrayList<LRUDictionary<String, Integer>>(max_size_dictionary);
    structures = new LRUDictionary<String, Integer>(max_size_dictionary);

    currentBlock = new Block(store_subj_dictionary);

    subjectsToProcess = new ConcurrentHashMap<String, ArrayList<TripleString>>();

    this.out = out;
}

From source file: org.lockss.servlet.AddContentTab.java

/**
 * Handle a request
 *
 * @throws java.io.IOException
 */
public void lockssHandleRequest() throws IOException {
    if (!StringUtil.isNullString(req.getParameter("isDaemonReady"))) {
        if (pluginMgr.areAusStarted()) {
            resp.setStatus(200);
            PrintWriter wrtr = resp.getWriter();
            resp.setContentType("text/plain");
            wrtr.println("true");
        } else {
            PrintWriter wrtr = resp.getWriter();
            resp.setContentType("text/plain");
            wrtr.println("false");
            resp.sendError(202, "Not ready");
        }
        return;
    }

    if (StringUtil.isNullString(action)) {
        try {
            getMultiPartRequest();
            if (multiReq != null) {
                action = multiReq.getString(ACTION_TAG);
            }
        } catch (FormDataTooLongException e) {
            errMsg = "Uploaded file too large: " + e.getMessage();
            // leave action null, will call displayAuSummary() below
        }
    }

    outputFmt = OUTPUT_HTML; // default output is html

    String outputParam = req.getParameter("output");
    if (!StringUtil.isNullString(outputParam)) {
        if ("html".equalsIgnoreCase(outputParam)) {
            outputFmt = OUTPUT_HTML;
        } else {
            log.warning("Unknown output format: " + outputParam);
        }
    }
    String optionsParam = req.getParameter("options");

    tableOptions = new BitSet();

    if (isDebugUser()) {
        log.debug2("Debug user.  Setting OPTION_DEBUG_USER");
        tableOptions.set(StatusTable.OPTION_DEBUG_USER);
    }

    for (String s : StringUtil.breakAt(optionsParam, ',')) {
        if ("norows".equalsIgnoreCase(s)) {
            tableOptions.set(StatusTable.OPTION_NO_ROWS);
        }
    }

    tableName = req.getParameter("table");
    tableKey = req.getParameter("key");
    if (StringUtil.isNullString(tableName)) {
        tableName = "AuOverview";
    }
    if (StringUtil.isNullString(tableKey)) {
        tableKey = null;
    }
    sortKey = req.getParameter("sort");
    if (StringUtil.isNullString(sortKey)) {
        sortKey = null;
    }
    groupKey = req.getParameter("group");
    if (StringUtil.isNullString(groupKey)) {
        groupKey = "publisher";
    }
    type = req.getParameter("type");
    if (StringUtil.isNullString(type)) {
        type = "";
    }
    typeKey = req.getParameter("type");
    String auStartString = req.getParameter("start");
    if (auStartString == null) {
        auStart = 'a';
    } else {
        auStart = auStartString.charAt(0);
    }
    String auEndString = req.getParameter("end");
    if (auEndString == null) {
        auEnd = 'a';
    } else {
        auEnd = auEndString.charAt(0);
    }
    filterKey = req.getParameter("filter");
    if (StringUtil.isNullString(filterKey)) {
        filterKey = "";
    }
    timeKey = req.getParameter("timeKey");
    if (StringUtil.isNullString(timeKey)) {
        timeKey = "";
    }
    switch (outputFmt) {
    case OUTPUT_HTML:
        doHtmlStatusTable();
        break;
    }
}
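
Here the BitSet holds option flags keyed by integer constants (StatusTable.OPTION_DEBUG_USER, StatusTable.OPTION_NO_ROWS). A tiny sketch of that flag-set pattern, with hypothetical constants standing in for the LOCKSS ones:

import java.util.BitSet;

public class TableOptionsSketch {
    // hypothetical option indexes, standing in for the StatusTable.OPTION_* constants
    static final int OPTION_DEBUG_USER = 0;
    static final int OPTION_NO_ROWS = 1;

    public static void main(String[] args) {
        BitSet tableOptions = new BitSet();
        tableOptions.set(OPTION_DEBUG_USER);

        // callers test options without caring how many flags exist
        if (tableOptions.get(OPTION_DEBUG_USER)) {
            System.out.println("debug columns enabled");
        }
        System.out.println(tableOptions.get(OPTION_NO_ROWS));   // false
    }
}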

From source file: org.rdfhdt.hdt.impl.BufferedCompressedStreamingNoDictionary.java

public BufferedCompressedStreamingNoDictionary(OutputStream out, CSSpecification spec,
        PrefixDictionary prefixes, Map<String, Boolean> predicateDiscrete) {

    /* TEST */
    // predicateNoDictObjects.put("http://www.w3.org/2000/01/rdf-schema#label",true);

    // Import prefixes
    this.prefixes = prefixes;

    if (this.prefixes.size() > 0)
        usePrefixes = true;

    // Import predicateDiscrete
    this.predicateDiscrete = predicateDiscrete;

    // Import predicateUniq
    // this.predicateUniq = predicateUniq;

    // load max size dictionary
    String specMaxSizeDictionary = spec.get(CSVocabulary.SPEC_MAX_SIZE_DICTIONARY);
    max_size_dictionary = CSVocabulary.DEFAULT_MAX_SIZE_DICTIONARY;
    if (specMaxSizeDictionary != null && !"".equals(specMaxSizeDictionary)) {
        try {
            max_size_dictionary = Integer.parseInt(specMaxSizeDictionary);
        } catch (NumberFormatException e) {
            max_size_dictionary = CSVocabulary.DEFAULT_MAX_SIZE_DICTIONARY;
        }
    }
    // load block size if present
    String specBlockSize = spec.get(CSVocabulary.SPEC_BLOCK_SIZE);
    block_size = CSVocabulary.DEFAULT_BLOCK_SIZE;
    if (specBlockSize != null && !"".equals(specBlockSize)) {
        try {
            block_size = Integer.parseInt(specBlockSize);
        } catch (NumberFormatException e) {
            block_size = CSVocabulary.DEFAULT_BLOCK_SIZE;
        }
    }

    // load if the dictionary of subjects must be stored
    String specStoreDictionary = spec.get(CSVocabulary.STORE_SUBJ_DICTIONARY);

    store_subj_dictionary = CSVocabulary.DEFAULT_STORE_SUBJ_DICTIONARY;
    if (specStoreDictionary != null && !"".equals(specStoreDictionary)) {

        store_subj_dictionary = Boolean.parseBoolean(specStoreDictionary);

    }

    // load if the dictionary of objects must be stored
    specStoreDictionary = spec.get(CSVocabulary.STORE_OBJ_DICTIONARY);

    store_obj_dictionary = false;

    // load if the predicates are not related with the same types and/or tags
    String specDisableLiteralTags = spec.get(CSVocabulary.DISABLE_CONSISTENT_PREDICATES);

    disable_consistent_predicates = CSVocabulary.DEFAULT_CONSISTENT_PREDICATES;
    if (specDisableLiteralTags != null && !"".equals(specDisableLiteralTags)) {

        disable_consistent_predicates = Boolean.parseBoolean(specDisableLiteralTags);

    }

    offset_tag = 1; // auxiliary offset by default for the last quote "
    if (disable_consistent_predicates)
        offset_tag = 0;

    if (store_subj_dictionary)
        subjects = new LRUDictionary<String, Integer>(max_size_dictionary);

    predicates = new HashMap<String, Integer>();
    // Consider ConcurrentHashMap<String, Integer>(); for parallel creation
    literalTagsByPredicate = new ArrayList<String>();
    sizeofTags = new ArrayList<Integer>();
    predicateLiterals = new BitSet();

    structures = new LRUDictionary<String, Integer>(max_size_dictionary);

    currentBlock = new BlockNoDictionary(store_subj_dictionary);

    subjectsToProcess = new ConcurrentHashMap<String, ArrayList<TripleString>>();

    this.out = out;
}