Example usage for java.util AbstractList AbstractList

List of usage examples for java.util AbstractList AbstractList

Introduction

On this page you can find example usages of the java.util AbstractList constructor.

Prototype

protected AbstractList() 

Source Link

Document

Sole constructor.

Usage

From source file:io.druid.segment.IndexMaker.java

/**
 * Serializes a single dimension column into the v9 smoosher: the value
 * dictionary, the (single- or multi-valued) encoded value column, per-value
 * bitmap indexes, and an optional spatial index.
 *
 * <p>Rows that carry no value for this dimension are tracked in a null set;
 * if the dictionary does not already contain a null entry, it is "bumped"
 * (null prepended at position 0) and all row values are shifted accordingly.
 *
 * @param v9Smoosher            target smoosher the finished column is written to
 * @param adapters              per-segment adapters supplying bitmap indexes
 * @param progress              progress indicator (a section is opened/closed per dimension)
 * @param theRows               merged rows in final row order
 * @param dimIndex              index of this dimension within each row's dims array
 * @param dimension             dimension name
 * @param columnCapabilities    capabilities (multi-value, spatial) per dimension
 * @param dimensionValuesLookup sorted dictionary values per dimension
 * @param rowNumConversions     old-row -> new-row mappings, one per adapter
 * @param bitmapSerdeFactory    factory for bitmap creation/serialization
 * @param compressionStrategy   null for uncompressed VSize encoding
 * @throws IOException if writing the column fails
 */
private static void makeDimColumn(final FileSmoosher v9Smoosher, final List<IndexableAdapter> adapters,
        final ProgressIndicator progress, final Iterable<Rowboat> theRows, final int dimIndex,
        final String dimension, final Map<String, ColumnCapabilitiesImpl> columnCapabilities,
        final Map<String, Iterable<String>> dimensionValuesLookup, final List<IntBuffer> rowNumConversions,
        final BitmapSerdeFactory bitmapSerdeFactory,
        final CompressedObjectStrategy.CompressionStrategy compressionStrategy) throws IOException {
    final String section = String.format("make %s", dimension);
    progress.startSection(section);

    final ColumnDescriptor.Builder dimBuilder = ColumnDescriptor.builder();
    dimBuilder.setValueType(ValueType.STRING);

    final List<ByteBuffer> outParts = Lists.newArrayList();

    ByteArrayOutputStream nameBAOS = new ByteArrayOutputStream();
    serializerUtils.writeString(nameBAOS, dimension);
    outParts.add(ByteBuffer.wrap(nameBAOS.toByteArray()));

    boolean hasMultipleValues = columnCapabilities.get(dimension).hasMultipleValues();
    dimBuilder.setHasMultipleValues(hasMultipleValues);

    // make dimension columns
    List<Integer> singleValCol;
    final VSizeIndexed multiValCol;

    ColumnDictionaryEntryStore adder = hasMultipleValues ? new MultiValColumnDictionaryEntryStore()
            : new SingleValColumnDictionaryEntryStore();

    final BitmapFactory bitmapFactory = bitmapSerdeFactory.getBitmapFactory();
    MutableBitmap nullSet = null;
    int rowCount = 0;

    for (Rowboat theRow : theRows) {
        // BUGFIX: was "dimIndex > theRow.getDims().length"; the == case then
        // fell into the else branch and getDims()[dimIndex] threw
        // ArrayIndexOutOfBoundsException. Valid indices are 0..length-1.
        if (dimIndex >= theRow.getDims().length) {
            if (nullSet == null) {
                nullSet = bitmapFactory.makeEmptyMutableBitmap();
            }
            nullSet.add(rowCount);
            adder.add(null);
        } else {
            int[] dimVals = theRow.getDims()[dimIndex];
            if (dimVals == null || dimVals.length == 0) {
                if (nullSet == null) {
                    nullSet = bitmapFactory.makeEmptyMutableBitmap();
                }
                nullSet.add(rowCount);
            }
            adder.add(dimVals);
        }
        rowCount++;
    }

    final Iterable<String> dimensionValues = dimensionValuesLookup.get(dimension);
    GenericIndexed<String> dictionary = GenericIndexed.fromIterable(dimensionValues,
            GenericIndexed.STRING_STRATEGY);
    // Set when a null entry had to be prepended to the dictionary, so the
    // bitmap list below must also gain a leading null-set entry.
    boolean bumpDictionary = false;

    if (hasMultipleValues) {
        final List<List<Integer>> vals = ((MultiValColumnDictionaryEntryStore) adder).get();
        if (nullSet != null) {
            log.info("Dimension[%s] has null rows.", dimension);

            // If the sorted dictionary's first value is non-null, it contains
            // no null entry yet; prepend one and shift all ids by +1.
            if (Iterables.getFirst(dimensionValues, "") != null) {
                bumpDictionary = true;
                log.info("Dimension[%s] has no null value in the dictionary, expanding...", dimension);

                dictionary = GenericIndexed.fromIterable(
                        Iterables.concat(Collections.<String>singleton(null), dimensionValues),
                        GenericIndexed.STRING_STRATEGY);

                final int dictionarySize = dictionary.size();

                singleValCol = null;
                multiValCol = VSizeIndexed.fromIterable(
                        Iterables.transform(vals, new Function<List<Integer>, VSizeIndexedInts>() {
                            @Override
                            public VSizeIndexedInts apply(final List<Integer> input) {
                                if (input == null) {
                                    // null row -> single reference to the new null entry (id 0)
                                    return VSizeIndexedInts.fromList(ImmutableList.<Integer>of(0),
                                            dictionarySize);
                                } else {
                                    return VSizeIndexedInts.fromList(
                                            new NullsAtZeroConvertingIntList(input, false), dictionarySize);
                                }
                            }
                        }));
            } else {
                // Dictionary already starts with null; ids need no shifting.
                final int dictionarySize = dictionary.size();
                singleValCol = null;
                multiValCol = VSizeIndexed.fromIterable(
                        Iterables.transform(vals, new Function<List<Integer>, VSizeIndexedInts>() {
                            @Override
                            public VSizeIndexedInts apply(List<Integer> input) {
                                if (input == null) {
                                    return VSizeIndexedInts.fromList(ImmutableList.<Integer>of(0),
                                            dictionarySize);
                                } else {
                                    return VSizeIndexedInts.fromList(input, dictionarySize);
                                }
                            }
                        }));
            }
        } else {
            // No null rows: encode the stored id lists as-is.
            final int dictionarySize = dictionary.size();
            singleValCol = null;
            multiValCol = VSizeIndexed
                    .fromIterable(Iterables.transform(vals, new Function<List<Integer>, VSizeIndexedInts>() {
                        @Override
                        public VSizeIndexedInts apply(List<Integer> input) {
                            return VSizeIndexedInts.fromList(input, dictionarySize);
                        }
                    }));
        }
    } else {
        final List<Integer> vals = ((SingleValColumnDictionaryEntryStore) adder).get();

        if (nullSet != null) {
            log.info("Dimension[%s] has null rows.", dimension);

            if (Iterables.getFirst(dimensionValues, "") != null) {
                bumpDictionary = true;
                log.info("Dimension[%s] has no null value in the dictionary, expanding...", dimension);

                final List<String> nullList = Lists.newArrayList();
                nullList.add(null);

                dictionary = GenericIndexed.fromIterable(Iterables.concat(nullList, dimensionValues),
                        GenericIndexed.STRING_STRATEGY);
                multiValCol = null;
                singleValCol = new NullsAtZeroConvertingIntList(vals, false);
            } else {
                multiValCol = null;
                singleValCol = new NullsAtZeroConvertingIntList(vals, true);
            }
        } else {
            multiValCol = null;
            // Lazy read-only view over the stored ids; avoids copying.
            singleValCol = new AbstractList<Integer>() {
                @Override
                public Integer get(int index) {
                    return vals.get(index);
                }

                @Override
                public int size() {
                    return vals.size();
                }
            };
        }
    }

    // Make bitmap indexes
    List<MutableBitmap> mutableBitmaps = Lists.newArrayList();
    for (String dimVal : dimensionValues) {
        List<Iterable<Integer>> convertedInverteds = Lists.newArrayListWithCapacity(adapters.size());
        for (int j = 0; j < adapters.size(); ++j) {
            convertedInverteds.add(new ConvertingIndexedInts(adapters.get(j).getBitmapIndex(dimension, dimVal),
                    rowNumConversions.get(j)));
        }

        MutableBitmap bitset = bitmapSerdeFactory.getBitmapFactory().makeEmptyMutableBitmap();
        for (Integer row : CombiningIterable.createSplatted(convertedInverteds,
                Ordering.<Integer>natural().nullsFirst())) {
            if (row != INVALID_ROW) {
                bitset.add(row);
            }
        }

        mutableBitmaps.add(bitset);
    }

    GenericIndexed<ImmutableBitmap> bitmaps;

    if (nullSet != null) {
        final ImmutableBitmap theNullSet = bitmapFactory.makeImmutableBitmap(nullSet);
        if (bumpDictionary) {
            // Dictionary gained a leading null entry; prepend its bitmap too.
            bitmaps = GenericIndexed.fromIterable(Iterables.concat(Arrays.asList(theNullSet),
                    Iterables.transform(mutableBitmaps, new Function<MutableBitmap, ImmutableBitmap>() {
                        @Override
                        public ImmutableBitmap apply(MutableBitmap input) {
                            return bitmapFactory.makeImmutableBitmap(input);
                        }
                    })), bitmapSerdeFactory.getObjectStrategy());
        } else {
            Iterable<ImmutableBitmap> immutableBitmaps = Iterables.transform(mutableBitmaps,
                    new Function<MutableBitmap, ImmutableBitmap>() {
                        @Override
                        public ImmutableBitmap apply(MutableBitmap input) {
                            return bitmapFactory.makeImmutableBitmap(input);
                        }
                    });

            // Dictionary already had a null entry: merge the null rows into
            // the first (null-value) bitmap instead of adding a new one.
            bitmaps = GenericIndexed.fromIterable(Iterables.concat(
                    Arrays.asList(theNullSet.union(Iterables.getFirst(immutableBitmaps, null))),
                    Iterables.skip(immutableBitmaps, 1)), bitmapSerdeFactory.getObjectStrategy());
        }
    } else {
        bitmaps = GenericIndexed.fromIterable(
                Iterables.transform(mutableBitmaps, new Function<MutableBitmap, ImmutableBitmap>() {
                    @Override
                    public ImmutableBitmap apply(MutableBitmap input) {
                        return bitmapFactory.makeImmutableBitmap(input);
                    }
                }), bitmapSerdeFactory.getObjectStrategy());
    }

    // Make spatial indexes
    ImmutableRTree spatialIndex = null;
    boolean hasSpatialIndexes = columnCapabilities.get(dimension).hasSpatialIndexes();
    RTree tree = null;
    if (hasSpatialIndexes) {
        tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50, bitmapSerdeFactory.getBitmapFactory()),
                bitmapSerdeFactory.getBitmapFactory());
    }

    int dimValIndex = 0;
    for (String dimVal : dimensionValuesLookup.get(dimension)) {
        if (hasSpatialIndexes) {
            if (dimVal != null && !dimVal.isEmpty()) {
                // Spatial values are stored as delimiter-separated coordinates.
                List<String> stringCoords = Lists.newArrayList(SPLITTER.split(dimVal));
                float[] coords = new float[stringCoords.size()];
                for (int j = 0; j < coords.length; j++) {
                    coords[j] = Float.valueOf(stringCoords.get(j));
                }
                tree.insert(coords, mutableBitmaps.get(dimValIndex));
            }
            dimValIndex++;
        }
    }
    if (hasSpatialIndexes) {
        spatialIndex = ImmutableRTree.newImmutableFromMutable(tree);
    }

    log.info("Completed dimension[%s] with cardinality[%,d]. Starting write.", dimension, dictionary.size());

    final DictionaryEncodedColumnPartSerde.Builder dimPartBuilder = DictionaryEncodedColumnPartSerde.builder()
            .withDictionary(dictionary).withBitmapSerdeFactory(bitmapSerdeFactory).withBitmaps(bitmaps)
            .withSpatialIndex(spatialIndex).withByteOrder(IndexIO.BYTE_ORDER);

    if (singleValCol != null) {
        if (compressionStrategy != null) {
            dimPartBuilder.withSingleValuedColumn(
                    CompressedVSizeIntsIndexedSupplier.fromList(singleValCol, dictionary.size(),
                            CompressedVSizeIntsIndexedSupplier.maxIntsInBufferForValue(dictionary.size()),
                            IndexIO.BYTE_ORDER, compressionStrategy));
        } else {
            dimPartBuilder.withSingleValuedColumn(VSizeIndexedInts.fromList(singleValCol, dictionary.size()));
        }
    } else if (compressionStrategy != null) {
        dimPartBuilder.withMultiValuedColumn(CompressedVSizeIndexedSupplier.fromIterable(multiValCol,
                dictionary.size(), IndexIO.BYTE_ORDER, compressionStrategy));
    } else {
        dimPartBuilder.withMultiValuedColumn(multiValCol);
    }

    writeColumn(v9Smoosher, dimPartBuilder.build(), dimBuilder, dimension);

    progress.stopSection(section);
}

From source file:org.briljantframework.array.Arrays.java

/**
 * Broadcast the given arrays against each other.
 *
 * <p>Computes the common broadcast shape (NumPy-style, aligning trailing
 * dimensions) and returns a lazy list of views; no array data is copied.
 *
 * @param arrays the arrays to broadcast (must be non-empty)
 * @param <E> the array type
 * @return a list of broadcasted array views
 * @throws IllegalArgumentException if the shapes are not broadcast-compatible
 */
public static <E extends BaseArray<E>> List<E> broadcastArrays(List<? extends E> arrays) {
    Check.argument(!arrays.isEmpty(), "no arrays given");
    if (arrays.size() == 1) {
        return new ArrayList<>(arrays);
    }
    int maxDims = arrays.stream().mapToInt(BaseArray::dims).max().getAsInt();
    final int[] targetShape = new int[maxDims];
    java.util.Arrays.fill(targetShape, 1);
    for (E array : arrays) {
        for (int offset = 0; offset < targetShape.length; offset++) {
            if (offset >= array.dims()) {
                // This array has no dimension here; the implicit size 1
                // broadcasts against anything and cannot grow the target.
                continue;
            }
            int t = targetShape.length - 1 - offset;
            int dimSize = array.size(array.dims() - 1 - offset);
            // Two sizes are compatible when they match or either is 1.
            boolean compatible = targetShape[t] == dimSize || targetShape[t] == 1 || dimSize == 1;
            if (!compatible) {
                throw new IllegalArgumentException("arrays cannot be broadcast to the same shape");
            }
            targetShape[t] = Math.max(targetShape[t], dimSize);
        }
    }
    // Lazy read-only view: each element is re-strided on access.
    return new AbstractList<E>() {
        @Override
        public E get(int index) {
            E source = arrays.get(index);
            int[] strides = StrideUtils.broadcastStrides(source.getStride(), source.getShape(), targetShape);
            return source.asView(targetShape, strides);
        }

        @Override
        public int size() {
            return arrays.size();
        }
    };
}

From source file:org.pentaho.di.repository.pur.PurRepositoryIT.java

/**
 * Exports the repository with a "transformation must have a note" import
 * rule enabled and verifies that only the transformation carrying a note
 * survives the export.
 */
@Test
public void testExportWithRules() throws Exception {
    String fileName = "testExportWithRuled.xml";
    final String exportFileName = new File(fileName).getAbsolutePath(); //$NON-NLS-1$

    RepositoryDirectoryInterface rootDir = initRepo();

    // First transformation: saved WITHOUT a note, so the export rule
    // should filter it out.
    String transWithoutNoteName = "2" + EXP_DBMETA_NAME;
    TransMeta transWithoutNote = createTransMeta(transWithoutNoteName);
    String transUniqueName = EXP_TRANS_NAME.concat(transWithoutNoteName);

    RepositoryDirectoryInterface transDir = rootDir.findDirectory(DIR_TRANSFORMATIONS);
    repository.save(transWithoutNote, VERSION_COMMENT_V1, null);
    deleteStack.push(transWithoutNote); // So this transformation is cleaned up afterward
    assertNotNull(transWithoutNote.getObjectId());

    assertTrue(hasVersionWithComment(transWithoutNote, VERSION_COMMENT_V1));
    assertTrue(repository.exists(transUniqueName, transDir, RepositoryObjectType.TRANSFORMATION));

    // Second transformation (contained note)
    String transWithNoteName = "1" + EXP_DBMETA_NAME;
    TransMeta transWithNote = createTransMeta(transWithNoteName);
    transUniqueName = EXP_TRANS_NAME.concat(EXP_DBMETA_NAME);
    TransMeta transWithRules = createTransMeta(EXP_DBMETA_NAME);

    NotePadMeta note = new NotePadMeta("Note Message", 1, 1, 100, 5);
    transWithRules.addNote(note);

    repository.save(transWithRules, VERSION_COMMENT_V1, null);
    deleteStack.push(transWithRules); // So this transformation is cleaned up afterward
    assertNotNull(transWithRules.getObjectId());

    assertTrue(hasVersionWithComment(transWithRules, VERSION_COMMENT_V1));
    assertTrue(repository.exists(transUniqueName, transDir, RepositoryObjectType.TRANSFORMATION));

    // Create the rule for export to the .xml file.
    // FIX: the original built an AbstractList whose get() instantiated a NEW
    // rule on every call (ignoring the index), so repeated accesses returned
    // distinct objects. Build one configured rule and wrap it in a singleton
    // list instead.
    TransformationHasANoteImportRule hasNoteRule = new TransformationHasANoteImportRule();
    hasNoteRule.setEnabled(true);
    List<ImportRuleInterface> rules = java.util.Collections.singletonList(hasNoteRule);
    ImportRules importRules = new ImportRules();
    importRules.setRules(rules);

    // create exporter
    IRepositoryExporter exporter = repository.getExporter();
    exporter.setImportRulesToValidate(importRules);

    // export itself
    try {
        exporter.exportAllObjects(new MockProgressMonitorListener(), exportFileName, null, "all"); //$NON-NLS-1$
        FileObject exportFile = KettleVFS.getFileObject(exportFileName);
        assertNotNull(exportFile);
        MockRepositoryExportParser parser = new MockRepositoryExportParser();
        SAXParserFactory.newInstance().newSAXParser().parse(KettleVFS.getInputStream(exportFile), parser);
        if (parser.getFatalError() != null) {
            throw parser.getFatalError();
        }
        // assumed transformation with note will be here and only it
        assertEquals("Incorrect number of transformations", 1,
                parser.getNodesWithName(RepositoryObjectType.TRANSFORMATION.getTypeDescription()).size()); //$NON-NLS-1$ //$NON-NLS-2$
    } finally {
        KettleVFS.getFileObject(exportFileName).delete();
    }
}

From source file:mondrian.rolap.RolapSchemaLoader.java

/**
 * Registers a key on the given physical table.
 *
 * <p>The key comes either from an explicit {@code xmlKey} element or, when
 * that is null, from a single {@code columnName} (named "primary"). Column
 * references cannot be resolved yet at this point, so placeholder
 * {@link RolapSchema.UnresolvedColumn}s are added to the key; each one
 * patches itself into the key's column list (at its captured position) when
 * {@code onResolve} fires later.
 *
 * @param handler              error handler for duplicate-key reporting
 * @param xmlTable             XML element the implicit column belongs to
 * @param columnName           single key column name; may be null if xmlKey is given
 * @param xmlKey               explicit key definition; may be null
 * @param unresolvedColumnList accumulator of columns awaiting resolution
 * @param physTable            physical table the key is registered on
 */
private void registerKey(Handler handler, ElementDef xmlTable, String columnName, final MondrianDef.Key xmlKey,
        List<RolapSchema.UnresolvedColumn> unresolvedColumnList, final RolapSchema.PhysRelationImpl physTable) {
    String keyName;
    List<Tcl> columns;
    if (xmlKey == null) {
        if (columnName == null) {
            // If both null, nothing to do.
            return;
        }
        columns = Collections.singletonList(new Tcl(null, columnName, xmlTable));
        keyName = "primary";
    } else {
        // Lazy view over the XML key's column array; elements are wrapped
        // in Tcl on access rather than copied eagerly.
        columns = new AbstractList<Tcl>() {
            public Tcl get(int index) {
                MondrianDef.Column column = xmlKey.array[index];
                return new Tcl(column.table, column.name, column);
            }

            public int size() {
                return xmlKey.array.length;
            }
        };
        keyName = xmlKey.name;
        if (keyName == null) {
            keyName = "primary";
        }
    }
    if (physTable.lookupKey(keyName) != null) {
        handler.error("Table has more than one key with name '" + keyName + "'", xmlKey, null);
        return;
    }
    // Key starts with an empty column list; entries are appended below and
    // later replaced in-place by onResolve callbacks.
    final RolapSchema.PhysKey key = physTable.addKey(keyName, new ArrayList<RolapSchema.PhysColumn>());
    int i = 0;
    for (Tcl columnRef : columns) {
        // Capture this column's position so the callback can overwrite the
        // correct slot once the real column is known.
        final int index = i++;
        final RolapSchema.UnresolvedColumn unresolvedColumn = new RolapSchema.UnresolvedColumn(physTable,
                columnRef.table != null ? columnRef.table : physTable.alias, columnRef.column, columnRef.xml) {
            public void onResolve(RolapSchema.PhysColumn column) {
                assert column != null;
                key.columnList.set(index, column);
            }

            public String getContext() {
                return ", in key of table '" + physTable.alias + "'";
            }
        };
        key.columnList.add(unresolvedColumn);
        unresolvedColumnList.add(unresolvedColumn);
    }
    //        if (key.columnList.size() != 1) {
    //            handler.warning(
    //                "Key must have precisely one column; key "
    //                + key.columnList
    //                + " in table '"
    //                + physTable.alias + "'.",
    //                xmlKey,
    //                null);
    //        }
}

From source file:org.alfresco.rest.api.impl.NodesImpl.java

/**
 * Lists the children of a folder node, honouring "where" clause filters
 * (isFile/isFolder/isPrimary/assocType/nodeType), sorting and paging.
 *
 * <p>The returned page wraps the underlying {@code FileInfo} results in a
 * lazy list, converting each entry to a {@code Node} only when accessed.
 */
@Override
public CollectionWithPagingInfo<Node> listChildren(String parentFolderNodeId, Parameters parameters) {
    String path = parameters.getParameter(PARAM_RELATIVE_PATH);

    final NodeRef parentNodeRef = validateOrLookupNode(parentFolderNodeId, path);

    final List<String> includeParam = parameters.getInclude();

    // Filters parsed from the "where" clause; null means "not specified".
    Boolean includeFolders = null;
    Boolean includeFiles = null;
    Boolean isPrimary = null;
    QName assocTypeQNameParam = null;
    QName filterNodeTypeQName = null;

    // note: for files/folders, include subtypes by default (unless filtering by a specific nodeType - see below)
    boolean filterIncludeSubTypes = true;

    Query q = parameters.getQuery();

    if (q != null) {
        // filtering via "where" clause
        MapBasedQueryWalker propertyWalker = new MapBasedQueryWalker(
                LIST_FOLDER_CHILDREN_EQUALS_QUERY_PROPERTIES, null);
        QueryHelper.walk(q, propertyWalker);

        isPrimary = propertyWalker.getProperty(PARAM_ISPRIMARY, WhereClauseParser.EQUALS, Boolean.class);

        String assocTypeQNameStr = propertyWalker.getProperty(PARAM_ASSOC_TYPE, WhereClauseParser.EQUALS,
                String.class);
        if (assocTypeQNameStr != null) {
            assocTypeQNameParam = getAssocType(assocTypeQNameStr);
        }

        Boolean isFolder = propertyWalker.getProperty(PARAM_ISFOLDER, WhereClauseParser.EQUALS, Boolean.class);
        Boolean isFile = propertyWalker.getProperty(PARAM_ISFILE, WhereClauseParser.EQUALS, Boolean.class);

        if (isFolder != null) {
            includeFolders = isFolder;
        }

        if (isFile != null) {
            includeFiles = isFile;
        }

        // A node cannot be both; reject the contradictory combination early.
        if (Boolean.TRUE.equals(includeFiles) && Boolean.TRUE.equals(includeFolders)) {
            throw new InvalidArgumentException(
                    "Invalid filter (isFile=true and isFolder=true) - a node cannot be both a file and a folder");
        }

        String nodeTypeStr = propertyWalker.getProperty(PARAM_NODETYPE, WhereClauseParser.EQUALS, String.class);
        if ((nodeTypeStr != null) && (!nodeTypeStr.isEmpty())) {
            // nodeType filtering is mutually exclusive with isFile/isFolder.
            if ((isFile != null) || (isFolder != null)) {
                throw new InvalidArgumentException(
                        "Invalid filter - nodeType and isFile/isFolder are mutually exclusive");
            }

            Pair<QName, Boolean> pair = parseNodeTypeFilter(nodeTypeStr);
            filterNodeTypeQName = pair.getFirst();
            filterIncludeSubTypes = pair.getSecond();
        }
    }

    List<SortColumn> sortCols = parameters.getSorting();
    List<Pair<QName, Boolean>> sortProps = null;
    if ((sortCols != null) && (sortCols.size() > 0)) {
        // TODO should we allow isFile in sort (and map to reverse of isFolder) ?
        sortProps = new ArrayList<>(sortCols.size());
        for (SortColumn sortCol : sortCols) {
            // Map well-known column aliases to QNames, else parse directly.
            QName propQname = PARAM_SYNONYMS_QNAME.get(sortCol.column);
            if (propQname == null) {
                propQname = createQName(sortCol.column);
            }

            if (propQname != null) {
                sortProps.add(new Pair<>(propQname, sortCol.asc));
            }
        }
    } else {
        // default sort order: folders first, then by name ascending
        sortProps = new ArrayList<>(
                Arrays.asList(new Pair<>(GetChildrenCannedQuery.SORT_QNAME_NODE_IS_FOLDER, Boolean.FALSE),
                        new Pair<>(ContentModel.PROP_NAME, true)));
    }

    List<FilterProp> filterProps = null;
    if (isPrimary != null) {
        filterProps = new ArrayList<>(1);
        filterProps.add(new FilterPropBoolean(GetChildrenCannedQuery.FILTER_QNAME_NODE_IS_PRIMARY, isPrimary));
    }

    Paging paging = parameters.getPaging();

    PagingRequest pagingRequest = Util.getPagingRequest(paging);

    final PagingResults<FileInfo> pagingResults;

    // notes (see also earlier validation checks):
    // - no filtering means any types/sub-types (well, apart from hidden &/or default ignored types - eg. systemfolder, fm types)
    // - node type filtering is mutually exclusive from isFile/isFolder, can optionally also include sub-types
    // - isFile & isFolder cannot both be true
    // - (isFile=false) means any other types/sub-types (other than files)
    // - (isFolder=false) means any other types/sub-types (other than folders)
    // - (isFile=false and isFolder=false) means any other types/sub-types (other than files or folders)

    if (filterNodeTypeQName == null) {
        if ((includeFiles == null) && (includeFolders == null)) {
            // no additional filtering
            filterNodeTypeQName = ContentModel.TYPE_CMOBJECT;
        } else if ((includeFiles != null) && (includeFolders != null)) {
            if ((!includeFiles) && (!includeFolders)) {
                // no files or folders
                filterNodeTypeQName = ContentModel.TYPE_CMOBJECT;
            }
        } else if ((includeFiles != null) && (!includeFiles)) {
            // no files
            filterNodeTypeQName = ContentModel.TYPE_CMOBJECT;
        } else if ((includeFolders != null) && (!includeFolders)) {
            // no folders
            filterNodeTypeQName = ContentModel.TYPE_CMOBJECT;
        }
    }

    Pair<Set<QName>, Set<QName>> pair = buildSearchTypesAndIgnoreAspects(filterNodeTypeQName,
            filterIncludeSubTypes, ignoreQNames, includeFiles, includeFolders);
    Set<QName> searchTypeQNames = pair.getFirst();
    Set<QName> ignoreAspectQNames = pair.getSecond();

    Set<QName> assocTypeQNames = buildAssocTypes(assocTypeQNameParam);

    // call GetChildrenCannedQuery (via FileFolderService)
    if (((filterProps == null) || (filterProps.size() == 0))
            && ((assocTypeQNames == null) || (assocTypeQNames.size() == 0))
            && (smartStore.isVirtual(parentNodeRef) || (smartStore.canVirtualize(parentNodeRef)))) {
        pagingResults = fileFolderService.list(parentNodeRef, searchTypeQNames, ignoreAspectQNames, sortProps,
                pagingRequest);
    } else {
        // TODO smart folders (see REPO-1173)
        pagingResults = fileFolderService.list(parentNodeRef, assocTypeQNames, searchTypeQNames,
                ignoreAspectQNames, sortProps, filterProps, pagingRequest);
    }

    // Shared cache of user display info across all converted nodes.
    final Map<String, UserInfo> mapUserInfo = new HashMap<>(10);

    final List<FileInfo> page = pagingResults.getPage();
    // Lazy read-only view: FileInfo -> Node conversion happens per access.
    List<Node> nodes = new AbstractList<Node>() {
        @Override
        public Node get(int index) {
            FileInfo fInfo = page.get(index);

            // minimal info by default (unless "include"d otherwise)
            return getFolderOrDocument(fInfo.getNodeRef(), parentNodeRef, fInfo.getType(), includeParam,
                    mapUserInfo);
        }

        @Override
        public int size() {
            return page.size();
        }
    };

    Node sourceEntity = null;
    if (parameters.includeSource()) {
        sourceEntity = getFolderOrDocumentFullInfo(parentNodeRef, null, null, null, mapUserInfo);
    }

    return CollectionWithPagingInfo.asPaged(paging, nodes, pagingResults.hasMoreItems(),
            pagingResults.getTotalResultCount().getFirst(), sourceEntity);
}

From source file:mondrian.olap.Util.java

/**
 * Converts an olap4j {@code KeySegment} into a mondrian {@code Id.KeySegment}.
 *
 * <p>The key parts are wrapped in a lazy read-only view: each part is
 * converted on access rather than copied up front.
 */
private static Id.KeySegment convert(final KeySegment keySegment) {
    return new Id.KeySegment(new AbstractList<Id.NameSegment>() {
        @Override
        public Id.NameSegment get(int position) {
            return convert(keySegment.getKeyParts().get(position));
        }

        @Override
        public int size() {
            return keySegment.getKeyParts().size();
        }
    });
}

From source file:mondrian.olap.Util.java

/**
 * Converts a list of mondrian {@code Id.Segment}s to olap4j
 * {@code IdentifierSegment}s.
 *
 * <p>Returns a lazy read-only view backed by the original list; each
 * segment is converted on access, so changes in {@code segments} are
 * reflected in the result.
 */
public static List<IdentifierSegment> toOlap4j(final List<Id.Segment> segments) {
    return new AbstractList<IdentifierSegment>() {
        @Override
        public IdentifierSegment get(int position) {
            return toOlap4j(segments.get(position));
        }

        @Override
        public int size() {
            return segments.size();
        }
    };
}

From source file:mondrian.olap.Util.java

/**
 * Converts a mondrian {@code Id.KeySegment} into an olap4j {@code KeySegment}.
 *
 * <p>Sub-segments are exposed through a lazy read-only view and converted
 * individually on access.
 */
private static KeySegment toOlap4j(final Id.KeySegment keySegment) {
    return new KeySegment(new AbstractList<NameSegment>() {
        @Override
        public NameSegment get(int position) {
            return toOlap4j(keySegment.subSegmentList.get(position));
        }

        @Override
        public int size() {
            return keySegment.subSegmentList.size();
        }
    });
}

From source file:mondrian.olap.fun.FunUtil.java

/**
 * Marc's original algorithm for stable partial sort of a list.
 * Now superseded by {@link #stablePartialSortJulian}.
 *
 * <p>Returns the first {@code min(limit, list.size())} elements of
 * {@code list} in sorted order; elements that compare equal keep their
 * original relative order (stability). The input list is not modified.
 * The result is an immutable view over an internal array, so no second
 * copy is made.
 *
 * @param list  list to partially sort (read only)
 * @param comp  comparator defining the order
 * @param limit maximum number of leading sorted elements wanted; must be >= 0
 * @param <T>   element type
 * @return immutable list of the smallest {@code min(limit, n)} elements
 */
public static <T> List<T> stablePartialSortMarc(final List<T> list, final Comparator<T> comp, int limit) {
    assert limit >= 0;

    // Load an array of pairs {list-item, list-index}.
    // List-index is a secondary sort key, to give a stable sort.
    // REVIEW Can we use a simple T[], with the index implied?
    // REVIEW When limit is big relative to list size, faster to
    // mergesort. Test for this.
    int n = list.size(); // O(n) to scan list
    @SuppressWarnings({ "unchecked" })
    final ObjIntPair<T>[] pairs = new ObjIntPair[n];

    int i = 0;
    for (T item : list) { // O(n) to scan list
        pairs[i] = new ObjIntPair<T>(item, i);
        ++i;
    }

    Comparator<ObjIntPair<T>> pairComp = new Comparator<ObjIntPair<T>>() {
        public int compare(ObjIntPair<T> x, ObjIntPair<T> y) {
            int val = comp.compare(x.t, y.t);
            if (val == 0) {
                // Tie-break on original position for stability.
                // Integer.compare avoids the int-subtraction overflow trap.
                val = Integer.compare(x.i, y.i);
            }
            return val;
        }
    };

    final int length = Math.min(limit, n);
    // O(n + limit * log(limit)) to quicksort
    partialSort(pairs, pairComp, length);

    // Use an abstract list to avoid doing a copy. The result is immutable.
    return new AbstractList<T>() {
        @Override
        public T get(int index) {
            return pairs[index].t;
        }

        @Override
        public int size() {
            return length;
        }
    };
}

From source file:org.alfresco.repo.workflow.jbpm.JBPMEngine.java

/**
 * Converts JBPM {@code TaskInstance}s into {@code WorkflowTask}s, first
 * dropping tasks that belong to another tenant domain when multi-tenancy
 * is enabled.
 *
 * <p>When {@code sameSession} is true the result is a lazy view that
 * converts each task on access; otherwise all tasks are converted eagerly
 * and individual conversion failures are logged and skipped.
 */
protected List<WorkflowTask> getWorkflowTasks(List<TaskInstance> tasks, boolean sameSession) {
    final List<TaskInstance> visibleTasks;
    if (!tenantService.isEnabled()) {
        visibleTasks = tasks;
    } else {
        visibleTasks = new ArrayList<TaskInstance>(tasks.size());
        for (TaskInstance candidate : tasks) {
            try {
                tenantService.checkDomain(candidate.getTask().getProcessDefinition().getName());
                visibleTasks.add(candidate);
            } catch (RuntimeException ignored) {
                // Deliberately skipped - domain mismatch, e.g. when querying
                // by group authority.
            }
        }
    }

    if (sameSession) {
        // Lazy read-only view; each task is converted when accessed.
        return new AbstractList<WorkflowTask>() {
            @Override
            public WorkflowTask get(int index) {
                return createWorkflowTask(visibleTasks.get(index));
            }

            @Override
            public int size() {
                return visibleTasks.size();
            }
        };
    }

    List<WorkflowTask> converted = new ArrayList<WorkflowTask>(visibleTasks.size());
    for (TaskInstance task : visibleTasks) {
        try {
            converted.add(createWorkflowTask(task));
        } catch (Exception ex) {
            logger.warn("Unable to load workflow task: '" + task + "' due to exception.", ex);
        }
    }
    return converted;
}