Example usage for java.util.LinkedHashSet.size()

List of usage examples for java.util.LinkedHashSet.size()

Introduction

On this page you can find example usages of java.util.LinkedHashSet.size().

Prototype

int size();

Document

Returns the number of elements in this set (its cardinality).
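
A minimal sketch of the behavior (class name and values are illustrative, not taken from the sources below):

import java.util.LinkedHashSet;

public class SizeDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> set = new LinkedHashSet<>();
        set.add("a");
        set.add("b");
        set.add("a"); // duplicate insert is ignored, so the cardinality does not change
        System.out.println(set.size()); // prints 2
    }
}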

Usage

From source file:Simulator.PerformanceCalculation.java

public JPanel waitTime2() {
    LinkedHashSet<Integer> no = new LinkedHashSet<>();
    LinkedHashMap<Integer, ArrayList<Double>> wait1 = new LinkedHashMap<>();

    for (Map.Entry<Integer, TraceObject> entry : l.getLocalTrace().entrySet()) {
        TraceObject traceObject = entry.getValue();

        if (wait1.get(traceObject.getSurgeonId()) == null) {
            ArrayList<Double> details = new ArrayList<>();
            details.add(traceObject.getWaitTime2());
            wait1.put(traceObject.getSurgeonId(), details);
        } else {
            wait1.get(traceObject.getSurgeonId()).add(traceObject.getWaitTime2());
        }

        no.add(traceObject.getSurgeonId());
    }
    String[] column = new String[no.size()];

    String series1 = "Wait Time 2";
    for (int i = 0; i < no.size(); i++) {
        column[i] = "Surgeon " + (i + 1);
    }

    DefaultCategoryDataset dataset = new DefaultCategoryDataset();

    LinkedHashMap<Integer, Double> average = new LinkedHashMap<>();
    for (Map.Entry<Integer, ArrayList<Double>> entry : wait1.entrySet()) {
        Integer integer = entry.getKey();
        ArrayList<Double> arrayList = entry.getValue();
        double total = 0;
        for (Double double1 : arrayList) {
            total += double1;
        }
        average.put(integer, total / arrayList.size());
    }

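    // Note: this loop assumes surgeon IDs run contiguously from 1 to average.size().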
    for (int i = 1; i <= average.size(); i++) {
        dataset.addValue(Math.round(average.get(i) / 600), series1, column[i - 1]);
    }

    JFreeChart chart = ChartFactory.createBarChart("Wait Time 2", // chart title
            "Surgeon ID", // domain axis label
            "Days", // range axis label
            dataset, // data
            PlotOrientation.VERTICAL, // orientation
            true, // include legend
            true, // tooltips?
            false // URLs?
    );

    return new ChartPanel(chart);
}
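
Note that the set no exists only to count distinct surgeon IDs; since wait1 is keyed by the same IDs, wait1.size() would yield the same count.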

From source file:gaffer.accumulostore.operation.spark.handler.AccumuloStoreRelation.java

private void buildSchema() {
    LOGGER.info("Building Spark SQL schema for groups {}", StringUtils.join(groups, ','));
    for (final String group : groups) {
        final SchemaElementDefinition elementDefn = store.getSchema().getElement(group);
        final List<StructField> structFieldList = new ArrayList<>();
        if (elementDefn instanceof SchemaEntityDefinition) {
            entityOrEdgeByGroup.put(group, EntityOrEdge.ENTITY);
            final SchemaEntityDefinition entityDefinition = (SchemaEntityDefinition) elementDefn;
            final String vertexClass = store.getSchema().getType(entityDefinition.getVertex()).getClassString();
            final DataType vertexType = getType(vertexClass);
            if (vertexType == null) {
                throw new RuntimeException("Vertex must be a recognised type: found " + vertexClass);
            }
            LOGGER.info("Group {} is an entity group - {} is of type {}", group, VERTEX_COL_NAME, vertexType);
            structFieldList.add(new StructField(VERTEX_COL_NAME, vertexType, true, Metadata.empty()));
        } else {
            entityOrEdgeByGroup.put(group, EntityOrEdge.EDGE);
            final SchemaEdgeDefinition edgeDefinition = (SchemaEdgeDefinition) elementDefn;
            final String srcClass = store.getSchema().getType(edgeDefinition.getSource()).getClassString();
            final String dstClass = store.getSchema().getType(edgeDefinition.getDestination()).getClassString();
            final DataType srcType = getType(srcClass);
            final DataType dstType = getType(dstClass);
            if (srcType == null || dstType == null) {
                throw new RuntimeException("Both source and destination must be recognised types: source was "
                        + srcClass + " destination was " + dstClass);
            }
            LOGGER.info("Group {} is an edge group - {} is of type {}, {} is of type {}", group, SRC_COL_NAME,
                    srcType, DST_COL_NAME, dstType);
            structFieldList.add(new StructField(SRC_COL_NAME, srcType, true, Metadata.empty()));
            structFieldList.add(new StructField(DST_COL_NAME, dstType, true, Metadata.empty()));
        }
        final Set<String> properties = elementDefn.getProperties();
        for (final String property : properties) {
            final String propertyClass = elementDefn.getPropertyClass(property).getCanonicalName();
            final DataType propertyType = getType(propertyClass);
            if (propertyType == null) {
                LOGGER.warn("Ignoring property {} as it is not a recognised type", property);
            } else {
                LOGGER.info("Property {} is of type {}", property, propertyType);
                structFieldList.add(new StructField(property, propertyType, true, Metadata.empty()));
            }
        }
        structTypeByGroup.put(group,
                new StructType(structFieldList.toArray(new StructField[structFieldList.size()])));
    }
    // Create reverse map of field name to StructField
    final Map<String, Set<StructField>> fieldToStructs = new HashMap<>();
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            if (fieldToStructs.get(field) == null) {
                fieldToStructs.put(field, new HashSet<StructField>());
            }
            fieldToStructs.get(field).add(groupSchema.apply(field));
        }
    }
    // Check consistency, i.e. if the same field appears in multiple groups then the types are consistent
    for (final Map.Entry<String, Set<StructField>> entry : fieldToStructs.entrySet()) {
        final Set<StructField> schemas = entry.getValue();
        if (schemas.size() > 1) {
            throw new IllegalArgumentException("Inconsistent fields: the field " + entry.getKey()
                    + " has more than one definition: " + StringUtils.join(schemas, ','));
        }
    }
    // Merge schemas for groups together - fields should appear in the order the groups were provided
    final LinkedHashSet<StructField> fields = new LinkedHashSet<>();
    fields.add(new StructField(GROUP, DataTypes.StringType, false, Metadata.empty()));
    usedProperties.add(GROUP);
    for (final String group : groups) {
        final StructType groupSchema = structTypeByGroup.get(group);
        for (final String field : groupSchema.fieldNames()) {
            final StructField struct = groupSchema.apply(field);
            // Add struct to fields unless it has already been added
            if (!fields.contains(struct)) {
                fields.add(struct);
                usedProperties.add(field);
            }
        }
    }
    structType = new StructType(fields.toArray(new StructField[fields.size()]));
    LOGGER.info("Schema is {}", structType);
}

From source file:pt.webdetails.cda.utils.mondrian.CompactBandedMDXTableModel.java

public CompactBandedMDXTableModel(final Result resultSet, final int rowLimit) {
    if (resultSet == null) {
        throw new NullPointerException("ResultSet returned was null");
    }
    this.resultSet = resultSet;

    // The row count is the product of all axis sizes. If an axis contains more than
    // one member, Mondrian has already performed the crossjoin for us.

    // The column count is the count of all hierarchies across all axes.

    final Axis[] axes = this.resultSet.getAxes();
    this.rowCount = 0;
    this.axesSize = new int[axes.length];
    final int[] axesMembers = new int[axes.length];
    @SuppressWarnings("unchecked")
    final List<Dimension>[] dimensionsForMembersPerAxis = new List[axes.length];
    @SuppressWarnings("unchecked")
    final List<Integer>[] membersPerAxis = new List[axes.length];

    // process the column axis first ..
    if (axesSize.length > 0) {
        final Axis axis = axes[0];
        final List<Position> positions = axis.getPositions();

        axesSize[0] = positions.size();
        if (positions.isEmpty()) {
            noMeasures = true;
        }
    }

    // Axis contains (zero or more) positions, which contains (zero or more) members
    for (int axesIndex = axes.length - 1; axesIndex >= 1; axesIndex -= 1) {
        final Axis axis = axes[axesIndex];
        final List<Position> positions = axis.getPositions();

        axesSize[axesIndex] = positions.size();
        if (positions.isEmpty()) {
            noMeasures = true;
        }

        final ArrayList<Integer> memberList = new ArrayList<Integer>();
        final ArrayList<Dimension> dimensionsForMembers = new ArrayList<Dimension>();
        for (int positionsIndex = 0; positionsIndex < positions.size(); positionsIndex++) {
            final Position position = positions.get(positionsIndex);
            for (int positionIndex = 0; positionIndex < position.size(); positionIndex++) {
                Member m = position.get(positionIndex);
                final Dimension dimension = m.getDimension();
                int hierarchyLevelCount = 1; // Originally was 0

                // Change compared to BandedMDXTM: we don't walk up the parent
                // members here, because we don't want all hierarchy levels.

                if (memberList.size() <= positionIndex) {
                    memberList.add(hierarchyLevelCount);
                    dimensionsForMembers.add(dimension);
                } else {
                    final Integer existingLevel = memberList.get(positionIndex);
                    if (existingLevel.intValue() < hierarchyLevelCount) {
                        memberList.set(positionIndex, hierarchyLevelCount);
                        dimensionsForMembers.set(positionIndex, dimension);
                    }
                }
            }
        }

        int memberCount = 0;
        for (int i = 0; i < memberList.size(); i++) {
            memberCount += memberList.get(i);
        }
        axesMembers[axesIndex] = memberCount;
        dimensionsForMembersPerAxis[axesIndex] = dimensionsForMembers;
        membersPerAxis[axesIndex] = memberList;
    }

    if (axesSize.length > 1) {
        rowCount = axesSize[1];
        for (int i = 2; i < axesSize.length; i++) {
            final int size = axesSize[i];
            rowCount *= size;
        }
    }
    if (!noMeasures) {
        rowCount = Math.max(1, rowCount);
    }
    if (axesSize.length == 0) {
        columnCount = 1;
    } else {
        columnCount = axesSize[0];
    }
    for (int i = 1; i < axesMembers.length; i++) {
        columnCount += axesMembers[i];
    }

    columnNames = new String[columnCount];
    columnToDimensionMapping = new Dimension[columnCount];
    columnToAxisPosition = new int[columnCount];

    int columnIndex = 0;
    int dimColIndex = 0;

    //    final FastStack memberStack = new FastStack();
    for (int axesIndex = axes.length - 1; axesIndex >= 1; axesIndex -= 1) {
        final Axis axis = axes[axesIndex];
        final List<Position> positions = axis.getPositions();
        final LinkedHashSet<String> columnNamesSet = new LinkedHashSet<String>();
        for (int positionsIndex = 0; positionsIndex < positions.size(); positionsIndex++) {
            final Position position = positions.get(positionsIndex);
            for (int positionIndex = 0; positionIndex < position.size(); positionIndex++) {
                //          memberStack.clear();
                Member m = position.get(positionIndex);
                // Get member's hierarchy
                final String name = m.getHierarchy().getName();
                if (!columnNamesSet.contains(name)) {
                    columnNamesSet.add(name);
                }

            }
        }

        if (columnNamesSet.size() != axesMembers[axesIndex]) {
            logger.error("ERROR: Number of names does not equal the pre-counted number.");
        }

        final List<Dimension> dimForMemberPerAxis = dimensionsForMembersPerAxis[axesIndex];
        final List<Integer> memberCntPerAxis = membersPerAxis[axesIndex];
        for (int i = 0; i < memberCntPerAxis.size(); i++) {
            final Integer count = memberCntPerAxis.get(i);
            final Dimension dim = dimForMemberPerAxis.get(i);
            for (int x = 0; x < count.intValue(); x += 1) {
                this.columnToDimensionMapping[dimColIndex + x] = dim;
                this.columnToAxisPosition[dimColIndex + x] = axesIndex;
            }
            dimColIndex = count.intValue() + dimColIndex;
        }

        final String[] names = columnNamesSet.toArray(new String[columnNamesSet.size()]);
        System.arraycopy(names, 0, this.columnNames, columnIndex, names.length);
        columnIndex += names.length;
    }

    if (axesSize.length > 0) {
        // now create the column names for the column-axis
        final Axis axis = axes[0];
        final List<Position> positions = axis.getPositions();
        for (int i = 0; i < positions.size(); i++) {
            final Position position = positions.get(i);
            final StringBuffer positionName = new StringBuffer(100);
            for (int j = 0; j < position.size(); j++) {
                if (j != 0) {
                    positionName.append('/');
                }
                final Member member = position.get(j);
                //positionName.append(MondrianUtil.getUniqueMemberName(member));
                positionName.append(member.getName());

            }
            columnNames[columnIndex] = positionName.toString();
            columnIndex += 1;
        }
    }
    if (axesSize.length == 0) {
        columnNames[0] = "Measure";
    }
    if (rowLimit > 0) {
        rowCount = Math.min(rowLimit, rowCount);
    }
}
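
The recurring pattern in this constructor, collecting names into a LinkedHashSet so duplicates collapse while encounter order is preserved, then using size() to allocate an exactly-sized array, in isolation (names below are illustrative):

import java.util.LinkedHashSet;

public class ColumnNamesDemo {
    public static void main(String[] args) {
        LinkedHashSet<String> columnNames = new LinkedHashSet<>();
        for (String name : new String[] { "Year", "Product", "Year", "Store" }) {
            columnNames.add(name); // re-adding "Year" is a no-op
        }
        // size() is the number of distinct names, so the array is exactly sized.
        String[] names = columnNames.toArray(new String[columnNames.size()]);
        System.out.println(names.length); // prints 3
    }
}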

From source file:com.datatorrent.lib.io.fs.AbstractFileInputOperatorTest.java

@Test
public void testPartitioning() throws Exception {
    LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
    oper.getScanner().setFilePatternRegexp(".*partition([\\d]*)");
    oper.setDirectory(new File(testMeta.dir).getAbsolutePath());

    Path path = new Path(new File(testMeta.dir).getAbsolutePath());
    FileContext.getLocalFSFileContext().delete(path, true);
    for (int file = 0; file < 4; file++) {
        FileUtils.write(new File(testMeta.dir, "partition00" + file), "");
    }

    List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
    partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
    Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions,
            new PartitioningContextImpl(null, 2));
    Assert.assertEquals(2, newPartitions.size());
    Assert.assertEquals(1, oper.getCurrentPartitions()); // partitioned() wasn't called

    for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
        Assert.assertNotSame(oper, p.getPartitionedInstance());
        Assert.assertNotSame(oper.getScanner(), p.getPartitionedInstance().getScanner());
        Set<String> consumed = Sets.newHashSet();
        LinkedHashSet<Path> files = p.getPartitionedInstance().getScanner()
                .scan(FileSystem.getLocal(new Configuration(false)), path, consumed);
        Assert.assertEquals("partition " + files, 2, files.size());
    }
}

From source file:com.datatorrent.lib.io.fs.AbstractFileInputOperatorTest.java

/**
 * Partitions the operator in two and
 * creates ten files with the file's index at the start, i.e. 1_file, 2_file, etc.
 * The scanner returns this index from its getPartition method, so
 * each partition should read 5 files, as file indexes run from 0 to 9 (including 0 and 9).
 * @throws Exception
 */
@Test
public void testWithCustomScanner() throws Exception {
    LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
    oper.setScanner(new MyScanner());
    oper.getScanner().setFilePatternRegexp(".*partition_([\\d]*)");
    oper.setDirectory(new File(testMeta.dir).getAbsolutePath());

    Random rand = new Random();
    Path path = new Path(new File(testMeta.dir).getAbsolutePath());
    FileContext.getLocalFSFileContext().delete(path, true);
    for (int file = 0; file < 10; file++) {
        FileUtils.write(new File(testMeta.dir, file + "_partition_00" + rand.nextInt(100)), "");
    }

    List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
    partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
    Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions,
            new PartitioningContextImpl(null, 2));
    Assert.assertEquals(2, newPartitions.size());
    Assert.assertEquals(1, oper.getCurrentPartitions()); // partitioned() wasn't called

    for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
        Assert.assertNotSame(oper, p.getPartitionedInstance());
        Assert.assertNotSame(oper.getScanner(), p.getPartitionedInstance().getScanner());
        Set<String> consumed = Sets.newHashSet();
        LinkedHashSet<Path> files = p.getPartitionedInstance().getScanner()
                .scan(FileSystem.getLocal(new Configuration(false)), path, consumed);
        Assert.assertEquals("partition " + files, 5, files.size());
    }
}

From source file:ArrayUtils.java

/**
 * Merges all elements of a set of arrays into a single array with no
 * duplicates. For primitive types.
 * 
 * @param type
 *            The component type of the result
 * @param arrays
 *            The arrays to merge
 * @return A new array containing every distinct element of
 *         <code>arrays</code>, in the order first encountered
 * @throws NullPointerException
 *             If any of the arrays is null
 * @throws ArrayStoreException
 *             If elements in the arrays are incompatible with
 *             <code>type</code>
 */
public static Object mergeInclusiveP(Class<?> type, Object... arrays) {
    java.util.LinkedHashSet<Object> set = new java.util.LinkedHashSet<Object>();
    int i, j;
    for (i = 0; i < arrays.length; i++) {
        int len = Array.getLength(arrays[i]);
        for (j = 0; j < len; j++)
            set.add(Array.get(arrays[i], j));
    }
    Object ret = Array.newInstance(type, set.size());
    i = 0;
    for (Object el : set) {
        put(ret, el, i);
        i++;
    }
    return ret;
}
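
A sketch of how this helper might be invoked (it assumes the ArrayUtils class above, including its put helper, is available on the classpath):

public class MergeDemo {
    public static void main(String[] args) {
        int[] a = { 1, 2, 3 };
        int[] b = { 3, 4 };
        // The result is created via Array.newInstance(type, ...), so it can be
        // cast back to the primitive array type named by the Class token.
        int[] merged = (int[]) ArrayUtils.mergeInclusiveP(int.class, a, b);
        System.out.println(java.util.Arrays.toString(merged)); // [1, 2, 3, 4]
    }
}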

From source file:xyz.openmodloader.gradle.task.MergeJarsTask.java

private void processMethods(ClassNode cClass, ClassNode sClass) {
    List<MethodNode> cMethods = cClass.methods;
    List<MethodNode> sMethods = sClass.methods;
    LinkedHashSet<MethodWrapper> allMethods = Sets.newLinkedHashSet();

    int cPos = 0;
    int sPos = 0;
    int cLen = cMethods.size();
    int sLen = sMethods.size();
    String clientName = "";
    String lastName = clientName;
    String serverName = "";
    while (cPos < cLen || sPos < sLen) {
        do {
            if (sPos >= sLen) {
                break;
            }
            MethodNode sM = sMethods.get(sPos);
            serverName = sM.name;
            if (!serverName.equals(lastName) && cPos != cLen) {
                if (DEBUG) {
                    System.out.printf("Server -skip : %s %s %d (%s %d) %d [%s]\n", sClass.name, clientName,
                            cLen - cPos, serverName, sLen - sPos, allMethods.size(), lastName);
                }
                break;
            }
            MethodWrapper mw = new MethodWrapper(sM);
            mw.server = true;
            allMethods.add(mw);
            if (DEBUG) {
                System.out.printf("Server *add* : %s %s %d (%s %d) %d [%s]\n", sClass.name, clientName,
                        cLen - cPos, serverName, sLen - sPos, allMethods.size(), lastName);
            }
            sPos++;
        } while (sPos < sLen);
        do {
            if (cPos >= cLen) {
                break;
            }
            MethodNode cM = cMethods.get(cPos);
            lastName = clientName;
            clientName = cM.name;
            if (!clientName.equals(lastName) && sPos != sLen) {
                if (DEBUG) {
                    System.out.printf("Client -skip : %s %s %d (%s %d) %d [%s]\n", cClass.name, clientName,
                            cLen - cPos, serverName, sLen - sPos, allMethods.size(), lastName);
                }
                break;
            }
            MethodWrapper mw = new MethodWrapper(cM);
            mw.client = true;
            allMethods.add(mw);
            if (DEBUG) {
                System.out.printf("Client *add* : %s %s %d (%s %d) %d [%s]\n", cClass.name, clientName,
                        cLen - cPos, serverName, sLen - sPos, allMethods.size(), lastName);
            }
            cPos++;
        } while (cPos < cLen);
    }

    cMethods.clear();
    sMethods.clear();

    for (MethodWrapper mw : allMethods) {
        if (DEBUG) {
            System.out.println(mw);
        }
        cMethods.add(mw.node);
        sMethods.add(mw.node);
        if (!(mw.server && mw.client)) {
            if (mw.node.visibleAnnotations == null) {
                mw.node.visibleAnnotations = Lists.newArrayListWithExpectedSize(1);
            }

            mw.node.visibleAnnotations.add(getSideAnn(mw.client));
        }
    }
}
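
The merge above relies on MethodWrapper's equals/hashCode to collapse methods present on both sides. The underlying LinkedHashSet merge pattern, reduced to a minimal sketch (element type and values are illustrative):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;

public class MergeListsDemo {
    public static void main(String[] args) {
        List<String> client = Arrays.asList("a", "b", "c");
        List<String> server = Arrays.asList("b", "c", "d");
        // LinkedHashSet keeps first-insertion order and drops duplicates,
        // so each shared method appears exactly once in the merged view.
        LinkedHashSet<String> merged = new LinkedHashSet<>(client);
        merged.addAll(server);
        System.out.println(merged + " size=" + merged.size()); // [a, b, c, d] size=4
    }
}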

From source file:com.persinity.ndt.dbdiff.rel.JsonTransformEntityStore.java

@Override
public Collection<TransformEntity> loadTransformEntities() {
    return resource.accessAndAutoClose(new Resource.Accessor<InputStream, LinkedHashSet<TransformEntity>>() {
        @Override
        public InputStream getResource() {
            final InputStream fis;
            try {
                fis = new FileInputStream(sourceFile);
            } catch (FileNotFoundException e) {
                throw new RuntimeException(e);
            }
            return new BufferedInputStream(fis);
        }

        @Override
        public LinkedHashSet<TransformEntity> access(final InputStream is) throws Exception {
            final LinkedHashSet<TransformEntity> transformEntities = new LinkedHashSet<>();
            final String jsonTxt = IOUtils.toString(is);
            final JSONObject obj = new JSONObject(jsonTxt);
            final JSONArray array = obj.getJSONArray(TRANSFORMATIONS_KEY);
            for (int i = 0; i < array.length(); i++) {
                final JSONObject element = array.getJSONObject(i);
                final String targetEntity = element.getString(TARGET_ENTITY_KEY);
                final String transformStatement = element.getString(TRANSFORM_STATEMENT_KEY);
                final String sourceLeadingEntity = element.getString(SOURCE_LEADING_ENTITY_KEY);
                final Set<String> sourceLeadingColumns = convertToStringSet(
                        element.getJSONArray(SOURCE_LEADING_COLUMNS_KEY));
                final TransformEntity transformEntity = new TransformEntity(targetEntity, transformStatement,
                        sourceLeadingEntity, sourceLeadingColumns);
                transformEntities.add(transformEntity);
            }
            log.info("Read transformations: {} from file: {}", transformEntities.size(), sourceFile);
            return transformEntities;
        }
    });
}

From source file:org.apache.apex.malhar.lib.io.fs.AbstractFileInputOperatorTest.java

@Test
public void testPartitioning() throws Exception {
    LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
    oper.getScanner().setFilePatternRegexp(".*partition([\\d]*)");
    oper.setDirectory(new File(testMeta.dir).getAbsolutePath());

    Path path = new Path(new File(testMeta.dir).getAbsolutePath());
    FileContext.getLocalFSFileContext().delete(path, true);
    for (int file = 0; file < 4; file++) {
        FileUtils.write(new File(testMeta.dir, "partition00" + file), "");
    }

    List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
    partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
    Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions,
            new PartitioningContextImpl(null, 2));
    Assert.assertEquals(2, newPartitions.size());
    Assert.assertEquals(1, oper.getCurrentPartitions()); // partitioned() wasn't called

    for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
        Assert.assertNotSame(oper, p.getPartitionedInstance());
        Assert.assertNotSame(oper.getScanner(), p.getPartitionedInstance().getScanner());
        Set<String> consumed = Sets.newHashSet();
        LinkedHashSet<Path> files = p.getPartitionedInstance().getScanner()
                .scan(FileSystem.getLocal(new Configuration(false)), path, consumed);
        Assert.assertEquals("partition " + files, 3, files.size());
    }
}

From source file:org.apache.apex.malhar.lib.io.fs.AbstractFileInputOperatorTest.java

/**
 * Partitions the operator in two and
 * creates ten files with the file's index at the start, i.e. 1_file, 2_file, etc.
 * The scanner returns this index from its getPartition method, so
 * each partition should read 5 files, as file indexes run from 0 to 9 (including 0 and 9).
 * @throws Exception
 */
@Test
public void testWithCustomScanner() throws Exception {
    LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
    oper.setScanner(new MyScanner());
    oper.getScanner().setFilePatternRegexp(".*partition_([\\d]*)");
    oper.setDirectory(new File(testMeta.dir).getAbsolutePath());

    Random rand = new Random();
    Path path = new Path(new File(testMeta.dir).getAbsolutePath());
    FileContext.getLocalFSFileContext().delete(path, true);
    for (int file = 0; file < 10; file++) {
        FileUtils.write(new File(testMeta.dir, file + "_partition_00" + rand.nextInt(100)), "");
    }

    List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
    partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
    Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions,
            new PartitioningContextImpl(null, 2));
    Assert.assertEquals(2, newPartitions.size());
    Assert.assertEquals(1, oper.getCurrentPartitions()); // partitioned() wasn't called

    for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
        Assert.assertNotSame(oper, p.getPartitionedInstance());
        Assert.assertNotSame(oper.getScanner(), p.getPartitionedInstance().getScanner());
        Set<String> consumed = Sets.newHashSet();
        LinkedHashSet<Path> files = p.getPartitionedInstance().getScanner()
                .scan(FileSystem.getLocal(new Configuration(false)), path, consumed);
        Assert.assertEquals("partition " + files, 6, files.size());
    }
}