Example usage for java.util List clear

List of usage examples for java.util List clear

Introduction

On this page you can find example usages of java.util.List.clear().

Prototype

void clear();

Source Link

Document

Removes all of the elements from this list (optional operation).

Usage

From source file:gdsc.core.match.MatchCalculator.java

/**
 * Empties the given list, treating a null reference as a no-op.
 *
 * @param list the list to empty; may be null
 */
private static <T> void clear(List<T> list) {
    if (list == null) {
        return;
    }
    list.clear();
}

From source file:edu.uci.ics.asterix.optimizer.rules.typecast.StaticTypeCastUtil.java

/**
 * This method statically cast the type of records from their current type to the required type.
 *
 * @param func
 *            The record constructor expression.
 * @param reqType
 *            The required type.
 * @param inputType
 *            The current type.
 * @param env
 *            The type environment.
 * @return true if the arguments of {@code func} were rewritten to match the required
 *         type; false if the cast must be deferred (the expression is not a record
 *         constructor, or the input type is open and misses a required field).
 * @throws AlgebricksException if a mismatch between two closed types is detected,
 *             since that can never be reconciled at runtime.
 */
private static boolean staticRecordTypeCast(AbstractFunctionCallExpression func, ARecordType reqType,
        ARecordType inputType, IVariableTypeEnvironment env) throws AlgebricksException {
    // Only record-constructor calls can be statically cast here.
    if (!(func.getFunctionIdentifier() == AsterixBuiltinFunctions.OPEN_RECORD_CONSTRUCTOR
            || func.getFunctionIdentifier() == AsterixBuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR)) {
        return false;
    }
    IAType[] reqFieldTypes = reqType.getFieldTypes();
    String[] reqFieldNames = reqType.getFieldNames();
    IAType[] inputFieldTypes = inputType.getFieldTypes();
    String[] inputFieldNames = inputType.getFieldNames();

    // fieldPermutation[j] = index of the input field that supplies required field j (-1 = none).
    int[] fieldPermutation = new int[reqFieldTypes.length];
    // nullFields[j] = required field j is optional and absent from the input (filled with NULL).
    boolean[] nullFields = new boolean[reqFieldTypes.length];
    // openFields[i] = input field i did not match any closed required field (goes to the open part).
    boolean[] openFields = new boolean[inputFieldTypes.length];

    Arrays.fill(nullFields, false);
    Arrays.fill(openFields, true);
    Arrays.fill(fieldPermutation, -1);

    // forward match: match from actual to required
    boolean matched = false;
    for (int i = 0; i < inputFieldNames.length; i++) {
        String fieldName = inputFieldNames[i];
        IAType fieldType = inputFieldTypes[i];

        // A record constructor alternates (name, value) pairs, so field i needs
        // at least 2*i+2 arguments; otherwise this is not a record constructor.
        if (2 * i + 1 > func.getArguments().size()) {
            // it is not a record constructor function
            return false;
        }

        // 2*i+1 is the index of field value expression
        ILogicalExpression arg = func.getArguments().get(2 * i + 1).getValue();
        matched = false;
        for (int j = 0; j < reqFieldNames.length; j++) {
            String reqFieldName = reqFieldNames[j];
            IAType reqFieldType = reqFieldTypes[j];
            if (fieldName.equals(reqFieldName)) {
                // Case 1: exact type match.
                if (fieldType.equals(reqFieldType)) {
                    fieldPermutation[j] = i;
                    openFields[i] = false;
                    matched = true;

                    // Recurse into nested constructor expressions so inner
                    // records/lists are cast as well.
                    if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
                        ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
                        rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
                    }
                    break;
                }

                // Case 2: required field is optional (UNION with NULL) — match the
                // input against the union's item type or NULL.
                if (reqFieldType.getTypeTag() == ATypeTag.UNION
                        && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
                    IAType itemType = ((AUnionType) reqFieldType).getUnionList()
                            .get(AUnionType.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
                    // NOTE(review): this reassignment makes the non-optional item type
                    // visible to the fall-through cast in Case 4 below as well.
                    reqFieldType = itemType;
                    if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
                        fieldPermutation[j] = i;
                        openFields[i] = false;
                        matched = true;

                        // rewrite record expr
                        if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
                            ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
                            rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
                        }
                        break;
                    }
                }

                // Case 3: input field is optional but the required field is not —
                // delay the null check to runtime by wrapping with not-null.
                if (fieldType.getTypeTag() == ATypeTag.UNION
                        && NonTaggedFormatUtil.isOptionalField((AUnionType) fieldType)) {
                    IAType itemType = ((AUnionType) fieldType).getUnionList()
                            .get(AUnionType.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
                    if (reqFieldType.equals(itemType)) {
                        fieldPermutation[j] = i;
                        openFields[i] = false;
                        matched = true;

                        ScalarFunctionCallExpression notNullFunc = new ScalarFunctionCallExpression(
                                FunctionUtils.getFunctionInfo(AsterixBuiltinFunctions.NOT_NULL));
                        notNullFunc.getArguments().add(new MutableObject<ILogicalExpression>(arg));
                        //wrap the not null function to the original function
                        func.getArguments().get(2 * i + 1).setValue(notNullFunc);
                        break;
                    }
                }

                // Case 4: same name, different record types — cast the nested
                // constructor expression to the required field type.
                if (arg.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
                    ScalarFunctionCallExpression scalarFunc = (ScalarFunctionCallExpression) arg;
                    rewriteFuncExpr(scalarFunc, reqFieldType, fieldType, env);
                    fieldPermutation[j] = i;
                    openFields[i] = false;
                    matched = true;
                    break;
                }
            }
        }
        // the input has extra fields: only legal if the required type is open
        if (!matched && !reqType.isOpen()) {
            throw new AlgebricksException(
                    "static type mismatch: the input record includes an extra closed field " + fieldName + ":"
                            + fieldType + "! Please check the field name and type.");
        }
    }

    // backward match: match from required to actual
    for (int i = 0; i < reqFieldNames.length; i++) {
        String reqFieldName = reqFieldNames[i];
        IAType reqFieldType = reqFieldTypes[i];
        matched = false;
        for (int j = 0; j < inputFieldNames.length; j++) {
            String fieldName = inputFieldNames[j];
            IAType fieldType = inputFieldTypes[j];
            if (!fieldName.equals(reqFieldName))
                continue;
            // should check open field here
            // because number of entries in fieldPermuations is the
            // number of required schema fields
            // here we want to check if an input field is matched
            // the entry index of fieldPermuatons is req field index
            if (!openFields[j]) {
                matched = true;
                break;
            }

            // match the optional field
            if (reqFieldType.getTypeTag() == ATypeTag.UNION
                    && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
                IAType itemType = ((AUnionType) reqFieldType).getUnionList()
                        .get(AUnionType.OPTIONAL_TYPE_INDEX_IN_UNION_LIST);
                if (fieldType.equals(BuiltinType.ANULL) || fieldType.equals(itemType)) {
                    matched = true;
                    break;
                }
            }
        }
        if (matched)
            continue;

        // Required field has no counterpart in the input.
        if (reqFieldType.getTypeTag() == ATypeTag.UNION
                && NonTaggedFormatUtil.isOptionalField((AUnionType) reqFieldType)) {
            // add a null field
            nullFields[i] = true;
        } else {
            // no matched field in the input for a required closed field
            if (inputType.isOpen()) {
                //if the input type is open, return false, give that to dynamic type cast to defer the error to the runtime
                return false;
            } else {
                throw new AlgebricksException(
                        "static type mismatch: the input record misses a required closed field " + reqFieldName
                                + ":" + reqFieldType + "! Please check the field name and type.");
            }
        }
    }

    // Rebuild the constructor's argument list in required-schema order.
    List<Mutable<ILogicalExpression>> arguments = func.getArguments();
    List<Mutable<ILogicalExpression>> originalArguments = new ArrayList<Mutable<ILogicalExpression>>();
    originalArguments.addAll(arguments);
    arguments.clear();
    // re-order the closed part and fill in null fields
    for (int i = 0; i < fieldPermutation.length; i++) {
        int pos = fieldPermutation[i];
        if (pos >= 0) {
            // Copy the (name, value) pair of the matched input field.
            arguments.add(originalArguments.get(2 * pos));
            arguments.add(originalArguments.get(2 * pos + 1));
        }
        if (nullFields[i]) {
            // add a null field
            arguments.add(new MutableObject<ILogicalExpression>(
                    new ConstantExpression(new AsterixConstantValue(new AString(reqFieldNames[i])))));
            arguments.add(new MutableObject<ILogicalExpression>(
                    new ConstantExpression(new AsterixConstantValue(ANull.NULL))));
        }
    }

    // add the open part: unmatched input fields, with their types relaxed
    for (int i = 0; i < openFields.length; i++) {
        if (openFields[i]) {
            arguments.add(originalArguments.get(2 * i));
            Mutable<ILogicalExpression> expRef = originalArguments.get(2 * i + 1);
            injectCastToRelaxType(expRef, inputFieldTypes[i], env);
            arguments.add(expRef);
        }
    }
    return true;
}

From source file:com.px100systems.data.core.InMemoryDatabase.java

/**
 * Used by the Restore utility. Transfers emergency shutdown data to the database
 * backing the data grid.
 *
 * @param f one of the backup files
 * @param persister persistence server
 * @throws RuntimeException wrapping any I/O or persistence failure
 */
public static void readBackupFile(File f, final PersistenceProvider persister) {
    BackupFile file = new BackupFile(f);
    final Connection conn = persister.open();
    try {
        final List<RawRecord> buffer = new ArrayList<RawRecord>();
        file.read(new PersistenceProvider.LoadCallback() {
            @Override
            public void process(RawRecord record) {
                buffer.add(record);
                // Flush in batches (of 101 — kept as the original threshold)
                // to bound memory usage while reading large backups.
                if (buffer.size() > 100) {
                    try {
                        persister.transactionalSave(conn, buffer);
                        buffer.clear();
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            }
        });

        // Flush any remaining records that did not fill a whole batch.
        if (!buffer.isEmpty()) {
            try {
                persister.transactionalSave(conn, buffer);
                buffer.clear();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        // Close both resources even if the first close() throws: the original
        // code leaked conn whenever file.close() failed.
        try {
            file.close();
        } finally {
            conn.close();
        }
    }
}

From source file:io.fabric8.jube.local.NodeHelper.java

/**
 * Marks the pod as terminated: replaces any existing container statuses with a
 * single terminated status carrying the given message and a finish timestamp.
 */
public static void setPodTerminated(PodStatus podStatus, String message) {
    List<ContainerStatus> statuses = podStatus.getContainerStatuses();
    if (statuses == null) {
        statuses = new ArrayList<ContainerStatus>();
        podStatus.setContainerStatuses(statuses);
    }
    statuses.clear();
    statuses.add(new ContainerStatusBuilder().withNewState().withNewTermination()
            .withMessage(message).withFinishedAt(createAtString()).endTermination().endState().build());
    podStatus.setContainerStatuses(statuses);
}

From source file:io.fabric8.jube.local.NodeHelper.java

/**
 * Marks the pod as running: replaces any existing container statuses with a
 * single running status stamped with the current start time.
 */
public static void setPodRunning(PodStatus podStatus) {
    List<ContainerStatus> statuses = podStatus.getContainerStatuses();
    if (statuses == null) {
        statuses = new ArrayList<ContainerStatus>();
        podStatus.setContainerStatuses(statuses);
    }
    statuses.clear();
    statuses.add(new ContainerStatusBuilder().withNewState().withNewRunning()
            .withStartedAt(createAtString()).endRunning().endState().build());
    podStatus.setContainerStatuses(statuses);
}

From source file:io.fabric8.jube.local.NodeHelper.java

/**
 * Marks the pod as waiting: replaces any existing container statuses with a
 * single waiting status carrying the given reason.
 */
public static void setPodWaiting(PodStatus podStatus, String reason) {
    List<ContainerStatus> statuses = podStatus.getContainerStatuses();
    if (statuses == null) {
        statuses = new ArrayList<ContainerStatus>();
        podStatus.setContainerStatuses(statuses);
    }
    statuses.clear();
    statuses.add(new ContainerStatusBuilder().withNewState().withNewWaiting().withReason(reason)
            .endWaiting().endState().build());
    podStatus.setContainerStatuses(statuses);
}

From source file:Main.java

/**
 * Removes from {@code dest} the last occurrence of each distinct element that
 * also appears in {@code src}. Either list may be null/empty, in which case
 * {@code dest} is left untouched.
 *
 * <p>Fixes two defects in the previous array-based implementation:
 * duplicate elements in {@code src} decremented the surviving-element count
 * twice for the same slot, crashing with {@code ArrayIndexOutOfBoundsException};
 * and unmatched {@code null} elements in {@code dest} were silently shuffled
 * to trailing positions.
 *
 * @param dest the list to remove elements from (modified in place); may be null
 * @param src the list whose elements should be removed from dest; may be null
 */
public static <T> void removeDuplicate(List<T> dest, List<T> src) {
    if (dest == null || dest.isEmpty()) {
        return;
    }
    if (src == null || src.isEmpty()) {
        return;
    }

    // Map each distinct element of dest to the index of its LAST occurrence
    // (later puts overwrite earlier ones), matching the original behavior.
    Map<T, Integer> lastIndex = new HashMap<T, Integer>(dest.size());
    for (int i = 0; i < dest.size(); i++) {
        lastIndex.put(dest.get(i), i);
    }

    // Collect indices to drop. remove() (rather than get()) guarantees a
    // duplicated src element cannot mark the same index twice.
    Set<Integer> toRemove = new HashSet<Integer>();
    for (T t : src) {
        Integer index = lastIndex.remove(t);
        if (index != null) {
            toRemove.add(index);
        }
    }
    if (toRemove.isEmpty()) {
        return;
    }

    // Rebuild dest without the marked indices, preserving order and any
    // unmatched null elements.
    List<T> kept = new ArrayList<T>(dest.size() - toRemove.size());
    for (int i = 0; i < dest.size(); i++) {
        if (!toRemove.contains(i)) {
            kept.add(dest.get(i));
        }
    }
    dest.clear();
    dest.addAll(kept);
}

From source file:org.dimitrovchi.conf.service.ServiceParameterUtils.java

/**
 * Infers service annotation parameters from the calling class hierarchy.
 *
 * <p>Walks the current call stack upward from the immediate caller, collecting
 * (a) the topmost class in the caller's hierarchy, (b) the richest set of
 * annotation interfaces declared as bounds of the first type parameter of any
 * class on the chain, and (c) the annotations of those types present on each
 * class, ordered from most-derived to base.
 *
 * @return the aggregated {@code AnnotationParameters} for the caller
 */
static AnnotationParameters annotationParameters() {
    // Classes on the current call chain. Index 3 is presumably the first
    // caller outside this resolver machinery — depends on exact call depth;
    // TODO(review): confirm against ClassResolver's stack layout.
    final Class<?>[] stack = ClassResolver.CLASS_RESOLVER.getClassContext();
    final Class<?> caller = stack[3];
    final List<Class<? extends Annotation>> interfaces = new ArrayList<>();
    Class<?> topCaller = null;
    // Walk up the stack while each frame's class is a subtype of the caller
    // (i.e. stay within the caller's own class hierarchy).
    for (int i = 3; i < stack.length && caller.isAssignableFrom(stack[i]); i++) {
        final Class<?> c = stack[i];
        topCaller = stack[i];
        if (c.getTypeParameters().length != 0) {
            // Only the FIRST type parameter's bounds are inspected.
            final TypeVariable<? extends Class<?>> var = c.getTypeParameters()[0];
            final List<Class<? extends Annotation>> bounds = new ArrayList<>(var.getBounds().length);
            for (final Type type : var.getBounds()) {
                // Keep only bounds that are annotation types.
                if (type instanceof Class<?> && ((Class<?>) type).isAnnotation()) {
                    bounds.add((Class) type);
                }
            }
            // Keep the largest bound set seen anywhere on the chain.
            if (bounds.size() > interfaces.size()) {
                interfaces.clear();
                interfaces.addAll(bounds);
            }
        }
    }
    // Second pass: gather the actual annotation instances per annotation type.
    // Inserting at index 0 reverses the walk, so each list ends up ordered
    // base-class-first.
    final Map<Class<? extends Annotation>, List<Annotation>> annotationMap = new IdentityHashMap<>();
    for (int i = 3; i < stack.length && caller.isAssignableFrom(stack[i]); i++) {
        final Class<?> c = stack[i];
        for (final Class<? extends Annotation> itf : interfaces) {
            final Annotation annotation = c.getAnnotation(itf);
            if (annotation != null) {
                List<Annotation> annotationList = annotationMap.get(itf);
                if (annotationList == null) {
                    annotationMap.put(itf, annotationList = new ArrayList<>());
                }
                annotationList.add(0, annotation);
            }
        }
    }
    return new AnnotationParameters(topCaller, interfaces, annotationMap);
}

From source file:com.bittorrent.mpetazzoni.common.Torrent.java

/**
 * Accumulate the piece hashes into a given {@link StringBuilder}.
 *
 * @param hashes the builder that receives every piece hash, in order
 * @param results the futures that will yield the piece hashes; emptied on success
 * @return the number of pieces appended
 * @throws IOException if any hashing task failed
 * @throws InterruptedException if interrupted while waiting for a task
 */
private static int accumulateHashes(StringBuilder hashes, List<Future<String>> results)
        throws InterruptedException, IOException {
    final int pieceCount = results.size();
    try {
        for (Future<String> pieceHash : results) {
            hashes.append(pieceHash.get());
        }
        // Only reached when every future completed: drop the consumed futures.
        results.clear();
    } catch (ExecutionException ee) {
        throw new IOException("Error while hashing the torrent data!", ee);
    }
    return pieceCount;
}

From source file:com.ask.hive.hbase.HiveHBaseTextTableInputFormat.java

/**
 * Parses the HBase columns mapping to identify the column families, qualifiers
 * and also caches the byte arrays corresponding to them. One of the Hive table
 * columns maps to the HBase row key, by default the first column.
 *
 * @param columnMapping - the column mapping specification to be parsed
 * @param colFamilies - the list of HBase column family names (cleared and repopulated)
 * @param colFamiliesBytes - the corresponding byte arrays (cleared and repopulated when non-null)
 * @param colQualifiers - the list of HBase column qualifier names (cleared and repopulated)
 * @param colQualifiersBytes - the corresponding byte arrays (cleared and repopulated when non-null)
 * @return the row key index in the column names list
 * @throws IOException if the mapping is missing, malformed, or inconsistent
 */
public static int parseColumnMapping(String columnMapping, List<String> colFamilies,
        List<byte[]> colFamiliesBytes, List<String> colQualifiers, List<byte[]> colQualifiersBytes)
        throws IOException {

    int rowKeyIndex = -1;

    if (colFamilies == null || colQualifiers == null) {
        throw new IOException("Error: caller must pass in lists for the column families " + "and qualifiers.");
    }

    colFamilies.clear();
    colQualifiers.clear();

    if (columnMapping == null) {
        throw new IOException("Error: hbase.columns.mapping missing for this HBase table.");
    }

    if (columnMapping.equals("") || columnMapping.equals(HBASE_KEY_COL)) {
        throw new IOException("Error: hbase.columns.mapping specifies only the HBase table"
                + " row key. A valid Hive-HBase table must specify at least one additional column.");
    }

    String[] mapping = columnMapping.split(",");

    for (int i = 0; i < mapping.length; i++) {
        String elem = mapping[i];
        int idxFirst = elem.indexOf(":");
        int idxLast = elem.lastIndexOf(":");

        // Each element must contain exactly one ':' separator
        // (unless it is the special row-key token, which contains none is invalid too).
        if (idxFirst < 0 || idxFirst != idxLast) {
            throw new IOException("Error: the HBase columns mapping contains a badly formed "
                    + "column family, column qualifier specification.");
        }

        if (elem.equals(HBASE_KEY_COL)) {
            rowKeyIndex = i;
            colFamilies.add(elem);
            colQualifiers.add(null);
        } else {
            String[] parts = elem.split(":");
            assert (parts.length > 0 && parts.length <= 2);
            colFamilies.add(parts[0]);

            // "family:" (trailing colon, no qualifier) maps the whole family.
            if (parts.length == 2) {
                colQualifiers.add(parts[1]);
            } else {
                colQualifiers.add(null);
            }
        }
    }

    // No explicit row-key token: the row key defaults to the first column.
    if (rowKeyIndex == -1) {
        colFamilies.add(0, HBASE_KEY_COL);
        colQualifiers.add(0, null);
        rowKeyIndex = 0;
    }

    if (colFamilies.size() != colQualifiers.size()) {
        throw new IOException("Error in parsing the hbase columns mapping.");
    }

    // populate the corresponding byte [] if the client has passed in a non-null list
    if (colFamiliesBytes != null) {
        colFamiliesBytes.clear();

        for (String fam : colFamilies) {
            colFamiliesBytes.add(Bytes.toBytes(fam));
        }
    }

    if (colQualifiersBytes != null) {
        colQualifiersBytes.clear();

        for (String qual : colQualifiers) {
            if (qual == null) {
                colQualifiersBytes.add(null);
            } else {
                colQualifiersBytes.add(Bytes.toBytes(qual));
            }
        }
    }

    // Sanity check that the two byte caches stayed in lock-step. The original
    // code left this as an empty if-block with the SerDeException commented
    // out; IOException matches the rest of this method's error handling.
    if (colFamiliesBytes != null && colQualifiersBytes != null
            && colFamiliesBytes.size() != colQualifiersBytes.size()) {
        throw new IOException(
                "Error in caching the bytes for the hbase column families " + "and qualifiers.");
    }

    return rowKeyIndex;
}