Example usage for java.util.Arrays.deepEquals

List of usage examples for java.util.Arrays.deepEquals

Introduction

On this page you can find example usage for java.util.Arrays.deepEquals.

Prototype

public static boolean deepEquals(Object[] a1, Object[] a2) 

Document

Returns true if the two specified arrays are deeply equal to one another.
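
Before the usage examples, a minimal self-contained sketch (class and variable names are illustrative) of how deepEquals differs from Arrays.equals on nested arrays:

import java.util.Arrays;

public class DeepEqualsDemo {
    public static void main(String[] args) {
        String[][] a = { { "x", "y" }, { "z" } };
        String[][] b = { { "x", "y" }, { "z" } };

        // Arrays.equals compares the nested arrays by reference, so this prints false
        System.out.println(Arrays.equals(a, b));
        // Arrays.deepEquals recurses into the nested arrays and compares their contents, so this prints true
        System.out.println(Arrays.deepEquals(a, b));
    }
}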

Usage

From source file:de.bund.bfr.knime.node.editableTable.JSONDataTable.java

/**
 * {@inheritDoc}
 */
@Override
public boolean equals(final Object obj) {
    if (this == obj) {
        return true;
    }
    if (obj == null) {
        return false;
    }
    if (getClass() != obj.getClass()) {
        return false;
    }
    JSONDataTable other = (JSONDataTable) obj;
    if (!Arrays.deepEquals(m_extensions, other.m_extensions)) {
        return false;
    }
    if (!Arrays.equals(m_rows, other.m_rows)) {
        return false;
    }
    if (m_spec == null) {
        if (other.m_spec != null) {
            return false;
        }
    } else if (!m_spec.equals(other.m_spec)) {
        return false;
    }
    return true;
}
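
The class overrides equals with Arrays.deepEquals for the nested m_extensions array; a matching hashCode would typically pair Arrays.deepHashCode with Arrays.hashCode. The sketch below mirrors the fields shown above but is an assumption, not the class's actual implementation:

@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + Arrays.deepHashCode(m_extensions); // pairs with deepEquals
    result = prime * result + Arrays.hashCode(m_rows);           // pairs with Arrays.equals
    result = prime * result + ((m_spec == null) ? 0 : m_spec.hashCode());
    return result;
}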

From source file:com.opengamma.financial.analytics.LabelledMatrix3D.java

@SuppressWarnings("rawtypes")
@Override
public boolean equals(final Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof LabelledMatrix3D)) {
        return false;
    }
    final LabelledMatrix3D other = (LabelledMatrix3D) obj;
    return Arrays.deepEquals(_values, other._values) && Arrays.equals(_xKeys, other._xKeys)
            && Arrays.equals(_xLabels, other._xLabels) && Arrays.equals(_yKeys, other._yKeys)
            && Arrays.equals(_yLabels, other._yLabels) && Arrays.equals(_zKeys, other._zKeys)
            && Arrays.equals(_zLabels, other._zLabels);
}

From source file:hivemall.utils.math.MatrixUtils.java

/**
 * Find eigenvalues and eigenvectors of given tridiagonal matrix T.
 *
 * @see http://web.csulb.edu/~tgao/math423/s94.pdf
 * @see http://stats.stackexchange.com/questions/20643/finding-matrix-eigenvectors-using-qr-
 *      decomposition
 * @param T target tridiagonal matrix
 * @param nIter number of iterations for the QR method
 * @param eigvals eigenvalues are stored here
 * @param eigvecs eigenvectors are stored here
 */
public static void tridiagonalEigen(@Nonnull final RealMatrix T, @Nonnull final int nIter,
        @Nonnull final double[] eigvals, @Nonnull final RealMatrix eigvecs) {
    Preconditions.checkArgument(Arrays.deepEquals(T.getData(), T.transpose().getData()),
            "Target matrix T must be a symmetric (tridiagonal) matrix");
    Preconditions.checkArgument(eigvecs.getRowDimension() == eigvecs.getColumnDimension(),
            "eigvecs must be a square matrix");
    Preconditions.checkArgument(T.getRowDimension() == eigvecs.getRowDimension(),
            "T and eigvecs must be the same shape");
    Preconditions.checkArgument(eigvals.length == eigvecs.getRowDimension(),
            "Number of eigenvalues and eigenvectors must be same");

    int nEig = eigvals.length;

    // initialize eigvecs as an identity matrix
    eigvecs.setSubMatrix(eye(nEig), 0, 0);

    RealMatrix T_ = T.copy();

    for (int i = 0; i < nIter; i++) {
        // QR decomposition for the tridiagonal matrix T
        RealMatrix R = new Array2DRowRealMatrix(nEig, nEig);
        RealMatrix Qt = new Array2DRowRealMatrix(nEig, nEig);
        tridiagonalQR(T_, R, Qt);

        RealMatrix Q = Qt.transpose();
        T_ = R.multiply(Q);
        eigvecs.setSubMatrix(eigvecs.multiply(Q).getData(), 0, 0);
    }

    // diagonal elements correspond to the eigenvalues
    for (int i = 0; i < nEig; i++) {
        eigvals[i] = T_.getEntry(i, i);
    }
}
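
Here Arrays.deepEquals(T.getData(), T.transpose().getData()) verifies that T equals its transpose, i.e. that the input is symmetric. A hedged usage sketch of the method follows; the 3x3 matrix and iteration count are invented, and it assumes hivemall.utils.math.MatrixUtils and Apache Commons Math (Array2DRowRealMatrix, RealMatrix) are on the classpath:

// a small symmetric tridiagonal matrix (illustrative values)
RealMatrix T = new Array2DRowRealMatrix(
        new double[][] { { 2.0, 1.0, 0.0 }, { 1.0, 2.0, 1.0 }, { 0.0, 1.0, 2.0 } });
double[] eigvals = new double[3];
RealMatrix eigvecs = new Array2DRowRealMatrix(3, 3);

// run 100 QR iterations; eigenvalues end up in eigvals, eigenvectors in eigvecs
MatrixUtils.tridiagonalEigen(T, 100, eigvals, eigvecs);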

From source file:org.nextframework.controller.MultiActionController.java

@SuppressWarnings("unused")
private Method firstMethod(Class<?> ofClass, Method expectedMethod) {
    if (ofClass.equals(expectedMethod.getDeclaringClass())) {
        return expectedMethod;
    }
    Method[] methods = ofClass.getDeclaredMethods();
    for (Method method : methods) {
        if (method.getName().equals(expectedMethod.getName())
                && Arrays.deepEquals(method.getParameterTypes(), expectedMethod.getParameterTypes())) {
            return method;
        }
    }
    return firstMethod(ofClass.getSuperclass(), expectedMethod);
}
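
The pattern in isolation: matching a Method by name plus Arrays.deepEquals on getParameterTypes() selects the method with the exact same signature. A simplified sketch (assumes java.lang.reflect.Method and java.util.Arrays are imported; names are invented):

static Method findBySignature(Class<?> target, Method reference) {
    for (Method m : target.getDeclaredMethods()) {
        // same name and identical parameter-type array means an identical signature
        if (m.getName().equals(reference.getName())
                && Arrays.deepEquals(m.getParameterTypes(), reference.getParameterTypes())) {
            return m;
        }
    }
    return null;
}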

From source file:hex.Model.java

/**
 * @param test Frame to be adapted
 * @param origNames Training column names before categorical column encoding - can be the same as names
 * @param origDomains Training column levels before categorical column encoding - can be the same as domains
 * @param names Training column names
 * @param domains Training column levels
 * @param parms Model parameters
 * @param expensive Whether to actually do the hard work
 * @param computeMetrics Whether metrics can be (and should be) computed
 * @param interactions Column names to create pairwise interactions with
 * @param catEncoded Whether the categorical columns of the test frame were already transformed via categorical_encoding
 */
public static String[] adaptTestForTrain(Frame test, String[] origNames, String[][] origDomains, String[] names,
        String[][] domains, Parameters parms, boolean expensive, boolean computeMetrics,
        InteractionSpec interactions, ToEigenVec tev, IcedHashMap<Key, String> toDelete, boolean catEncoded)
        throws IllegalArgumentException {
    String[] msg = new String[0];
    if (test == null)
        return msg;
    if (catEncoded && origNames == null)
        return msg;

    // test frame matches the training frame (after categorical encoding, if applicable)
    String[][] tdomains = test.domains();
    if (names == test._names && domains == tdomains
            || (Arrays.equals(names, test._names) && Arrays.deepEquals(domains, tdomains)))
        return msg;

    String[] backupNames = names;
    String[][] backupDomains = domains;

    final String weights = parms._weights_column;
    final String offset = parms._offset_column;
    final String fold = parms._fold_column;
    final String response = parms._response_column;

    // whether we need to be careful with categorical encoding - the test frame could be either in original state or in encoded state
    final boolean checkCategoricals = parms._categorical_encoding == Parameters.CategoricalEncodingScheme.OneHotExplicit
            || parms._categorical_encoding == Parameters.CategoricalEncodingScheme.Eigen
            || parms._categorical_encoding == Parameters.CategoricalEncodingScheme.Binary;

    // test frame matches the user-given frame (before categorical encoding, if applicable)
    if (checkCategoricals && origNames != null) {
        boolean match = Arrays.equals(origNames, test._names);
        if (!match) {
            match = true;
            // In case the test set has extra columns not in the training set - check that all original pre-encoding columns are available in the test set
            // We could be lenient here and fill missing columns with NA, but then it gets difficult to decide whether this frame is pre/post encoding, if a certain fraction of columns mismatch...
            for (String s : origNames) {
                match &= ArrayUtils.contains(test.names(), s);
                if (!match)
                    break;
            }
        }
        // still have work to do below, make sure we set the names/domains to the original user-given values such that we can do the int->enum mapping and cat. encoding below (from scratch)
        if (match) {
            names = origNames;
            domains = origDomains;
        }
    }

    // create the interactions now and bolt them on to the front of the test Frame
    if (null != interactions) {
        InteractionPair[] interactionPairs = interactions.makeInteractionPairs(test);
        test.add(makeInteractions(test, false, interactionPairs, true, true, false));
    }

    // Build the validation set to be compatible with the training set.
    // Toss out extra columns, complain about missing ones, remap categoricals
    ArrayList<String> msgs = new ArrayList<>();
    Vec vvecs[] = new Vec[names.length];
    int good = 0; // Any matching column names, at all?
    int convNaN = 0; // count of columns that were replaced with NA
    for (int i = 0; i < names.length; i++) {
        Vec vec = test.vec(names[i]); // Search in the given validation set
        boolean isResponse = response != null && names[i].equals(response);
        boolean isWeights = weights != null && names[i].equals(weights);
        boolean isOffset = offset != null && names[i].equals(offset);
        boolean isFold = fold != null && names[i].equals(fold);
        // If a training set column is missing in the test set, complain; where that is acceptable, fill in with NAs (or 0s if it's a fold column)
        if (vec == null) {
            if (isResponse && computeMetrics)
                throw new IllegalArgumentException(
                        "Test/Validation dataset is missing response column '" + response + "'");
            else if (isOffset)
                throw new IllegalArgumentException(
                        H2O.technote(12, "Test/Validation dataset is missing offset column '" + offset
                                + "'. If your intention is to disable the effect of the offset add a zero offset column."));
            else if (isWeights && computeMetrics) {
                if (expensive) {
                    vec = test.anyVec().makeCon(1);
                    toDelete.put(vec._key, "adapted missing vectors");
                    msgs.add(H2O.technote(1, "Test/Validation dataset is missing weights column '" + names[i]
                            + "' (needed because a response was found and metrics are to be computed): substituting in a column of 1s"));
                }
            } else if (expensive) { // generate warning even for response columns.  Other tests depended on this.
                final double defval;
                if (isWeights)
                    defval = 1; // note: even though computeMetrics is false we should still have sensible weights (GLM skips rows with NA weights)
                else if (isFold)
                    defval = 0;
                else {
                    defval = parms.missingColumnsType();
                    convNaN++;
                }
                String str = "Test/Validation dataset is missing column '" + names[i]
                        + "': substituting in a column of " + defval;
                vec = test.anyVec().makeCon(defval);
                toDelete.put(vec._key, "adapted missing vectors");
                msgs.add(str);
            }
        }
        if (vec != null) { // I have a column with a matching name
            if (domains[i] != null) { // Model expects a categorical column
                if (vec.isString())
                    vec = VecUtils.stringToCategorical(vec); //turn a String column into a categorical column (we don't delete the original vec here)
                if (expensive && vec.domain() != domains[i] && !Arrays.equals(vec.domain(), domains[i])) { // Result needs to be the same categorical
                    Vec evec;
                    try {
                        evec = vec.adaptTo(domains[i]); // Convert to categorical or throw IAE
                        toDelete.put(evec._key, "categorically adapted vec");
                    } catch (NumberFormatException nfe) {
                        throw new IllegalArgumentException(
                                "Test/Validation dataset has a non-categorical column '" + names[i]
                                        + "' which is categorical in the training data");
                    }
                    String[] ds = evec.domain();
                    assert ds != null && ds.length >= domains[i].length;
                    if (isResponse && vec.domain() != null
                            && ds.length == domains[i].length + vec.domain().length)
                        throw new IllegalArgumentException(
                                "Test/Validation dataset has a categorical response column '" + names[i]
                                        + "' with no levels in common with the model");
                    if (ds.length > domains[i].length)
                        msgs.add("Test/Validation dataset column '" + names[i] + "' has levels not trained on: "
                                + Arrays.toString(Arrays.copyOfRange(ds, domains[i].length, ds.length)));
                    vec = evec;
                }
            } else if (vec.isCategorical()) {
                if (parms._categorical_encoding == Parameters.CategoricalEncodingScheme.LabelEncoder) {
                    Vec evec = vec.toNumericVec();
                    toDelete.put(evec._key, "label encoded vec");
                    vec = evec;
                } else {
                    throw new IllegalArgumentException("Test/Validation dataset has categorical column '"
                            + names[i] + "' which is real-valued in the training data");
                }
            }
            good++; // Assumed compatible; not checking e.g. Strings vs UUID
        }
        vvecs[i] = vec;
    }
    if (good == names.length || (response != null && test.find(response) == -1 && good == names.length - 1)) // Only update if got something for all columns
        test.restructure(names, vvecs, good);

    boolean haveCategoricalPredictors = false;
    if (expensive && checkCategoricals && !catEncoded) {
        for (int i = 0; i < test.numCols(); ++i) {
            if (test.names()[i].equals(response))
                continue;
            if (test.names()[i].equals(weights))
                continue;
            if (test.names()[i].equals(offset))
                continue;
            if (test.names()[i].equals(fold))
                continue;
            // either the column of the test set is categorical (could be a numeric col that's already turned into a factor)
            if (test.vec(i).cardinality() > 0) {
                haveCategoricalPredictors = true;
                break;
            }
            // or an equally named column of the training set is categorical, but the test column isn't (e.g., numeric column provided to be converted to a factor)
            int whichCol = ArrayUtils.find(names, test.name(i));
            if (whichCol >= 0 && domains[whichCol] != null) {
                haveCategoricalPredictors = true;
                break;
            }
        }
    }
    // check if we first need to expand categoricals before calling this method again
    if (expensive && !catEncoded && haveCategoricalPredictors) {
        Frame updated = categoricalEncoder(test, new String[] { weights, offset, fold, response },
                parms._categorical_encoding, tev, parms._max_categorical_levels);
        toDelete.put(updated._key, "categorically encoded frame");
        test.restructure(updated.names(), updated.vecs()); //updated in place
        String[] msg2 = adaptTestForTrain(test, origNames, origDomains, backupNames, backupDomains, parms,
                expensive, computeMetrics, interactions, tev, toDelete, true /*catEncoded*/);
        msgs.addAll(Arrays.asList(msg2));
        return msgs.toArray(new String[msgs.size()]);
    }
    if (good == convNaN)
        throw new IllegalArgumentException(
                "Test/Validation dataset has no columns in common with the training set");

    return msgs.toArray(new String[msgs.size()]);
}
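
The early return near the top of this method illustrates why deepEquals is needed here: column names are a flat String[] and can be compared with Arrays.equals, but the categorical domains are a String[][] whose elements must be compared by content. A standalone illustration with invented values:

String[] trainNames = { "age", "color" };
String[] testNames = { "age", "color" };
String[][] trainDomains = { null, { "red", "green" } }; // null marks a numeric column
String[][] testDomains = { null, { "red", "green" } };

// names are flat, so Arrays.equals suffices; domains are nested, so deepEquals is required
boolean frameMatches = Arrays.equals(trainNames, testNames)
        && Arrays.deepEquals(trainDomains, testDomains); // true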

From source file:org.sakaiproject.gradebook.gwt.server.ImportExportUtilityImpl.java

public Upload parseImportGeneric(ImportExportDataFile rawData) {

    String msgs = rawData.getMessages();
    boolean errorsFound = rawData.isErrorsFound();

    if (errorsFound) {
        return emptyUploadFileWithNotes(msgs);
    }

    Gradebook gradebook = service.getGradebook(rawData.getImportSettings().getGradebookUid());
    Item gradebookItemModel = gradebook.getGradebookItemModel();

    List<UserDereference> userDereferences = this.service.findAllUserDereferences();
    Map<String, UserDereference> userDereferenceMap = new HashMap<String, UserDereference>();
    buildDereferenceIdMap(userDereferences, userDereferenceMap);
    ImportExportInformation ieInfo = new ImportExportInformation();

    UploadImpl importFile = new UploadImpl();

    // this is just housekeeping and may not be immediately necessary
    importFile.getImportSettings().setScantron(rawData.isScantronFile());
    importFile.getImportSettings().setJustStructure(rawData.isJustStructure());
    ieInfo.setImportSettings(rawData.getImportSettings());
    //

    ieInfo.setGradebookItemModel(gradebookItemModel);

    ArrayList<Learner> importRows = new ArrayList<Learner>();

    Map<StructureRow, String[]> structureColumnsMap = new HashMap<StructureRow, String[]>();

    int structureStop = 0;

    structureStop = readDataForStructureInformation(rawData, buildRowIndicatorMap(), structureColumnsMap);
    if (structureStop != -1) {
        try {
            readInHeaderRow(rawData, ieInfo, structureStop);
            processStructureInformation(ieInfo, structureColumnsMap);

            /*
             * if a header name changes when it returns from processHeaders, there was a unique name
             * generated, and in that case, send notification
             */
            String[] headerNames = new String[ieInfo.getHeaders().length];
            for (int i = 0; i < headerNames.length; ++i) {
                headerNames[i] = ieInfo.getHeaders()[i].getHeaderName();
            }
            String[] copy = Arrays.copyOf(headerNames, headerNames.length);
            processHeaders(ieInfo, structureColumnsMap);
            for (int i = 0; i < headerNames.length; ++i) {
                headerNames[i] = ieInfo.getHeaders()[i].getHeaderName();
            }

            if (!Arrays.deepEquals(copy, headerNames)) {
                importFile.setNotifyAssignmentName(true);
                importFile.setNotes(i18n.getString("gb2ImportItemSameName"));
            }

            // At this point, we need to remove assignments that are not in the import
            // file
            adjustGradebookItemModel(ieInfo);

            if (!ieInfo.getImportSettings().isJustStructure()) {// GRBK-514
                readInGradeDataFromImportFile(rawData, ieInfo, userDereferenceMap, importRows, structureStop,
                        service);
            }

            GradeItem gradebookGradeItem = (GradeItem) ieInfo.getGradebookItemModel();
            service.decorateGradebook(gradebookGradeItem, null, null);
            importFile.setGradebookItemModel(gradebookGradeItem);
            importFile.setRows(importRows);
            importFile.setGradeType(gradebookItemModel.getGradeType());
            importFile.setCategoryType(gradebookItemModel.getCategoryType());

            if (ieInfo.isUserNotFound())
                importFile.addNotes(i18n.getString("importUserNotFoundMessage"));

            if (ieInfo.isInvalidScore())
                importFile.addNotes(i18n.getString("importInvalidScoresMessage"));
        } catch (Exception e) {
            importFile.setErrors(true);
            importFile.setNotes(e.getMessage());
            importFile.setRows(null);
            log.warn(e, e);
        }

        // GRBK-806: code was here to disable percentage gradebooks in general, but for scantron imports they are still not allowed.

    } else {
        importFile.setErrors(true);
        importFile.setNotes(i18n.getString("importMissingHeaderMessage"));
    }

    service.postEvent("gradebook2.import", String.valueOf(gradebook.getGradebookId()));

    return importFile;
}
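
The deepEquals call above detects whether processHeaders renamed any header: the names are copied before the call and compared with the result afterwards. The same detect-a-change idiom in isolation (the header values and renaming step are invented; for a flat String[], Arrays.equals would behave identically):

String[] headers = { "Quiz 1", "Quiz 1" };
String[] before = Arrays.copyOf(headers, headers.length);

// hypothetical de-duplication step that rewrites the clashing header name
headers[1] = "Quiz 1 (2)";

if (!Arrays.deepEquals(before, headers)) {
    // at least one header was renamed; notify the user
}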

From source file:base.BasePlayer.BedCanvas.java

boolean checkIntersect(VarNode current, Object[] t1, Object[] t2) {
    t2 = new Object[t1.length];

    if (current == null || current.getBedHits() == null) {
        for (int i = 0; i < this.bedTrack.size(); i++) {
            if (this.bedTrack.get(i).intersect && this.bedTrack.get(i).getIntersectBox().isSelected()) {
                return false;
            }
        }
        return true;
    }

    for (int i = 0; i < current.getBedHits().size(); i++) {
        if (!current.getBedHits().get(i).getTrack().intersect) {
            continue;
        }
        if (current.getBedHits().get(i).getTrack().getSubtracttBox().isSelected()) {
            t2[current.getBedHits().get(i).getTrack().trackIndex] = 1;
        } else if (current.getBedHits().get(i).getTrack().getIntersectBox().isSelected()) {
            t2[current.getBedHits().get(i).getTrack().trackIndex] = 1;
        }
    }

    return Arrays.deepEquals(t1, t2);
}
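
Note that the t2 parameter is immediately replaced by a fresh array, so the method effectively rebuilds the per-track flags locally and compares them against the expected flags in t1. deepEquals treats a null slot and a non-null slot as unequal, which is what makes that comparison work (values below are invented):

Object[] expected = { 1, null, 1 };
Object[] actual = { 1, null, 1 };
System.out.println(Arrays.deepEquals(expected, actual)); // true - null slots match null slots

actual[1] = 1; // a track flagged in one array but not in the other
System.out.println(Arrays.deepEquals(expected, actual)); // false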