List of usage examples for weka.core Instance toDoubleArray
public double[] toDoubleArray();
From source file:SpectralClusterer.java
License:Open Source License
/**
 * Classifies an instance w.r.t. the partitions found, using a naive
 * minimum-distance search over the rows of the cached matrix {@code v}.
 *
 * @param instance the instance to classify
 * @return the cluster assigned to the row nearest to the instance
 * @throws java.lang.Exception if the distance computation fails
 */
public int clusterInstance(Instance instance) throws java.lang.Exception {
    DoubleMatrix1D point = DoubleFactory1D.dense.make(instance.toDoubleArray());
    int bestCluster = -1;
    double bestDist = Double.POSITIVE_INFINITY;
    // Scan every row of v; keep the cluster of the closest one.
    for (int row = 0; row < v.rows(); row++) {
        double dist = distnorm2(point, v.viewRow(row));
        if (dist < bestDist) {
            bestDist = dist;
            bestCluster = cluster[row];
        }
    }
    return bestCluster;
}
From source file:MPCKMeans.java
License:Open Source License
/** lookup the instance in the checksum hash, assuming transductive clustering * @param instance instance to be looked up * @return the index of the cluster to which the instance was assigned, -1 if the instance has not bee clustered *//* w w w .j a v a2s . co m*/ protected int lookupInstanceCluster(Instance instance) throws Exception { int classIdx = instance.classIndex(); double checksum = 0; // need to normalize using original metric, since cluster data is normalized similarly if (m_metric.doesNormalizeData()) { if (m_Trainable == TRAINING_INTERNAL) { m_metric.resetMetric(); } m_metric.normalizeInstanceWeighted(instance); } double[] values1 = instance.toDoubleArray(); for (int i = 0; i < values1.length; i++) { if (i != classIdx) { checksum += m_checksumCoeffs[i] * values1[i]; } } Object list = m_checksumHash.get(new Double((float) checksum)); if (list != null) { // go through the list of instances with the same checksum and find the one that is equivalent ArrayList checksumList = (ArrayList) list; for (int i = 0; i < checksumList.size(); i++) { int instanceIdx = ((Integer) checksumList.get(i)).intValue(); Instance listInstance = m_Instances.instance(instanceIdx); double[] values2 = listInstance.toDoubleArray(); boolean equal = true; for (int j = 0; j < values1.length && equal == true; j++) { if (j != classIdx) { if ((float) values1[j] != (float) values2[j]) { equal = false; } } } if (equal == true) { return m_ClusterAssignments[instanceIdx]; } } } return -1; }
From source file:MPCKMeans.java
License:Open Source License
/** Sets training instances */ public void setInstances(Instances instances) { m_Instances = instances;/* ww w . j av a2s . c om*/ // create the checksum coefficients m_checksumCoeffs = new double[instances.numAttributes()]; for (int i = 0; i < m_checksumCoeffs.length; i++) { m_checksumCoeffs[i] = m_RandomNumberGenerator.nextDouble(); } // hash the instance checksums m_checksumHash = new HashMap(instances.numInstances()); int classIdx = instances.classIndex(); for (int i = 0; i < instances.numInstances(); i++) { Instance instance = instances.instance(i); double[] values = instance.toDoubleArray(); double checksum = 0; for (int j = 0; j < values.length; j++) { if (j != classIdx) { checksum += m_checksumCoeffs[j] * values[j]; } } // take care of chaining Object list = m_checksumHash.get(new Double((float) checksum)); ArrayList idxList = null; if (list == null) { idxList = new ArrayList(); m_checksumHash.put(new Double((float) checksum), idxList); } else { // chaining idxList = (ArrayList) list; } idxList.add(new Integer(i)); } }
From source file:adams.data.instances.AbstractInstanceGenerator.java
License:Open Source License
/**
 * For adding IDs, notes, additional fields to the data.
 *
 * @param data the input data
 * @param inst the generated instance
 * @return the processed instance, attached to the output header
 */
protected Instance postProcessOutput(T data, Instance inst) {
    double[] attValues = inst.toDoubleArray();
    Report rep = data.getReport();

    // Overwrite the DB-ID attribute slot when configured to do so.
    if (m_AddDatabaseID) {
        int dbidIndex = m_OutputHeader.attribute(ArffUtils.getDBIDName()).index();
        attValues[dbidIndex] = rep.getDatabaseID();
    }

    Instance processed = new DenseInstance(1.0, attValues);
    processed.setDataset(m_OutputHeader);
    return processed;
}
From source file:adams.flow.transformer.WekaInstanceBuffer.java
License:Open Source License
/**
 * Executes the flow item.
 *
 * @return null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instance[] insts;
    Instance inst;
    double[] values;
    int i;
    int n;
    boolean updated;

    result = null;

    if (m_Operation == Operation.INSTANCE_TO_INSTANCES) {
        // payload may be a single Instance or an array of them
        if (m_InputToken.getPayload() instanceof Instance) {
            insts = new Instance[] { (Instance) m_InputToken.getPayload() };
        } else {
            insts = (Instance[]) m_InputToken.getPayload();
        }
        for (n = 0; n < insts.length; n++) {
            inst = insts[n];

            // a header change invalidates the buffer (optional check)
            if ((m_Buffer != null) && m_CheckHeader) {
                if (!m_Buffer.equalHeaders(inst.dataset())) {
                    getLogger().info("Header changed, resetting buffer");
                    m_Buffer = null;
                }
            }

            // buffer instance
            if (m_Buffer == null)
                m_Buffer = new Instances(inst.dataset(), 0);

            // we need to make sure that string and relational values are in our
            // buffer header and update the current Instance accordingly before
            // buffering it
            values = inst.toDoubleArray();
            updated = false;
            for (i = 0; i < values.length; i++) {
                if (inst.isMissing(i))
                    continue;
                if (inst.attribute(i).isString()) {
                    values[i] = m_Buffer.attribute(i).addStringValue(inst.stringValue(i));
                    updated = true;
                } else if (inst.attribute(i).isRelationValued()) {
                    values[i] = m_Buffer.attribute(i).addRelation(inst.relationalValue(i));
                    updated = true;
                }
            }

            if (updated) {
                // re-created with remapped indices, preserving the concrete type
                if (inst instanceof SparseInstance) {
                    inst = new SparseInstance(inst.weight(), values);
                } else if (inst instanceof BinarySparseInstance) {
                    inst = new BinarySparseInstance(inst.weight(), values);
                } else {
                    if (!(inst instanceof DenseInstance)) {
                        getLogger().severe("Unhandled instance class (" + inst.getClass().getName() + "), "
                                + "defaulting to " + DenseInstance.class.getName());
                    }
                    inst = new DenseInstance(inst.weight(), values);
                }
            } else {
                // no remapping needed; copy so the buffer owns its instance
                inst = (Instance) inst.copy();
            }
            m_Buffer.add(inst);
        }

        // emit the buffer every m_Interval instances; optionally start fresh
        if (m_Buffer.numInstances() % m_Interval == 0) {
            m_OutputToken = new Token(m_Buffer);
            if (m_ClearBuffer)
                m_Buffer = null;
        }
    } else if (m_Operation == Operation.INSTANCES_TO_INSTANCE) {
        // switch to iteration mode over the supplied dataset
        m_Buffer = (Instances) m_InputToken.getPayload();
        m_Iterator = m_Buffer.iterator();
    } else {
        throw new IllegalStateException("Unhandled operation: " + m_Operation);
    }

    return result;
}
From source file:adams.flow.transformer.WekaInstancesAppend.java
License:Open Source License
/**
 * Executes the flow item.
 *
 * @return null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    String[] filesStr;
    File[] files;
    int i;
    int n;
    Instances[] inst;
    Instances full;
    String msg;
    StringBuilder relation;
    double[] values;

    result = null;

    // get filenames: payload is either file names, files, or datasets
    files = null;
    inst = null;
    if (m_InputToken.getPayload() instanceof String[]) {
        filesStr = (String[]) m_InputToken.getPayload();
        files = new File[filesStr.length];
        for (i = 0; i < filesStr.length; i++)
            files[i] = new PlaceholderFile(filesStr[i]);
    } else if (m_InputToken.getPayload() instanceof File[]) {
        files = (File[]) m_InputToken.getPayload();
    } else if (m_InputToken.getPayload() instanceof Instances[]) {
        inst = (Instances[]) m_InputToken.getPayload();
    } else {
        throw new IllegalStateException("Unhandled input type: " + m_InputToken.getPayload().getClass());
    }

    // load data?
    if (files != null) {
        inst = new Instances[files.length];
        for (i = 0; i < files.length; i++) {
            try {
                inst[i] = DataSource.read(files[i].getAbsolutePath());
            } catch (Exception e) {
                result = handleException("Failed to load dataset: " + files[i], e);
                break;
            }
        }
    }

    // test compatibility: every pair of datasets must share an equal header
    if (result == null) {
        for (i = 0; i < inst.length - 1; i++) {
            for (n = i + 1; n < inst.length; n++) {
                if ((msg = inst[i].equalHeadersMsg(inst[n])) != null) {
                    result = "Dataset #" + (i + 1) + " and #" + (n + 1) + " are not compatible:\n" + msg;
                    break;
                }
            }
            if (result != null)
                break;
        }
    }

    // append: copy the first dataset, then add rows of the rest, remapping
    // string/relational values into the combined header
    if (result == null) {
        full = new Instances(inst[0]);
        relation = new StringBuilder(inst[0].relationName());
        for (i = 1; i < inst.length; i++) {
            relation.append("+" + inst[i].relationName());
            for (Instance row : inst[i]) {
                values = row.toDoubleArray();
                for (n = 0; n < values.length; n++) {
                    if (row.attribute(n).isString())
                        values[n] = full.attribute(n).addStringValue(row.stringValue(n));
                    else if (row.attribute(n).isRelationValued())
                        values[n] = full.attribute(n).addRelation(row.relationalValue(n));
                }
                // rebuild the row with remapped indices, keeping sparseness
                if (row instanceof SparseInstance)
                    row = new SparseInstance(row.weight(), values);
                else
                    row = new DenseInstance(row.weight(), values);
                full.add(row);
            }
        }
        full.setRelationName(relation.toString());
        m_OutputToken = new Token(full);
    }

    return result;
}
From source file:CEP.CEPListener.java
Instances SetDuration(Instances in) throws InterruptedException { Instances out = HeaderManager.GetEmptyStructure(); for (Instance inst : in) { double time = inst.toDoubleArray()[5]; if (time > 20) { inst.setValue(5, time - windowSize); out.add(inst);//from ww w .ja va 2 s . c om } } return out; }
From source file:cezeri.utils.FactoryInstance.java
public static double[][] getData(Instances m) { double[][] ret = new double[m.numInstances()][m.numAttributes()]; for (int i = 0; i < m.numInstances(); i++) { Instance ins = m.instance(i); ret[i] = ins.toDoubleArray(); }//from w w w . j av a2 s . co m return ret; }
From source file:cezeri.utils.FactoryInstance.java
public static CMatrix toMatrix(Instances m) { double[][] ret = new double[m.numInstances()][m.numAttributes()]; for (int i = 0; i < m.numInstances(); i++) { Instance ins = m.instance(i); ret[i] = ins.toDoubleArray(); }// ww w. java2 s . c o m return CMatrix.getInstance(ret); }
From source file:cezeri.utils.FactoryInstance.java
public static CMatrix fromInstances(Instances m) { double[][] ret = new double[m.numInstances()][m.numAttributes()]; for (int i = 0; i < m.numInstances(); i++) { Instance ins = m.instance(i); ret[i] = ins.toDoubleArray(); }//from ww w .ja v a 2 s . c om return CMatrix.getInstance(ret); }