List of usage examples for the weka.core.Instance.dataset() method
public Instances dataset();
From source file:adams.flow.transformer.WekaInstanceEvaluator.java
License:Open Source License
/** * Executes the flow item./*from w w w.j a v a 2s .com*/ * * @return null if everything is fine, otherwise error message */ @Override protected String doExecute() { String result; Instance inst; Instance newInst; double eval; result = null; // the Instance to evaluate inst = (Instance) m_InputToken.getPayload(); // obtain dataset first? if (m_Header == null) result = setUpEvaluator(); // generate new header? if ((result == null) && (m_Header == null)) result = generateHeader(inst); // generate evaluation if (result == null) { try { eval = m_Evaluator.evaluate(inst); if (isLoggingEnabled()) getLogger().info("Evaluation " + eval + " for instance: " + inst); m_Filter.input(inst); m_Filter.batchFinished(); newInst = m_Filter.output(); newInst.setValue(newInst.dataset().attribute(m_AttributeName), eval); m_OutputToken = new Token(newInst); } catch (Exception e) { m_OutputToken = null; result = handleException("Failed to evaluate instance: " + inst, e); } } return result; }
From source file:adams.flow.transformer.WekaInstanceStreamPlotGenerator.java
License:Open Source License
/**
 * Executes the flow item: generates one SequencePlotterContainer per selected
 * attribute of the incoming Instance, using the counter as X value.
 *
 * @return		null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
  String			result;
  Instance			inst;
  SequencePlotterContainer	cont;
  int[]				indices;
  int				i;
  String			name;

  result = null;

  inst = (Instance) m_InputToken.getPayload();
  m_Counter++;
  m_Containers.clear();

  m_Attributes.setMax(inst.numAttributes());
  indices = m_Attributes.getIntIndices();
  for (i = 0; i < indices.length; i++) {
    // plot name is the attribute name; hoisted to avoid duplicate lookups
    name = inst.dataset().attribute(indices[i]).name();
    // nominal attributes are plotted via their label, all others via their
    // numeric value; Double.valueOf replaces the deprecated new Double(...)
    if (inst.attribute(indices[i]).isNominal())
      cont = new SequencePlotterContainer(name, Double.valueOf(m_Counter), inst.stringValue(indices[i]));
    else
      cont = new SequencePlotterContainer(name, Double.valueOf(m_Counter), inst.value(indices[i]));
    m_Containers.add(cont);
  }

  return result;
}
From source file:adams.flow.transformer.WekaPrimeForecaster.java
License:Open Source License
/**
 * Executes the flow item: primes the forecaster either in batch mode with a
 * full dataset or incrementally with a single instance.
 *
 * @return		null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
  String result = null;

  try {
    AbstractForecaster cls = getForecasterInstance();
    if (cls == null)
      result = "Failed to obtain forecaster!";

    if (result == null) {
      if ((m_InputToken != null) && (m_InputToken.getPayload() instanceof Instances)) {
        // full dataset: prime in batch mode
        Instances data = (Instances) m_InputToken.getPayload();
        cls.primeForecaster(data);
        m_OutputToken = new Token(new WekaModelContainer(cls, new Instances(data, 0), data));
      }
      else if ((m_InputToken != null) && (m_InputToken.getPayload() instanceof Instance)) {
        // single instance: only works if the forecaster supports incremental priming
        Instance inst = (Instance) m_InputToken.getPayload();
        Instances data = inst.dataset();
        if (cls instanceof IncrementallyPrimeable) {
          ((IncrementallyPrimeable) cls).primeForecasterIncremental(inst);
          m_OutputToken = new Token(new WekaModelContainer(cls, new Instances(data, 0), data));
        }
        else {
          result = m_Forecaster.getValue() + " (= " + cls.getClass().getName() + ") does not implement "
              + IncrementallyPrimeable.class.getName() + "! Cannot prime incrementally!";
        }
      }
    }
  }
  catch (Exception e) {
    m_OutputToken = null;
    result = handleException("Failed to process data:", e);
  }

  if (m_OutputToken != null)
    updateProvenance(m_OutputToken);

  return result;
}
From source file:adams.flow.transformer.WekaRelationName.java
License:Open Source License
/** * Executes the flow item./*from www . ja v a 2s .c o m*/ * * @return null if everything is fine, otherwise error message */ protected String doExecute() { String result; Instance inst; Instances data; result = null; if (m_InputToken.getPayload() instanceof Instance) { inst = (Instance) m_InputToken.getPayload(); data = inst.dataset(); } else { inst = null; data = (Instances) m_InputToken.getPayload(); } m_OutputToken = new Token(data.relationName()); return result; }
From source file:adams.flow.transformer.WekaRenameRelation.java
License:Open Source License
/** * Executes the flow item.//from www .j a v a 2 s.c o m * * @return null if everything is fine, otherwise error message */ @Override protected String doExecute() { String result; weka.core.Instance inst; weka.core.Instances data; adams.data.instance.Instance instA; String oldName; String newName; result = null; if (m_InputToken.getPayload() instanceof weka.core.Instance) { inst = (weka.core.Instance) m_InputToken.getPayload(); data = inst.dataset(); } else if (m_InputToken.getPayload() instanceof adams.data.instance.Instance) { inst = ((adams.data.instance.Instance) m_InputToken.getPayload()).toInstance(); data = inst.dataset(); } else { inst = null; data = (weka.core.Instances) m_InputToken.getPayload(); } if (isLoggingEnabled()) getLogger().info("Renaming: " + m_Find + " -> " + m_Replace); // perform rename if (data != null) { oldName = data.relationName(); newName = oldName.replaceAll(m_Find, m_Replace); data.setRelationName(newName); if (isLoggingEnabled()) getLogger().info("Renamed: " + oldName + " -> " + newName); } else { if (isLoggingEnabled()) getLogger().info("weka.core.Instance doesn't have access to dataset?"); } if (inst == null) { m_OutputToken = new Token(data); } else { if (m_InputToken.getPayload() instanceof adams.data.instance.Instance) { instA = new adams.data.instance.Instance(); instA.set(inst); m_OutputToken = new Token(instA); } else { m_OutputToken = new Token(inst); } } return result; }
From source file:adams.flow.transformer.WekaReorderAttributesToReference.java
License:Open Source License
/**
 * Executes the flow item: reorders the attributes of the incoming data to
 * match the reference dataset, adding any attributes that are missing
 * (unless running in strict mode, where missing attributes are an error).
 *
 * @return		null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
  String result;
  Instances dataOld;
  Instance instOld;
  Instances dataNew;
  Instance instNew;
  Attribute att;
  int i;
  StringBuilder order;
  List<Add> adds;
  Add add;
  int index;
  StringBuilder labels;
  int n;
  List<Filter> filters;
  Reorder reorder;

  result = null;

  // lazily load the reference dataset if configured to do so
  if (m_OnTheFly && (m_Reference == null)) {
    result = setUpReference();
    if (result != null)
      return result;
  }

  dataNew = null;
  instNew = null;

  // get input data: either a single Instance (use its dataset as header)
  // or a full Instances object
  if (m_InputToken.getPayload() instanceof Instance) {
    instOld = (Instance) m_InputToken.getPayload();
    dataOld = instOld.dataset();
  }
  else {
    instOld = null;
    dataOld = (Instances) m_InputToken.getPayload();
  }

  // do we need to initialize filter?
  if (m_InitializeOnce || (m_Reorder == null)) {
    // check incoming data: in strict mode, every reference attribute must
    // be present; all missing names are collected into one error message
    if (!m_Lenient) {
      for (i = 0; i < m_Reference.numAttributes(); i++) {
        att = m_Reference.attribute(i);
        if (dataOld.attribute(att.name()) == null) {
          if (result == null)
            result = "Missing attribute(s) in incoming data: " + att.name();
          else
            result += ", " + att.name();
        }
      }
      if (result != null)
        getLogger().severe(result);
    }
    if (result == null) {
      try {
        // determine indices: for each reference attribute, either locate it
        // in the incoming data or schedule an Add filter to append it;
        // "order" accumulates 1-based indices for the Reorder filter
        order = new StringBuilder();
        adds = new ArrayList<Add>();
        for (i = 0; i < m_Reference.numAttributes(); i++) {
          att = m_Reference.attribute(i);
          if (dataOld.attribute(att.name()) == null) {
            // attribute will be appended after the existing ones, so its
            // index is numAttributes plus the number of Adds queued so far
            index = dataOld.numAttributes() + adds.size();
            add = new Add();
            add.setAttributeIndex("last");
            add.setAttributeName(att.name());
            add.setAttributeType(new SelectedTag(att.type(), Add.TAGS_TYPE));
            if (att.isNominal()) {
              // replicate the reference attribute's label list
              labels = new StringBuilder();
              for (n = 0; n < att.numValues(); n++) {
                if (labels.length() > 0)
                  labels.append(",");
                labels.append(att.value(n));
              }
              add.setNominalLabels(labels.toString());
            }
            adds.add(add);
          }
          else {
            index = dataOld.attribute(att.name()).index();
          }
          if (order.length() > 0)
            order.append(",");
          // Reorder uses 1-based attribute indices
          order.append((index + 1));
        }
        // build reorder filter
        reorder = new Reorder();
        reorder.setAttributeIndices(order.toString());
        // build multifilter: all Add filters first, then the Reorder
        filters = new ArrayList<Filter>();
        filters.addAll(adds);
        filters.add(reorder);
        m_Reorder = new MultiFilter();
        m_Reorder.setFilters(filters.toArray(new Filter[filters.size()]));
        // initialize filter
        m_Reorder.setInputFormat(dataOld);
      }
      catch (Exception e) {
        result = handleException("Failed to initialize reorder filter!", e);
      }
    }
  }

  // reorder data
  if (result == null) {
    try {
      if (instOld != null) {
        // single-instance mode
        m_Reorder.input(instOld);
        m_Reorder.batchFinished();
        instNew = m_Reorder.output();
        if (m_KeepRelationName)
          instNew.dataset().setRelationName(dataOld.relationName());
      }
      else {
        // batch mode
        dataNew = Filter.useFilter(dataOld, m_Reorder);
        if (m_KeepRelationName)
          dataNew.setRelationName(dataOld.relationName());
      }
    }
    catch (Exception e) {
      result = handleException("Failed to reorder data!", e);
      instNew = null;
      dataNew = null;
    }
  }

  // forward whichever output was produced
  if (instNew != null)
    m_OutputToken = new Token(instNew);
  else if (dataNew != null)
    m_OutputToken = new Token(dataNew);

  return result;
}
From source file:adams.flow.transformer.WekaSetInstanceValue.java
License:Open Source License
/** * Executes the flow item.//from ww w .jav a 2 s. co m * * @return null if everything is fine, otherwise error message */ @Override protected String doExecute() { String result; Instance inst; int index; result = null; inst = (Instance) m_InputToken.getPayload(); inst = (Instance) inst.copy(); m_Index.setData(inst.dataset()); index = m_Index.getIntIndex(); try { if (m_Value.equals("?")) { inst.setMissing(index); } else { switch (inst.attribute(index).type()) { case Attribute.NUMERIC: inst.setValue(index, Utils.toDouble(m_Value)); break; case Attribute.DATE: inst.setValue(index, inst.attribute(index).parseDate(m_Value)); break; case Attribute.NOMINAL: case Attribute.STRING: inst.setValue(index, m_Value); break; case Attribute.RELATIONAL: result = "Relational attributes cannot be set!"; break; default: result = "Unhandled attribute type: " + inst.attribute(index).type(); } } } catch (Exception e) { result = handleException("Failed to set value: " + m_Index.getIndex() + " -> " + m_Value, e); } // broadcast data if (result == null) m_OutputToken = new Token(inst); return result; }
From source file:adams.flow.transformer.WekaStoreInstance.java
License:Open Source License
/** * Executes the flow item.//ww w. j a va 2s . c o m * * @return null if everything is fine, otherwise error message */ @Override protected String doExecute() { String result; Instance inst; Instances data; Storage storage; result = null; inst = (Instance) m_InputToken.getPayload(); storage = getStorageHandler().getStorage(); // dataset present? if (!storage.has(m_Dataset)) { data = new Instances(inst.dataset(), 0); storage.put(m_Dataset, data); if (isLoggingEnabled()) getLogger().info("Adding dataset to storage: " + m_Dataset); } else { data = (Instances) storage.get(m_Dataset); if (isLoggingEnabled()) getLogger().info("Dataset present in storage: " + m_Dataset); } data.add(inst); storage.put(m_Dataset, data); if (isLoggingEnabled()) getLogger().info("Added instance to storage: " + m_Dataset); // broadcast data m_OutputToken = new Token(data); return result; }
From source file:adams.flow.transformer.WekaStreamEvaluator.java
License:Open Source License
/** * Executes the flow item./*ww w .j av a2 s.c o m*/ * * @return null if everything is fine, otherwise error message */ @Override protected String doExecute() { String result; Instance inst; Instances data; result = null; inst = (Instance) m_InputToken.getPayload(); data = inst.dataset(); if (m_Evaluation == null) { try { m_Evaluation = new Evaluation(data); m_Current = 0; m_Header = data; initOutputBuffer(); m_Output.setHeader(m_Header); } catch (Exception e) { result = handleException("Failed to set up evaluation!", e); } } // evaluate/train if (result == null) { try { if (m_Classifier == null) { m_Classifier = getClassifierInstance(); m_Classifier.buildClassifier(data); } if (m_Current > 0) { if (m_DiscardPredictions) m_Evaluation.evaluateModelOnce(m_Classifier, inst); else m_Evaluation.evaluateModelOnceAndRecordPrediction(m_Classifier, inst); } ((UpdateableClassifier) m_Classifier).updateClassifier(inst); } catch (Exception e) { result = handleException("Failed to evaluate/update the classifier!", e); } } // output? m_Current++; if (m_Current % m_Interval == 0) { if (m_Output instanceof Null) { m_OutputToken = new Token(new WekaEvaluationContainer(m_Evaluation)); } else { if (m_AlwaysUseContainer) m_OutputToken = new Token( new WekaEvaluationContainer(m_Evaluation, null, m_Output.getBuffer().toString())); else m_OutputToken = new Token(m_Output.getBuffer().toString()); } } return result; }
From source file:adams.flow.transformer.WekaStreamFilter.java
License:Open Source License
/**
 * Executes the flow item: pushes the incoming data through the configured
 * stream filter, preserving the relation name if requested, and outputs a
 * token of the same payload type as the input.
 *
 * @return		null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
  String result;
  weka.core.Instances data;
  weka.core.Instance inst;
  adams.data.instance.Instance instA;
  weka.core.Instance filteredInst;
  weka.core.Instances filteredData;
  String relation;
  weka.filters.Filter filter;

  result = null;
  inst = null;
  data = null;
  filteredInst = null;
  filteredData = null;
  filter = (weka.filters.Filter) m_Filter;

  // payload can be a single weka Instance, a full weka Instances dataset,
  // or an adams Instance wrapper (converted to a weka Instance)
  if (m_InputToken.getPayload() instanceof weka.core.Instance)
    inst = (weka.core.Instance) m_InputToken.getPayload();
  else if (m_InputToken.getPayload() instanceof weka.core.Instances)
    data = (weka.core.Instances) m_InputToken.getPayload();
  else
    inst = ((adams.data.instance.Instance) m_InputToken.getPayload()).toInstance();
  // single-instance input: use its dataset as the header for the filter
  if (data == null)
    data = inst.dataset();

  try {
    // initialize filter?
    if (!m_Initialized) {
      result = setUpContainers(filter);
      if (result == null)
        result = updateObject(filter);
      // a header-only copy is sufficient to determine the input format
      filter.setInputFormat(new weka.core.Instances(data, 0));
    }

    if (result == null) {
      // filter data
      relation = data.relationName();
      if (inst == null) {
        // batch mode
        filteredData = Filter.useFilter(data, filter);
        if (m_KeepRelationName)
          filteredData.setRelationName(relation);
      } else {
        // stream mode: one instance in, one instance out
        filter.input(inst);
        filter.batchFinished();
        filteredInst = filter.output();
        if (m_KeepRelationName)
          filteredInst.dataset().setRelationName(relation);
      }

      // build output token, mirroring the type of the input payload
      if (m_InputToken.getPayload() instanceof weka.core.Instance) {
        m_OutputToken = new Token(filteredInst);
      } else if (m_InputToken.getPayload() instanceof weka.core.Instances) {
        m_OutputToken = new Token(filteredData);
      } else {
        instA = new adams.data.instance.Instance();
        instA.set(filteredInst);
        m_OutputToken = new Token(instA);
      }
    }
  } catch (Exception e) {
    result = handleException("Failed to filter data: ", e);
  }

  if (m_OutputToken != null)
    updateProvenance(m_OutputToken);

  return result;
}