Example usage for weka.core Instance value

List of usage examples for weka.core Instance value

Introduction

On this page you can find example usage for weka.core Instance value.

Prototype

public double value(Attribute att);

Document

Returns an instance's attribute value in internal format.
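
To make "internal format" concrete before the usage examples below, here is a minimal, self-contained sketch (the attribute names and data are made up, and the Weka 3.7+ API is assumed; some examples below use the older 3.6 API in which Instance is a concrete class). Numeric attributes return the stored value itself, while nominal attributes return the index of the label in the attribute's definition, as a double.

import java.util.ArrayList;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class ValueDemo {
    public static void main(String[] args) {
        // A tiny dataset with one numeric and one nominal attribute (hypothetical names).
        ArrayList<String> colors = new ArrayList<String>();
        colors.add("red");
        colors.add("blue");
        ArrayList<Attribute> atts = new ArrayList<Attribute>();
        atts.add(new Attribute("weight"));        // numeric
        atts.add(new Attribute("color", colors)); // nominal
        Instances data = new Instances("demo", atts, 1);

        Instance inst = new DenseInstance(2);
        inst.setDataset(data);
        inst.setValue(atts.get(0), 42.5);
        inst.setValue(atts.get(1), "blue");

        System.out.println(inst.value(atts.get(0))); // 42.5 -- the numeric value itself
        System.out.println(inst.value(atts.get(1))); // 1.0  -- the index of "blue" in the nominal list
    }
}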

Usage

From source file:com.relationalcloud.partitioning.explanation.ExplanationHandler.java

License:Open Source License

/**
 * Repeats the selection from the database, removing duplicates (they would only
 * increase execution time), and runs the tuples through the classifier to
 * populate the justifiedpartition column.
 *
 * @param tableProcessed the table whose tuples are classified
 * @param classifier the trained classifier
 * @param attributes the attribute (column) names to select
 * @param conn an open database connection
 * @param numbPart the number of partitions
 * @param enumclassvalues enumeration of the possible class values
 * @throws SQLException
 * @throws Exception
 */
public void populateJustifiedColumn(String tableProcessed, Classifier classifier, ArrayList<String> attributes,
        Connection conn, int numbPart, Enumeration enumclassvalues) throws SQLException, Exception {
    // NOTE: this always short-circuits to labelTest(); the remainder of the method is currently unreachable
    if (true) {
        labelTest(tableProcessed, classifier, conn);
        return;
    }

    tableProcessed = removeQuotes(tableProcessed);

    // get from the DB the tuples content and their partitioning column
    String sqlstring = "SELECT distinct g.tupleid, ";
    for (String sc : attributes) {
        sqlstring += "s." + sc + ", ";
    }
    sqlstring += "g." + pcol + " FROM " + "(SELECT distinct tupleid," + pcol + " FROM `" + testingtable
            + "` WHERE tableid = '" + tableProcessed + "') AS g, relcloud_" + tableProcessed + " AS s "
            + "WHERE s.relcloud_id = g.tupleid;";

    System.out.println(sqlstring);
    Statement stmt = conn.createStatement();

    // initialize the testing table with a hash-partition-like distribution
    // to avoid complaints from the classifier
    if (!testingtable.equals(sampledtrainingtable)) {
        int i = 0;

        Object o = enumclassvalues.nextElement();

        // set everything to an existing value to ensure that every field is
        // covered
        stmt.executeUpdate("UPDATE " + testingtable + " SET " + pcol + "=" + o + " WHERE tableid = '"
                + tableProcessed + "'");
        // and then sprinkle in a bunch of other values (unsure whether this is
        // required)
        while (enumclassvalues.hasMoreElements()) {
            o = enumclassvalues.nextElement();

            // FIXME there might still be an issue in which tupleid%i does not exist,
            // and thus one of the "o" values never appears in the instances...
            stmt.executeUpdate("UPDATE " + testingtable + " SET " + pcol + "=" + o + " WHERE tupleid%"
                    + numbPart + "=" + i + " AND tableid = '" + tableProcessed + "'");
            i++;
        }
    }

    ResultSet res = stmt.executeQuery(sqlstring);
    // create an instance from the resultset
    Instances data_tupleid = WekaHelper.retrieveInstanceFromResultSetComplete(res, dbPropertyFile);
    res.close();

    data_tupleid.setClassIndex(data_tupleid.numAttributes() - 1);
    Instances data_no_tupleid = makeLastNominal(data_tupleid);
    data_no_tupleid.setClassIndex(data_no_tupleid.numAttributes() - 1);
    // remove tupleid from data_no_tupleid, still available in data_tupleid
    data_no_tupleid.deleteAttributeAt(0);

    // if(data_no_tupleid.classAttribute().numValues()>1){
    System.out.println("Running the tuples through the classifier to populate " + explainedPartitionCol);

    // use data that still has the tupleid and newData for the classification
    Enumeration enum_data_tupleid = data_tupleid.enumerateInstances();
    Enumeration enum_data_no_tupleid = data_no_tupleid.enumerateInstances();

    PreparedStatement updateJustCol = conn.prepareStatement("UPDATE `" + testingtable + "` SET `"
            + explainedPartitionCol + "` = ? " + "WHERE tableid = '" + tableProcessed + "' AND tupleid = ?;");

    while (enum_data_tupleid.hasMoreElements() && enum_data_no_tupleid.hasMoreElements()) {

        Instance tupIDinstance = (Instance) enum_data_tupleid.nextElement();
        Instance instance = (Instance) enum_data_no_tupleid.nextElement();

        double part = classifier.classifyInstance(instance);
        // missingValue() is NaN, so an == comparison can never be true; test with Double.isNaN instead
        if (Double.isNaN(part))
            System.err.println("No classification for: " + instance.toString());
        updateJustCol.setInt(1, (int) part);
        updateJustCol.setInt(2, (int) tupIDinstance.value(0));

        // System.out.println(tableProcessed+" "+ instance.value(0) + " " +
        // tupIDinstance.classValue() +" "+ part);

        updateJustCol.execute();
        updateJustCol.clearParameters();

    }

    updateJustCol.close();

}
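
In the loop above, tupIDinstance.value(0) uses the value(int attIndex) overload rather than the value(Attribute att) overload from the prototype; both return the same internal double, and the cast to int works because the tupleid column is numeric. A minimal sketch of the equivalence, assuming data is any loaded Instances object whose first attribute is a numeric id:

// Both overloads return the same internal double for the same column.
Instance first = data.firstInstance();
double byIndex = first.value(0);                      // value(int attIndex)
double byAttribute = first.value(data.attribute(0));  // value(Attribute att)
// byIndex == byAttribute; the cast below is safe only because the id holds integral values
int tupleId = (int) byIndex;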

From source file:com.spread.experiment.tempuntilofficialrelease.ClassificationViaClustering108.java

License:Open Source License

/**
 * Returns class probability distribution for the given instance.
 *
 * @param instance the instance to be classified
 * @return the class probabilities
 * @throws Exception if an error occurred during the prediction
 */
@Override
public double[] distributionForInstance(Instance instance) throws Exception {

    if (m_ZeroR != null) {
        return m_ZeroR.distributionForInstance(instance);
    } else {
        double[] result = new double[instance.numClasses()];

        if (m_ActualClusterer != null) {
            // build new instance
            Instances tempData = m_ClusteringHeader.stringFreeStructure();
            double[] values = new double[tempData.numAttributes()];
            int n = 0;
            for (int i = 0; i < instance.numAttributes(); i++) {
                if (i == instance.classIndex()) {
                    continue;
                }
                if (instance.attribute(i).isString()) {
                    values[n] = tempData.attribute(n).addStringValue(instance.stringValue(i));
                } else if (instance.attribute(i).isRelationValued()) {
                    values[n] = tempData.attribute(n).addRelation(instance.relationalValue(i));
                } else {
                    values[n] = instance.value(i);
                }
                n++;
            }
            Instance newInst = new DenseInstance(instance.weight(), values);
            newInst.setDataset(tempData);

            if (!getLabelAllClusters()) {

                // determine cluster/class
                double r = m_ClustersToClasses[m_ActualClusterer.clusterInstance(newInst)];
                if (r == -1) {
                    return result; // Unclassified
                } else {
                    result[(int) r] = 1.0;
                    return result;
                }
            } else {
                double[] classProbs = new double[instance.numClasses()];
                double[] dist = m_ActualClusterer.distributionForInstance(newInst);
                for (int i = 0; i < dist.length; i++) {
                    for (int j = 0; j < instance.numClasses(); j++) {
                        classProbs[j] += dist[i] * m_ClusterClassProbs[i][j];
                    }
                }
                Utils.normalize(classProbs);
                return classProbs;
            }
        } else {
            return result; // Unclassified
        }
    }
}
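
One detail in the conversion loop above is worth calling out: for string attributes, instance.value(i) would return the index of the string in the source header, which is meaningless in a different header, so the value is re-registered in the target header via addStringValue and the returned index is stored instead. A hedged sketch of that pattern in isolation (the variable names are made up):

/**
 * Copies one attribute value from inst into values[i], remapping string values
 * into the target header; both headers are assumed to have the same layout.
 */
static void copyValue(Instance inst, Instances target, double[] values, int i) {
    if (inst.attribute(i).isString()) {
        // addStringValue stores the string in the target header and returns its new index
        values[i] = target.attribute(i).addStringValue(inst.stringValue(i));
    } else {
        // numeric values copy directly; for nominal attributes this assumes both headers
        // list the labels in the same order
        values[i] = inst.value(i);
    }
}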

From source file:com.tum.classifiertest.FastRandomTree.java

License:Open Source License

/**
 * Computes class distribution of an instance using the FastRandomTree.<p>
 *
 * In Weka's RandomTree, the distributions were normalized so that all
 * probabilities sum to 1; this would abolish the effect of instance weights
 * on voting. In FastRandomForest 0.97 onwards, the distributions are
 * normalized by dividing with the number of instances going into a leaf.<p>
 *
 * @param instance the instance to compute the distribution for
 * @return the computed class distribution
 * @throws Exception if computation fails
 */
@Override
public double[] distributionForInstance(Instance instance) throws Exception {

    double[] returnedDist = null;

    if (m_Attribute > -1) { // ============================ node is not a leaf

        if (instance.isMissing(m_Attribute)) { // ---------------- missing value

            returnedDist = new double[m_MotherForest.getM_Info().numClasses()];
            // split instance up
            for (int i = 0; i < m_Successors.length; i++) {
                double[] help = m_Successors[i].distributionForInstance(instance);
                if (help != null) {
                    for (int j = 0; j < help.length; j++) {
                        returnedDist[j] += m_Prop[i] * help[j];
                    }
                }
            }

        } else if (m_MotherForest.getM_Info().attribute(m_Attribute).isNominal()) { // ------ nominal

            //returnedDist = m_Successors[(int) instance.value(m_Attribute)]
            //        .distributionForInstance(instance);

            // 0.99: new - binary splits (also) for nominal attributes
            if (instance.value(m_Attribute) == m_SplitPoint) {
                returnedDist = m_Successors[0].distributionForInstance(instance);
            } else {
                returnedDist = m_Successors[1].distributionForInstance(instance);
            }

        } else { // ------------------------------------------ numeric attributes

            if (instance.value(m_Attribute) < m_SplitPoint) {
                returnedDist = m_Successors[0].distributionForInstance(instance);
            } else {
                returnedDist = m_Successors[1].distributionForInstance(instance);
            }
        }

        return returnedDist;

    } else { // =============================================== node is a leaf

        return m_ClassProbs;

    }

}
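
The javadoc above contrasts two ways of turning the weighted class counts collected in a leaf into the returned distribution. A toy, hypothetical illustration of the difference (not FastRandomForest's actual code): suppose four instances of weight 2.0 each reached a leaf, producing counts {6.0, 2.0}.

// Toy numbers only; illustrates the normalization difference described above.
double[] counts = {6.0, 2.0};     // weighted class counts in the leaf
int numInstancesInLeaf = 4;       // the instances carried weight 2.0 each

// RandomTree style: scale so the probabilities sum to 1 -> {0.75, 0.25};
// the extra weight that reached this leaf no longer influences the vote.
double sum = counts[0] + counts[1];
double[] sumToOne = {counts[0] / sum, counts[1] / sum};

// FastRandomForest 0.97+ style: divide by the number of instances in the leaf
// -> {1.5, 0.5}; when votes from many trees are summed, heavier leaves count more.
double[] perInstance = {counts[0] / numInstancesInLeaf, counts[1] / numInstancesInLeaf};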

From source file:control.CosineDistance.java

License:Open Source License

/**
 * Calculates the distance between two instances.
 *
 * @param first    the first instance
 * @param second    the second instance
 * @return       the distance between the two given instances
 */
public double distance(Instance first, Instance second) {

    HashMap<String, Double> fInstance = new HashMap<String, Double>();
    HashMap<String, Double> sInstance = new HashMap<String, Double>();

    for (int i = 0; i < first.numAttributes(); i++) {
        fInstance.put(first.attribute(i).name(), first.value(i));
        sInstance.put(second.attribute(i).name(), second.value(i));
    }

    return 1 - CosineSimilarity.calculateCosineSimilarity(fInstance, sInstance);
}
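
CosineSimilarity.calculateCosineSimilarity is a project-specific helper that is not shown here. For reference, a self-contained sketch of the same cosine distance computed directly from instance.value(i), assuming purely numeric attributes:

/** Cosine distance over the raw attribute values of two instances (numeric attributes assumed). */
static double cosineDistance(Instance first, Instance second) {
    double dot = 0.0, normFirst = 0.0, normSecond = 0.0;
    for (int i = 0; i < first.numAttributes(); i++) {
        double a = first.value(i);
        double b = second.value(i);
        dot += a * b;
        normFirst += a * a;
        normSecond += b * b;
    }
    // Guard against zero vectors to avoid dividing by zero.
    if (normFirst == 0.0 || normSecond == 0.0) {
        return 1.0;
    }
    return 1.0 - dot / (Math.sqrt(normFirst) * Math.sqrt(normSecond));
}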

From source file:control.CosineDistance.java

License:Open Source License

/**
 * Returns true if the value of the given dimension is smaller than or equal to
 * the value to be compared with.
 * 
 * @param instance    the instance the value is taken from
 * @param dim    the dimension of the value
 * @param value    the value to compare with
 * @return       true if the instance's value is smaller than or equal to value
 */
public boolean valueIsSmallerEqual(Instance instance, int dim, double value) { //This stays
    return instance.value(dim) <= value;
}

From source file:controller.BothClassificationsServlet.java

@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);

    String action = request.getParameter("action");

    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");

        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathBothClassifications = path + "/" + aux + "-bothClassifications.txt";

        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));

        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;
        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }

        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput, name, columns, types,
                positions, counter, range);
        try {
            J48 j48 = new J48();

            BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTraining = new Instances(readerTraining);
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);

            j48.buildClassifier(instancesTraining);

            BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput));
            //BufferedReader readerTest = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTest = new Instances(readerTest);
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            int correctsDecisionTree = 0;

            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = j48.classifyInstance(instance);

                if (correctValue == classification) {
                    correctsDecisionTree++;
                }
            }

            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(j48, instancesTest);

            PrintWriter writer = new PrintWriter(
                    new BufferedWriter(new FileWriter(pathBothClassifications, false)));

            writer.println("?rvore de Deciso\n\n");

            writer.println(j48.toString());

            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());

            NaiveBayes naiveBayes = new NaiveBayes();

            naiveBayes.buildClassifier(instancesTraining);

            eval = new Evaluation(instancesTraining);
            eval.evaluateModel(naiveBayes, instancesTest);

            int correctsNaiveBayes = 0;

            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = naiveBayes.classifyInstance(instance);

                if (correctValue == classification) {
                    correctsNaiveBayes++;
                }
            }

            writer.println("Naive Bayes\n\n");

            writer.println(naiveBayes.toString());

            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());

            writer.close();

            response.sendRedirect("BothClassifications?action=view&correctsDecisionTree=" + correctsDecisionTree
                    + "&correctsNaiveBayes=" + correctsNaiveBayes + "&totalTest=" + instancesTest.size()
                    + "&totalTrainig=" + instancesTraining.size() + "&range=" + range + "&fileName=" + aux
                    + "-bothClassifications.txt");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=decisionTree");
        }

        break;
    }
    default:
        response.sendError(404);
    }
}
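
Since the class index is set to the last attribute a few lines earlier, the expression instance.value(instance.attribute(instancesTest.numAttributes() - 1)) is equivalent to the shorter instance.classValue(); the same pattern recurs in the two servlets below. A minimal sketch of the shorter form, reusing the variables from the servlet above:

// Equivalent accuracy loop using classValue() (class index already set to the last attribute).
for (int i = 0; i < instancesTest.size(); i++) {
    Instance instance = instancesTest.get(i);
    double correctValue = instance.classValue();          // same as value(classIndex)
    double classification = j48.classifyInstance(instance);
    if (correctValue == classification) {
        correctsDecisionTree++;
    }
}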

From source file:controller.DecisionTreeServlet.java

@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);

    String action = request.getParameter("action");

    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");

        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathDecisionTree = path + "/" + aux + "-decisionTree.txt";

        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));

        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;
        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }

        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput, name, columns, types,
                positions, counter, range);
        try {
            J48 j48 = new J48();

            BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTraining = new Instances(readerTraining);
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);

            j48.buildClassifier(instancesTraining);

            BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput));
            //BufferedReader readerTest = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTest = new Instances(readerTest);
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            int corrects = 0;
            int truePositive = 0;
            int trueNegative = 0;
            int falsePositive = 0;
            int falseNegative = 0;

            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = j48.classifyInstance(instance);

                if (correctValue == classification) {
                    corrects++;
                }
                if (correctValue == 1 && classification == 1) {
                    truePositive++;
                }
                if (correctValue == 1 && classification == 0) {
                    falseNegative++;
                }
                if (correctValue == 0 && classification == 1) {
                    falsePositive++;
                }
                if (correctValue == 0 && classification == 0) {
                    trueNegative++;
                }
            }

            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(j48, instancesTest);

            PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(pathDecisionTree, false)));

            writer.println(j48.toString());

            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());

            writer.close();

            response.sendRedirect("DecisionTree?action=view&corrects=" + corrects + "&totalTest="
                    + instancesTest.size() + "&totalTrainig=" + instancesTraining.size() + "&truePositive="
                    + truePositive + "&trueNegative=" + trueNegative + "&falsePositive=" + falsePositive
                    + "&falseNegative=" + falseNegative + "&fileName=" + aux + "-decisionTree.txt");
        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=decisionTree");
        }

        break;
    }
    default:
        response.sendError(404);
    }
}

From source file:controller.NaiveBayesServlet.java

@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    String dir = "/data/";
    String path = getServletContext().getRealPath(dir);

    String action = request.getParameter("action");

    switch (action) {
    case "create": {
        String fileName = request.getParameter("file");

        String aux = fileName.substring(0, fileName.indexOf("."));
        String pathInput = path + "/" + request.getParameter("file");
        String pathTrainingOutput = path + "/" + aux + "-training-arff.txt";
        String pathTestOutput = path + "/" + aux + "-test-arff.txt";
        String pathNaivebayes = path + "/" + aux + "-naiveBayes.txt";

        String name = request.getParameter("name");
        int range = Integer.parseInt(request.getParameter("range"));

        int size = Integer.parseInt(request.getParameter("counter"));
        String[] columns = new String[size];
        String[] types = new String[size];
        int[] positions = new int[size];
        int counter = 0;

        for (int i = 0; i < size; i++) {
            if (request.getParameter("column-" + (i + 1)) != null) {
                columns[counter] = request.getParameter("column-" + (i + 1));
                types[counter] = request.getParameter("type-" + (i + 1));
                positions[counter] = Integer.parseInt(request.getParameter("position-" + (i + 1)));
                counter++;
            }
        }

        FormatFiles.convertTxtToArff(pathInput, pathTrainingOutput, pathTestOutput, name, columns, types,
                positions, counter, range);

        try {
            NaiveBayes naiveBayes = new NaiveBayes();

            BufferedReader readerTraining = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTraining = new Instances(readerTraining);
            instancesTraining.setClassIndex(instancesTraining.numAttributes() - 1);

            naiveBayes.buildClassifier(instancesTraining);

            BufferedReader readerTest = new BufferedReader(new FileReader(pathTestOutput));
            //BufferedReader readerTest = new BufferedReader(new FileReader(pathTrainingOutput));
            Instances instancesTest = new Instances(readerTest);
            instancesTest.setClassIndex(instancesTest.numAttributes() - 1);

            Evaluation eval = new Evaluation(instancesTraining);
            eval.evaluateModel(naiveBayes, instancesTest);

            int corrects = 0;
            int truePositive = 0;
            int trueNegative = 0;
            int falsePositive = 0;
            int falseNegative = 0;

            for (int i = 0; i < instancesTest.size(); i++) {
                Instance instance = instancesTest.get(i);
                double correctValue = instance.value(instance.attribute(instancesTest.numAttributes() - 1));
                double classification = naiveBayes.classifyInstance(instance);

                if (correctValue == classification) {
                    corrects++;
                }
                if (correctValue == 1 && classification == 1) {
                    truePositive++;
                }
                if (correctValue == 1 && classification == 0) {
                    falseNegative++;
                }
                if (correctValue == 0 && classification == 1) {
                    falsePositive++;
                }
                if (correctValue == 0 && classification == 0) {
                    trueNegative++;
                }
            }

            PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(pathNaivebayes, false)));

            writer.println(naiveBayes.toString());

            writer.println("");
            writer.println("");
            writer.println("Results");
            writer.println(eval.toSummaryString());

            writer.close();

            response.sendRedirect(
                    "NaiveBayes?action=view&corrects=" + corrects + "&totalTest=" + instancesTest.size()
                            + "&totalTrainig=" + instancesTraining.size() + "&range=" + range + "&truePositive="
                            + truePositive + "&trueNegative=" + trueNegative + "&falsePositive=" + falsePositive
                            + "&falseNegative=" + falseNegative + "&fileName=" + aux + "-naiveBayes.txt");

        } catch (Exception e) {
            System.out.println(e.getMessage());
            response.sendRedirect("Navigation?action=naiveBayes");
        }

        break;
    }
    default:
        response.sendError(404);
    }

}

From source file:core.ClusterEvaluationEX.java

License:Open Source License

/**
 * Returns the centroids of the clusters.
 *
 * @param num offset added to each cluster ID
 */
public Instances getCentroids(int num) {
    FastVector atts = new FastVector();
    Attribute clusterID = new Attribute("clusterID");
    atts.addElement(clusterID);
    Instances data = new Instances("centroids", atts, m_numClusters);
    for (int i = 0; i < ID.numAttributes() - 1; i++) {
        Attribute att = new Attribute("Subject" + String.valueOf(i));
        atts.addElement(att);
    }
    double[] map = new double[m_numClusters];
    double[] temp = new double[m_clusterAssignments.length];
    System.arraycopy(m_clusterAssignments, 0, temp, 0, m_clusterAssignments.length);
    int n = map.length;
    for (int i = 0; i < m_clusterAssignments.length; i++) {
        double id = temp[i];
        if (id == -1)
            continue;
        boolean flag = true;
        for (int j = 0; j < temp.length; j++) {
            if (temp[j] == id) {
                temp[j] = -1;
            }
        }
        if (flag && n != -1) {
            map[map.length - n] = id + num;
            n--;
        } else if (n != -1) {
            continue;
        } else {
            break;
        }
    }
    for (int i = 0; i < map.length; i++) {
        double id = map[i];
        double[] averatts = new double[ID.numAttributes()];
        int count = 0;
        for (int j = 0; j < ID.numInstances(); j++) {
            Instance iter = ID.instance(j);
            if (iter.value(0) == id) {
                averatts = CommonMethords.add(averatts, iter.toDoubleArray());
                count++;
            }
        }
        averatts = CommonMethords.calAver(averatts, count);
        Instance ins = new Instance(1, averatts);
        data.add(ins);
    }
    return data;
}
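
Note that new Instance(1, averatts) and ID.instance(j) use the Weka 3.6 API, where Instance is a concrete class; in Weka 3.7+ Instance became an interface and the equivalent construction uses DenseInstance. A one-line sketch of the newer form:

// Weka 3.7+ equivalent of `new Instance(1, averatts)`: weight 1.0 plus the values array.
Instance ins = new DenseInstance(1.0, averatts);
data.add(ins);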

From source file:core.DatabaseSaverEx.java

License:Open Source License

/**
 * inserts the given instance into the table.
 *
 * @param inst the instance to insert
 * @throws Exception if something goes wrong
 */
public void writeInstance(Instance inst) throws Exception {

    StringBuffer insert = new StringBuffer();
    insert.append("INSERT INTO ");
    insert.append(m_tableName);
    insert.append(" VALUES ( ");
    if (m_id) {
        insert.append(m_count);
        insert.append(", ");
        m_count++;
    }
    for (int j = 0; j < inst.numAttributes(); j++) {
        if (inst.isMissing(j))
            insert.append("NULL");
        else {
            if ((inst.attribute(j)).isDate())
                insert.append("'" + m_DateFormat.format((long) inst.value(j)) + "'");
            else if ((inst.attribute(j)).isNumeric())
                insert.append(inst.value(j));
            else {
                String stringInsert = "'" + inst.stringValue(j) + "'";
                if (stringInsert.length() > 2)
                    stringInsert = stringInsert.replaceAll("''", "'");
                insert.append(stringInsert);
            }
        }
        if (j != inst.numAttributes() - 1)
            insert.append(", ");
    }
    insert.append(" )");
    //System.out.println(insert.toString());
    if (m_DataBaseConnection.update(insert.toString()) < 1) {
        throw new IOException("Tuple cannot be inserted.");
    } else {
        m_DataBaseConnection.close();
    }
}
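
The date branch above works because value(j) for a date attribute returns the timestamp in internal format, i.e. milliseconds since the epoch stored as a double, which is why it can be cast to long and handed to a DateFormat. A small hedged sketch of that round trip (the method name is made up):

/** Formats a date attribute's internal value (milliseconds since the epoch) for display. */
static String formatDateValue(Instance inst, Attribute dateAtt, java.text.DateFormat fmt) {
    // value() returns the timestamp as a double; cast to long before wrapping it in a Date.
    return fmt.format(new java.util.Date((long) inst.value(dateAtt)));
}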