List of usage examples for org.apache.mahout.math Vector assign
Vector assign(DoubleFunction function);
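Before the source-file examples below, here is a minimal standalone sketch of the two common assign overloads. It is not taken from any of the listed projects; it only assumes that mahout-math is on the classpath, and the class name, vector size, and values are illustrative.
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.function.DoubleFunction;
import org.apache.mahout.math.function.Functions;

public class VectorAssignSketch {
    public static void main(String[] args) {
        // assign(double): fill every element with the same value
        Vector v = new DenseVector(3).assign(1.0 / 3);

        // assign(DoubleFunction): apply a predefined function element-wise
        Vector exps = v.clone().assign(Functions.EXP);

        // assign(DoubleFunction): apply a custom function element-wise
        Vector squared = v.clone().assign(new DoubleFunction() {
            @Override
            public double apply(double x) {
                return x * x;
            }
        });

        System.out.println(v);
        System.out.println(exps);
        System.out.println(squared);
    }
}
Both overloads mutate the receiver in place and return it, which is why the examples below can chain calls such as new DenseVector(2).assign(SIZE / 2.0) or prior.clone().assign(Functions.EXP).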
From source file:org.qcri.pca.MahoutCompatibilityTest.java
License:Apache License
@Test
public void testMAHOUT_1221() {
    // create a matrix with an unassigned row 0
    Matrix matrix = new SparseMatrix(1, 1);
    Vector view = matrix.viewRow(0);
    final double value = 1.23;
    view.assign(value);
    // test whether the update in the view is reflected in the matrix
    assertEquals("Matrix value", view.getQuick(0), matrix.getQuick(0, 0), EPSILON);
}
From source file:org.trustedanalytics.atk.giraph.algorithms.lbp.LoopyBeliefPropagationComputation.java
License:Apache License
/**
 * Initialize vertex
 *
 * @param vertex of the graph
 */
private void initializeVertex(Vertex<LongWritable, VertexData4LBPWritable, DoubleWritable> vertex) {
    // normalize prior and posterior
    Vector prior = vertex.getValue().getPriorVector();
    Vector posterior = vertex.getValue().getPosteriorVector();
    int nStates = prior.size();
    double sum = 0d;
    for (int i = 0; i < nStates; i++) {
        double v = prior.getQuick(i);
        if (v < 0d) {
            throw new IllegalArgumentException("Vertex ID: " + vertex.getId() + " has negative prior value.");
        } else if (v < MIN_PRIOR_VALUE) {
            v = MIN_PRIOR_VALUE;
            prior.setQuick(i, v);
        }
        sum += v;
    }
    for (int i = 0; i < nStates; i++) {
        posterior.setQuick(i, prior.getQuick(i) / sum);
        prior.setQuick(i, Math.log(posterior.getQuick(i)));
    }
    // collect graph statistics
    VertexType vt = vertex.getValue().getType();
    vt = ignoreVertexType ? VertexType.TRAIN : vt;
    switch (vt) {
    case TRAIN:
        aggregate(SUM_TRAIN_VERTICES, new LongWritable(1));
        break;
    case VALIDATE:
        aggregate(SUM_VALIDATE_VERTICES, new LongWritable(1));
        break;
    case TEST:
        aggregate(SUM_TEST_VERTICES, new LongWritable(1));
        break;
    default:
        throw new IllegalArgumentException("Unknown vertex type: " + vt.toString());
    }
    // if it's not a training vertex, use uniform posterior and don't send out messages
    if (vt != VertexType.TRAIN) {
        posterior.assign(1.0 / nStates);
        return;
    }
    // calculate messages
    IdWithVectorMessage newMessage = new IdWithVectorMessage();
    newMessage.setData(vertex.getId().get());
    // calculate initial belief
    Vector belief = prior.clone();
    for (Edge<LongWritable, DoubleWritable> edge : vertex.getEdges()) {
        double weight = edge.getValue().get();
        if (weight <= 0d) {
            throw new IllegalArgumentException("Vertex ID: " + vertex.getId()
                + " has an edge with negative or zero weight value " + weight);
        }
        for (int i = 0; i < nStates; i++) {
            sum = 0d;
            for (int j = 0; j < nStates; j++) {
                double msg = Math.exp(prior.getQuick(j)
                    + edgePotential(Math.abs(i - j) / (double) (nStates - 1), weight));
                if (maxProduct) {
                    sum = sum > msg ? sum : msg;
                } else {
                    sum += msg;
                }
            }
            belief.setQuick(i, sum > 0d ? Math.log(sum) : Double.MIN_VALUE);
        }
        belief = belief.plus(-belief.maxValue());
        // send out messages
        newMessage.setVector(belief);
        sendMessage(edge.getTargetVertexId(), newMessage);
    }
}
From source file:org.trustedanalytics.atk.giraph.algorithms.lbp.LoopyBeliefPropagationComputation.java
License:Apache License
@Override
public void compute(Vertex<LongWritable, VertexData4LBPWritable, DoubleWritable> vertex,
        Iterable<IdWithVectorMessage> messages) throws IOException {
    long step = getSuperstep();
    if (step == 0) {
        initializeVertex(vertex);
        return;
    }
    // collect messages sent to this vertex
    HashMap<Long, Vector> map = new HashMap<Long, Vector>();
    for (IdWithVectorMessage message : messages) {
        map.put(message.getData(), message.getVector());
    }
    // update posterior according to prior and messages
    VertexData4LBPWritable vertexValue = vertex.getValue();
    VertexType vt = vertexValue.getType();
    vt = ignoreVertexType ? VertexType.TRAIN : vt;
    Vector prior = vertexValue.getPriorVector();
    double nStates = prior.size();
    if (vt != VertexType.TRAIN) {
        // assign a uniform prior for validate/test vertex
        prior = prior.clone().assign(Math.log(1.0 / nStates));
    }
    // sum of prior and messages
    Vector sumPosterior = prior;
    for (IdWithVectorMessage message : messages) {
        sumPosterior = sumPosterior.plus(message.getVector());
    }
    sumPosterior = sumPosterior.plus(-sumPosterior.maxValue());
    // update posterior if this isn't an anchor vertex
    if (prior.maxValue() < anchorThreshold) {
        // normalize posterior
        Vector posterior = sumPosterior.clone().assign(Functions.EXP);
        posterior = posterior.normalize(1d);
        Vector oldPosterior = vertexValue.getPosteriorVector();
        double delta = posterior.minus(oldPosterior).norm(1d);
        // aggregate deltas
        switch (vt) {
        case TRAIN:
            aggregate(SUM_TRAIN_DELTA, new DoubleWritable(delta));
            break;
        case VALIDATE:
            aggregate(SUM_VALIDATE_DELTA, new DoubleWritable(delta));
            break;
        case TEST:
            aggregate(SUM_TEST_DELTA, new DoubleWritable(delta));
            break;
        default:
            throw new IllegalArgumentException("Unknown vertex type: " + vt.toString());
        }
        // update posterior
        vertexValue.setPosteriorVector(posterior);
    }
    if (step < maxSupersteps) {
        // if it's not a training vertex, don't send out messages
        if (vt != VertexType.TRAIN) {
            return;
        }
        IdWithVectorMessage newMessage = new IdWithVectorMessage();
        newMessage.setData(vertex.getId().get());
        // update belief
        Vector belief = prior.clone();
        for (Edge<LongWritable, DoubleWritable> edge : vertex.getEdges()) {
            double weight = edge.getValue().get();
            long id = edge.getTargetVertexId().get();
            Vector tempVector = sumPosterior;
            if (map.containsKey(id)) {
                tempVector = sumPosterior.minus(map.get(id));
            }
            for (int i = 0; i < nStates; i++) {
                double sum = 0d;
                for (int j = 0; j < nStates; j++) {
                    double msg = Math.exp(tempVector.getQuick(j)
                        + edgePotential(Math.abs(i - j) / (nStates - 1), weight));
                    if (maxProduct) {
                        sum = sum > msg ? sum : msg;
                    } else {
                        sum += msg;
                    }
                }
                belief.setQuick(i, sum > 0d ? Math.log(sum) : Double.MIN_VALUE);
            }
            belief = belief.plus(-belief.maxValue());
            newMessage.setVector(belief);
            sendMessage(edge.getTargetVertexId(), newMessage);
        }
    } else {
        // convert prior back to regular scale before output
        prior = vertexValue.getPriorVector();
        prior = prior.assign(Functions.EXP);
        vertexValue.setPriorVector(prior);
        vertex.voteToHalt();
    }
}
From source file:sample.DisplayClustering.java
License:Apache License
/**
 * This method plots points and colors them according to their cluster
 * membership, rather than drawing ellipses.
 *
 * As of commit, this method is used only by K-means spectral clustering.
 * Since the cluster assignments are set within the eigenspace of the data, it
 * is not inherent that the original data cluster as they would in K-means:
 * that is, as symmetric gaussian mixtures.
 *
 * Since Spectral K-Means uses K-Means to cluster the eigenspace data, the raw
 * output is not directly usable. Rather, the cluster assignments from the raw
 * output need to be transferred back to the original data. As such, this
 * method will read the SequenceFile cluster results of K-means and transfer
 * the cluster assignments to the original data, coloring them appropriately.
 *
 * @param g2
 * @param data
 */
protected static void plotClusteredSampleData(Graphics2D g2, Path data) {
    double sx = (double) res / DS;
    g2.setTransform(AffineTransform.getScaleInstance(sx, sx));
    g2.setColor(Color.BLACK);
    Vector dv = new DenseVector(2).assign(SIZE / 2.0);
    plotRectangle(g2, new DenseVector(2).assign(2), dv);
    plotRectangle(g2, new DenseVector(2).assign(-2), dv);
    // plot the sample data, colored according to the cluster they belong to
    dv.assign(0.03);
    Path clusteredPointsPath = new Path(data, "clusteredPoints");
    Path inputPath = new Path(clusteredPointsPath, "part-m-00000");
    Map<Integer, Color> colors = new HashMap<>();
    int point = 0;
    for (Pair<IntWritable, WeightedVectorWritable> record
            : new SequenceFileIterable<IntWritable, WeightedVectorWritable>(inputPath, new Configuration())) {
        int clusterId = record.getFirst().get();
        VectorWritable v = SAMPLE_DATA.get(point++);
        Integer key = clusterId;
        if (!colors.containsKey(key)) {
            colors.put(key, COLORS[Math.min(COLORS.length - 1, colors.size())]);
        }
        plotClusteredRectangle(g2, v.get(), dv, colors.get(key));
    }
}