Example usage for com.mongodb BasicDBList BasicDBList

List of usage examples for com.mongodb BasicDBList BasicDBList

Introduction

On this page you can find example usage for com.mongodb BasicDBList BasicDBList.

Prototype

BasicDBList

Source Link

Usage

From source file:com.ikanow.infinit.e.processing.custom.utils.HadoopUtils.java

License:Open Source License

/**
 * Reads the "part-*" text output files of a custom map-reduce job and converts
 * each line into a BasicDBObject. Lines are split on the first tab: two-part
 * lines become {key, value}; lines with no tab become {value} only.
 *
 * @param cmr    the custom map-reduce job whose HDFS output directory is read
 * @param nLimit maximum number of records to return (&lt;= 0 means unlimited)
 * @param fields unused for text output; kept for signature parity with
 *               getBsonFromSequenceFile
 * @return a BasicDBList of one BasicDBObject per output line
 */
public static BasicDBList getBsonFromTextFiles(CustomMapReduceJobPojo cmr, int nLimit, String fields)
        throws IOException, SAXException, ParserConfigurationException {

    BasicDBList dbl = new BasicDBList();

    PropertiesManager props = new PropertiesManager();
    Configuration conf = getConfiguration(props);

    Path pathDir = HadoopUtils.getPathForJob(cmr, conf, false);
    FileSystem fs = FileSystem.get(conf);

    int nRecords = 0;
    FileStatus[] files = fs.globStatus(new Path(pathDir.toString() + "/part-*"));
    for (FileStatus file : files) {
        if (file.getLen() > 0) {
            FSDataInputStream in = fs.open(file.getPath());
            BufferedReader bin = new BufferedReader(new InputStreamReader(in));
            try {
                for (;;) {
                    String s = bin.readLine();
                    if (null == s)
                        break;

                    String[] keyValue = s.split("\t", 2);
                    BasicDBObject dbo = new BasicDBObject();
                    if (keyValue.length > 1) {
                        dbo.put("key", keyValue[0]);
                        dbo.put("value", keyValue[1]);
                    } else {
                        dbo.put("value", keyValue[0]);
                    }
                    dbl.add(dbo);
                    nRecords++;
                    // BUGFIX: nLimit was previously ignored by the text-file variant,
                    // unlike getBsonFromSequenceFile which honors it.
                    if ((nLimit > 0) && (nRecords >= nLimit)) {
                        return dbl;
                    }
                }
            } finally {
                // BUGFIX: the reader (and its underlying stream) was leaked if
                // readLine() threw; closing the wrapper also closes the stream.
                bin.close();
            }
        }
    }
    return dbl;
}

From source file:com.ikanow.infinit.e.processing.custom.utils.HadoopUtils.java

License:Open Source License

/**
 * Reads the sequence-file output of a custom map-reduce job and converts each
 * (key, value) record into a BasicDBObject with "key" and "value" entries.
 * Supported Writable types: Text, DoubleWritable, IntWritable, LongWritable,
 * BSONWritable, and (values only) Mahout VectorWritable, WeightedVectorWritable
 * and ClusterWritable. Unrecognized value types are reported under
 * "unknownValue" as the class name.
 *
 * @param cmr    the custom map-reduce job whose HDFS output directory is read
 * @param nLimit maximum number of records to return (&lt;= 0 means unlimited)
 * @param fields comma-separated field-removal spec; only top-level fields and
 *               one level of "value."-prefixed nesting are honored
 * @return a BasicDBList of converted records
 */
public static BasicDBList getBsonFromSequenceFile(CustomMapReduceJobPojo cmr, int nLimit, String fields)
        throws SAXException, IOException, ParserConfigurationException {

    BasicDBList dbl = new BasicDBList();

    PropertiesManager props = new PropertiesManager();
    Configuration conf = getConfiguration(props);

    Path pathDir = HadoopUtils.getPathForJob(cmr, conf, false);

    @SuppressWarnings({ "unchecked", "rawtypes" })
    SequenceFileDirIterable<? extends Writable, ? extends Writable> seqFileDir = new SequenceFileDirIterable(
            pathDir, PathType.LIST, PathFilters.logsCRCFilter(), conf);

    // Very basic, only allow top level, 1 level of nesting, and field removal
    HashSet<String> fieldLookup = null;
    if (null != fields) {
        fieldLookup = new HashSet<String>();
        String[] fieldArray = fields.split(",");
        for (String field : fieldArray) {
            // Only the part before any ":" is used as the field name.
            String[] fieldDecomp = field.split(":");
            fieldLookup.add(fieldDecomp[0]);
        }
    }

    int nRecords = 0;
    for (Pair<? extends Writable, ? extends Writable> record : seqFileDir) {
        BasicDBObject element = new BasicDBObject();

        // KEY: stringify primitive writables; BSON keys are converted whole.

        Writable key = record.getFirst();
        if (key instanceof org.apache.hadoop.io.Text) {
            org.apache.hadoop.io.Text writable = (org.apache.hadoop.io.Text) key;
            element.put("key", writable.toString());
        } else if (key instanceof org.apache.hadoop.io.DoubleWritable) {
            org.apache.hadoop.io.DoubleWritable writable = (org.apache.hadoop.io.DoubleWritable) key;
            element.put("key", Double.toString(writable.get()));
        } else if (key instanceof org.apache.hadoop.io.IntWritable) {
            org.apache.hadoop.io.IntWritable writable = (org.apache.hadoop.io.IntWritable) key;
            element.put("key", Integer.toString(writable.get()));
        } else if (key instanceof org.apache.hadoop.io.LongWritable) {
            org.apache.hadoop.io.LongWritable writable = (org.apache.hadoop.io.LongWritable) key;
            element.put("key", Long.toString(writable.get()));
        } else if (key instanceof BSONWritable) {
            element.put("key", MongoDbUtil.convert((BSONWritable) key));
        }

        // VALUE: same conversions as the key, plus the Mahout vector/cluster types.

        Writable value = record.getSecond();
        if (value instanceof org.apache.hadoop.io.Text) {
            org.apache.hadoop.io.Text writable = (org.apache.hadoop.io.Text) value;
            element.put("value", writable.toString());
        } else if (value instanceof org.apache.hadoop.io.DoubleWritable) {
            org.apache.hadoop.io.DoubleWritable writable = (org.apache.hadoop.io.DoubleWritable) value;
            element.put("value", Double.toString(writable.get()));
        } else if (value instanceof org.apache.hadoop.io.IntWritable) {
            org.apache.hadoop.io.IntWritable writable = (org.apache.hadoop.io.IntWritable) value;
            element.put("value", Integer.toString(writable.get()));
        } else if (value instanceof org.apache.hadoop.io.LongWritable) {
            org.apache.hadoop.io.LongWritable writable = (org.apache.hadoop.io.LongWritable) value;
            element.put("value", Long.toString(writable.get()));
        } else if (value instanceof BSONWritable) {
            element.put("value", MongoDbUtil.convert((BSONWritable) value));
        } else if (value instanceof org.apache.mahout.math.VectorWritable) {
            Vector vec = ((org.apache.mahout.math.VectorWritable) value).get();
            BasicDBList dbl2 = listFromMahoutVector(vec, "value", element);
            element.put("value", dbl2);
        } else if (value instanceof org.apache.mahout.clustering.classify.WeightedVectorWritable) {
            org.apache.mahout.clustering.classify.WeightedVectorWritable vecW = (org.apache.mahout.clustering.classify.WeightedVectorWritable) value;
            element.put("valueWeight", vecW.getWeight());
            BasicDBList dbl2 = listFromMahoutVector(vecW.getVector(), "value", element);
            element.put("value", dbl2);
        } else if (value instanceof org.apache.mahout.clustering.iterator.ClusterWritable) {
            Cluster cluster = ((org.apache.mahout.clustering.iterator.ClusterWritable) value).getValue();
            BasicDBObject clusterVal = new BasicDBObject();
            clusterVal.put("center", listFromMahoutVector(cluster.getCenter(), "center", clusterVal));
            clusterVal.put("radius", listFromMahoutVector(cluster.getRadius(), "radius", clusterVal));
            element.put("value", clusterVal);
        } else {
            element.put("unknownValue", value.getClass().toString());
        }

        // Check the fields settings:
        // Only handle a few...
        if (null != fieldLookup) {
            for (String fieldToRemove : fieldLookup) {
                if (fieldToRemove.startsWith("value.")) {
                    fieldToRemove = fieldToRemove.substring(6);
                    // BUGFIX: was element.get("value.") - the trailing dot never
                    // matched the "value" key, so nested removal was a no-op.
                    BasicDBObject nested = (BasicDBObject) element.get("value");
                    if (null != nested) {
                        nested.remove(fieldToRemove);
                    }
                } else {
                    element.remove(fieldToRemove);
                }
            }
        }

        dbl.add(element);
        nRecords++;
        if ((nLimit > 0) && (nRecords >= nLimit)) {
            break;
        }
    }

    return dbl;
}

From source file:com.ikanow.infinit.e.processing.custom.utils.HadoopUtils.java

License:Open Source License

/**
 * Converts a Mahout vector into a BasicDBList. Dense vectors become a flat
 * list of doubles in index order; sparse vectors become a list of
 * {k: index, v: value} objects for the non-zero entries only. If the vector
 * is named, its name is recorded on the parent element under prefix + "Name".
 *
 * @param vec     the Mahout vector to convert
 * @param prefix  key prefix used when recording the vector's name
 * @param element parent document that receives the optional name entry
 * @return the list representation of the vector
 */
private static BasicDBList listFromMahoutVector(Vector vec, String prefix, BasicDBObject element) {
    // Named vectors also record their name on the parent element.
    if (vec instanceof NamedVector) {
        element.put(prefix + "Name", ((NamedVector) vec).getName());
    }
    BasicDBList out = new BasicDBList();
    if (vec.isDense()) {
        // Dense: every component, as a plain double, in index order.
        final int size = vec.size();
        out.ensureCapacity(size);
        for (int idx = 0; idx < size; ++idx) {
            out.add(vec.getQuick(idx));
        }
    } else {
        // Sparse: only non-zero entries, written as {k,v} pairs.
        Iterator<org.apache.mahout.math.Vector.Element> it = vec.iterateNonZero();
        while (it.hasNext()) {
            org.apache.mahout.math.Vector.Element entry = it.next();
            BasicDBObject pair = new BasicDBObject();
            pair.put("k", entry.index());
            pair.put("v", entry.get());
            out.add(pair);
        }
    }
    return out;
}

From source file:com.impetus.client.mongodb.DocumentObjectMapper.java

License:Apache License

/**
 * Copies one entity attribute into the given Mongo document, converting the
 * value according to its attribute type: maps become nested documents,
 * sets/lists become BasicDBLists, points become [x, y] coordinate arrays, and
 * enums/primitives are converted via MongoDBUtils. Null field values are
 * skipped entirely.
 *
 * @param entity the source entity instance
 * @param dbObj  the target Mongo document
 * @param column JPA attribute metadata for the field being extracted
 * @throws PropertyAccessException if the field value cannot be read
 */
static void extractFieldValue(Object entity, DBObject dbObj, Attribute column) throws PropertyAccessException {
    try {
        Object fieldValue = PropertyAccessorHelper.getObject(entity, (Field) column.getJavaMember());
        if (fieldValue == null) {
            return; // nothing to store for absent values
        }
        String jpaColumnName = ((AbstractAttribute) column).getJPAColumnName();
        Class javaType = column.getJavaType();
        switch (AttributeType.getType(javaType)) {
        case MAP:
            // Each map entry is converted individually; keys are stringified.
            BasicDBObjectBuilder builder = new BasicDBObjectBuilder();
            for (Object entryObj : ((Map) fieldValue).entrySet()) {
                Map.Entry entry = (Map.Entry) entryObj;
                builder.add(entry.getKey().toString(),
                        MongoDBUtils.populateValue(entry.getValue(), entry.getValue().getClass()));
            }
            dbObj.put(jpaColumnName, builder.get());
            break;
        case SET:
        case LIST:
            // Collections are stored element-for-element as a Mongo list.
            BasicDBList dbList = new BasicDBList();
            dbList.addAll((Collection) fieldValue);
            dbObj.put(jpaColumnName, dbList);
            break;
        case POINT:
            // Points are stored as a two-element [x, y] coordinate array.
            Point point = (Point) fieldValue;
            dbObj.put(jpaColumnName, new double[] { point.getX(), point.getY() });
            break;
        case ENUM:
        case PRIMITIVE:
            dbObj.put(jpaColumnName, MongoDBUtils.populateValue(fieldValue, javaType));
            break;
        }
    } catch (PropertyAccessException paex) {
        log.error("Error while getting column {} value, caused by : .",
                ((AbstractAttribute) column).getJPAColumnName(), paex);
        throw new PersistenceException(paex);
    }
}

From source file:com.impetus.client.mongodb.MongoDBClient.java

License:Apache License

/**
 * Executes a native MongoDB shell-style query string (e.g. "db.coll.find(...)")
 * via server-side script evaluation and converts the result into a list.
 *
 * @param jsonClause
 *            the native shell query/script text
 * @param entityMetadata
 *            the entity metadata used to map result documents onto entities
 * @return the list of entities, or raw values for count/distinct-style queries
 */
public List executeNativeQuery(String jsonClause, EntityMetadata entityMetadata) {
    List entities = new ArrayList();
    // Dispatch on the last dot-separated segment, i.e. the shell method call itself.
    String[] tempArray = jsonClause.split("\\.");
    String tempClause = tempArray[tempArray.length - 1];

    if (tempClause.contains("findOne(") || tempClause.contains("findAndModify(")) {
        // Single-document result: map it straight onto one entity.
        DBObject obj = (BasicDBObject) executeScript(jsonClause);
        populateEntity(entityMetadata, entities, obj);
        return entities;

    } else if (tempClause.contains("find(") || jsonClause.contains("aggregate(")) {
        // Cursor result: force materialization with toArray() before mapping.
        jsonClause = jsonClause.concat(".toArray()");
        BasicDBList list = (BasicDBList) executeScript(jsonClause);
        for (Object obj : list) {
            populateEntity(entityMetadata, entities, (DBObject) obj);
        }
        return entities;

    } else if (tempClause.contains("count(") || tempClause.contains("dataSize(")
            || tempClause.contains("storageSize(") || tempClause.contains("totalIndexSize(")
            || tempClause.contains("totalSize(")) {
        // Scalar numeric result: the shell evaluates to a double; expose it as a long.
        Long count = ((Double) executeScript(jsonClause)).longValue();
        entities.add(count);
        return entities;

    } else if (tempClause.contains("distinct(")) {
        // distinct() yields raw values, not documents, so no entity mapping is done.
        BasicDBList list = (BasicDBList) executeScript(jsonClause);
        for (Object obj : list) {
            entities.add(obj);
        }
        return entities;

    } else if (jsonClause.contains("mapReduce(")) {
        final MapReduceCommand command = parseMapReduceCommand(jsonClause);
        final MapReduceOutput output = mongoDb.getCollection(command.getInput()).mapReduce(command);

        // NOTE(review): unlike the other branches this returns the raw DBObjects
        // (a BasicDBList), not entities mapped via populateEntity - confirm
        // callers of mapReduce queries expect raw documents.
        final BasicDBList list = new BasicDBList();
        for (final DBObject item : output.results()) {
            list.add(item);
        }
        return list;
    } else {
        // Fallback: treat the script result as a list of raw values.
        BasicDBList list = (BasicDBList) executeScript(jsonClause);
        for (Object obj : list) {
            entities.add(obj);
        }
        return entities;
    }
}

From source file:com.impetus.kundera.mongodb.MongoDBDataHandler.java

License:Apache License

/**
 * @param entity/*ww  w .jav a2 s. co m*/
 * @param dbObj
 * @param column
 * @throws PropertyAccessException
 */
private void extractEntityField(Object entity, BasicDBObject dbObj, Column column)
        throws PropertyAccessException {
    //A column field may be a collection(not defined as 1-to-M relationship)
    if (column.getField().getType().equals(List.class) || column.getField().getType().equals(Set.class)) {
        Collection collection = (Collection) PropertyAccessorHelper.getObject(entity, column.getField());
        BasicDBList basicDBList = new BasicDBList();
        for (Object o : collection) {
            basicDBList.add(o);
        }
        dbObj.put(column.getName(), basicDBList);
    } else {
        dbObj.put(column.getName(), PropertyAccessorHelper.getString(entity, column.getField()));
    }
}

From source file:com.itfsw.query.builder.support.parser.mongodb.DefaultGroupParser.java

License:Apache License

/**
 * ?/*w w w .  j av  a  2 s .c  om*/
 * @param group
 * @param parser
 * @return
 */
@Override
public Object parse(IGroup group, JsonRuleParser parser) {
    // rules
    BasicDBList operates = new BasicDBList();
    for (JsonRule jsonRule : group.getRules()) {
        operates.add(parser.parse(jsonRule));
    }

    // AND or OR
    BasicDBObject andOrObj = new BasicDBObject();
    andOrObj.append(EnumCondition.AND.equals(group.getCondition()) ? "$and" : "$or", operates);

    // Not
    if (group.getNot() != null && group.getNot()) {
        BasicDBList list = new BasicDBList();
        list.add(andOrObj);
        return new BasicDBObject("$nor", list);
    }
    return andOrObj;
}

From source file:com.jagornet.dhcp.db.MongoLeaseManager.java

License:Open Source License

/**
 * Converts a collection of DHCP options into a Mongo list of {code, value}
 * documents.
 *
 * @param dhcpOptions the options to convert; may be null or empty
 * @return a BasicDBList of {code, value} documents, or null when the input
 *         collection is null or empty
 */
protected DBObject convertDhcpOptions(final Collection<DhcpOption> dhcpOptions) {
    // Preserve the original contract: null/empty input maps to a null DBObject.
    if ((dhcpOptions == null) || dhcpOptions.isEmpty()) {
        return null;
    }
    BasicDBList optionList = new BasicDBList();
    for (DhcpOption option : dhcpOptions) {
        BasicDBObject optionDoc = new BasicDBObject("code", option.getCode());
        optionDoc.append("value", option.getValue());
        optionList.add(optionDoc);
    }
    return optionList;
}

From source file:com.jagornet.dhcp.db.MongoLeaseManager.java

License:Open Source License

/**
 * Finds the IP addresses of all existing leases within the inclusive range
 * [startAddr, endAddr], ordered by ascending IP address.
 *
 * @param startAddr the low end of the address range (inclusive)
 * @param endAddr   the high end of the address range (inclusive)
 * @return the matching addresses, possibly empty, in ascending order
 */
@Override
public List<InetAddress> findExistingIPs(final InetAddress startAddr, final InetAddress endAddr) {
    List<InetAddress> inetAddrs = new ArrayList<InetAddress>();

    // ipAddress >= startAddr AND ipAddress <= endAddr (byte-array comparison)
    BasicDBList ipBetw = new BasicDBList();
    ipBetw.add(new BasicDBObject("ipAddress", new BasicDBObject("$gte", startAddr.getAddress())));
    ipBetw.add(new BasicDBObject("ipAddress", new BasicDBObject("$lte", endAddr.getAddress())));

    DBObject query = new BasicDBObject("$and", ipBetw);

    DBCursor cursor = dhcpLeases.find(query).sort(new BasicDBObject("ipAddress", 1));
    try {
        // PERF: the previous cursor.count() pre-check issued an extra server
        // round-trip; hasNext() alone handles the empty result set.
        while (cursor.hasNext()) {
            inetAddrs.add(convertDBObject(cursor.next()).getIpAddress());
        }
    } finally {
        cursor.close();
    }
    return inetAddrs;
}

From source file:com.jagornet.dhcp.db.MongoLeaseManager.java

License:Open Source License

/**
 * Finds IA addresses in [startAddr, endAddr] that are no longer in use:
 * either ADVERTISED leases whose offer has expired, or leases in the
 * EXPIRED/RELEASED states. Results are ordered by state, then validEndTime,
 * then ipAddress.
 *
 * @param startAddr the low end of the address range (inclusive)
 * @param endAddr   the high end of the address range (inclusive)
 * @return the matching IA addresses, or null when none are found
 *         (NOTE(review): other finders return an empty list; callers of this
 *         method must handle null)
 */
@Override
public List<IaAddress> findUnusedIaAddresses(final InetAddress startAddr, final InetAddress endAddr) {
    long offerExpireMillis = DhcpServerPolicies.globalPolicyAsLong(Property.BINDING_MANAGER_OFFER_EXPIRATION);
    final Date offerExpiration = new Date(new Date().getTime() - offerExpireMillis);

    // Clause 1: stale ADVERTISED leases inside the address range.
    BasicDBList ipAdvBetw = new BasicDBList();
    ipAdvBetw.add(new BasicDBObject("state", IaAddress.ADVERTISED));
    ipAdvBetw.add(new BasicDBObject("startTime", new BasicDBObject("$lte", offerExpiration)));
    ipAdvBetw.add(new BasicDBObject("ipAddress", new BasicDBObject("$gte", startAddr.getAddress())));
    ipAdvBetw.add(new BasicDBObject("ipAddress", new BasicDBObject("$lte", endAddr.getAddress())));

    // Clause 2: EXPIRED or RELEASED leases inside the address range.
    BasicDBList ipExpRel = new BasicDBList();
    ipExpRel.add(IaAddress.EXPIRED);
    ipExpRel.add(IaAddress.RELEASED);

    BasicDBList ipExpRelBetw = new BasicDBList();
    ipExpRelBetw.add(new BasicDBObject("state", new BasicDBObject("$in", ipExpRel)));
    ipExpRelBetw.add(new BasicDBObject("ipAddress", new BasicDBObject("$gte", startAddr.getAddress())));
    ipExpRelBetw.add(new BasicDBObject("ipAddress", new BasicDBObject("$lte", endAddr.getAddress())));

    BasicDBList ipBetw = new BasicDBList();
    ipBetw.add(new BasicDBObject("$and", ipAdvBetw));
    ipBetw.add(new BasicDBObject("$and", ipExpRelBetw));

    DBObject query = new BasicDBObject("$or", ipBetw);
    // BUGFIX: each DBCursor.sort() call REPLACES the previous sort spec, so the
    // original chained .sort().sort().sort() effectively sorted by ipAddress
    // only; a single compound sort document gives the intended ordering.
    DBCursor cursor = dhcpLeases.find(query)
            .sort(new BasicDBObject("state", 1).append("validEndTime", 1).append("ipAddress", 1));
    try {
        // PERF: dropped the redundant cursor.count() pre-check (extra round-trip);
        // an emptiness test on the accumulated list preserves the null-on-empty contract.
        List<DhcpLease> leases = new ArrayList<DhcpLease>();
        while (cursor.hasNext()) {
            leases.add(convertDBObject(cursor.next()));
        }
        if (!leases.isEmpty()) {
            return toIaAddresses(leases);
        }
    } finally {
        cursor.close();
    }

    return null;
}