Example usage for com.mongodb DBCursor size

List of usage examples for com.mongodb DBCursor size

Introduction

On this page you can find example usage for com.mongodb DBCursor.size().

Prototype

public int size() 

Document

Counts the number of objects matching the query; this does take limit/skip into consideration.
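
Note that this differs from DBCursor.count(), which ignores any limit/skip applied to the cursor; size() reports the number of objects the cursor will actually return. A minimal sketch of the difference, assuming a legacy-driver DBCollection named "items" (the collection name is illustrative):

import com.mongodb.BasicDBObject;
import com.mongodb.DBCursor;

DBCursor cursor = items.find(new BasicDBObject("active", true)).skip(10).limit(5);
int total = cursor.count();  // all matches for the query, ignoring limit/skip
int window = cursor.size();  // at most 5 here, since limit/skip are applied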

Usage

From source file: controllers.FilterController.java

License: Apache License

private static Graph getSquareRootHistogram(Filter f, String property) {
    BasicDBObject query = Application.getFilterQuery(f);
    DBCursor cursor = Configurator.getDefaultConfigurator().getPersistence().find(Constants.TBL_ELEMENTS,
            query);
    int n = cursor.size();
    int bins = (int) Math.sqrt(n);
    MapReduceJob job = new NumericAggregationJob(f.getCollection(), property);
    job.setFilterquery(query);

    MapReduceOutput output = job.execute();
    List<BasicDBObject> results = (List<BasicDBObject>) output.getCommandResult().get("results");
    Graph g = null;
    if (!results.isEmpty()) {
        BasicDBObject aggregation = (BasicDBObject) results.get(0).get("value");
        long max = aggregation.getLong("max");
        int width = (int) (max / bins);
        Map<String, String> config = new HashMap<String, String>();
        config.put("bin_width", width + "");

        job = new HistogramJob(f.getCollection(), property);
        job.setFilterquery(query);
        job.setConfig(config);
        output = job.execute();
        List<String> keys = new ArrayList<String>();
        List<String> values = new ArrayList<String>();

        calculateNumericHistogramResults(output, keys, values, width);

        g = new Graph(property, keys, values);
    }

    return g;
}

From source file: cz.vse.fis.keg.entityclassifier.core.ontologymapper.YagoOntologyManager.java

License: Open Source License

public String getYagoTypeLabel(String uri) {

    DBCursor cursor = db.getCollection("entities_yago").find(new BasicDBObject().append("uri", uri));

    if (cursor.size() > 0) {
        DBObject tmp = cursor.next();
        BasicDBList labels = (BasicDBList) tmp.get("labels");

        if (labels != null) {
            DBObject tmp2 = (DBObject) labels.get(0);
            return tmp2.get("label").toString();
        }
    }

    return null;

}
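
Here cursor.size() costs an extra round trip: it issues a count for the query before next() fetches the document itself. When only one document is needed, findOne() expresses the same intent with a single query. A sketch of that variant, using the same db handle:

DBObject tmp = db.getCollection("entities_yago").findOne(new BasicDBObject("uri", uri));
if (tmp != null) {
    BasicDBList labels = (BasicDBList) tmp.get("labels");
    // ... proceed as above
}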

From source file: dao.SearchTopicDao.java

public ArrayList<PublicDiscussion> searchTopic(String searchvalue) {
    BasicDBList or = new BasicDBList();
    ArrayList<PublicDiscussion> arpb = new ArrayList<PublicDiscussion>();
    BasicDBObject topic = new BasicDBObject("topic",
            new BasicDBObject("$regex", java.util.regex.Pattern.compile(searchvalue)).append("$options", "i"));
    BasicDBObject content = new BasicDBObject("content",
            new BasicDBObject("$regex", java.util.regex.Pattern.compile(searchvalue)).append("$options", "i"));
    BasicDBObject tags = new BasicDBObject("tags",
            new BasicDBObject("$regex", java.util.regex.Pattern.compile(searchvalue)).append("$options", "i"));
    or.add(topic);
    or.add(content);
    or.add(tags);
    DBObject query = new BasicDBObject("$or", or);

    DBCursor cursor = col.find(query);

    if (cursor.size() > 0) {
        System.out.println("data exists");
    } else {
        System.out.println("data does not exist");
    }

    try {
        DBObject oneDetails;
        while (cursor.hasNext()) {
            oneDetails = cursor.next();

            PublicDiscussion tmpDiscussion = PublicPostConverter.toPublicDiscussion(oneDetails);
            arpb.add(tmpDiscussion);

        }
    } catch (Exception e) {
        System.out.println(e.getMessage());
    }

    return arpb;

}
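
The $regex/$options construction above works, but the legacy driver also serializes a java.util.regex.Pattern directly into a BSON regular expression, flags included, which makes each clause shorter. A sketch of the equivalent case-insensitive clause (not from the original source):

java.util.regex.Pattern p = java.util.regex.Pattern.compile(searchvalue,
        java.util.regex.Pattern.CASE_INSENSITIVE);
BasicDBObject topic = new BasicDBObject("topic", p); // same effect as $regex with $options "i"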

From source file: dao.SearchUserDao.java

public List<User> searchUser(String searchvalue) {
    BasicDBList or = new BasicDBList();

    BasicDBObject username = new BasicDBObject("username",
            new BasicDBObject("$regex", java.util.regex.Pattern.compile(searchvalue)).append("$options", "i"));
    BasicDBObject firstname = new BasicDBObject("firstname",
            new BasicDBObject("$regex", java.util.regex.Pattern.compile(searchvalue)).append("$options", "i"));
    BasicDBObject lastname = new BasicDBObject("lastname", java.util.regex.Pattern.compile(searchvalue));
    or.add(username);
    or.add(firstname);
    or.add(lastname);
    DBObject query = new BasicDBObject("$or", or);

    DBCursor cursor = col.find(query);
    if (cursor.size() > 0) {
        System.out.println("user exists");
    } else {
        System.out.println("user does not exist");
    }

    List<User> users = new ArrayList<>();
    try {
        while (cursor.hasNext()) {
            User tmpUser = UserConverter.toUsers(cursor.next());
            users.add(tmpUser);
        }
    } catch (Exception e) {
        System.out.println(e.getMessage());
    }

    return users;
}

From source file: dao.SearchUserDao.java

public List<Discussion> searchTopic(String searchvalue) {
    BasicDBList or1 = new BasicDBList();

    BasicDBObject topic = new BasicDBObject("Topic", java.util.regex.Pattern.compile(searchvalue));
    BasicDBObject content = new BasicDBObject("Content", java.util.regex.Pattern.compile(searchvalue));
    BasicDBObject tags = new BasicDBObject("Tags", java.util.regex.Pattern.compile(searchvalue));
    or1.add(topic);
    or1.add(content);
    or1.add(tags);
    DBObject query1 = new BasicDBObject("$or", or1);
    DBCursor cursor1 = col1.find(query1);
    if (cursor1.size() > 0) {
        System.out.println("data exists");
    } else {
        System.out.println("data does not exist");
    }

    while (cursor1.hasNext()) {

        System.out.println(cursor1.next());
    }

    return null;
}

From source file: dbscan.DBScanReducer.java

License: Apache License

/**
 * The reduce function receives as input an 'array' of clusters that share the same key.
 * Its job is to aggregate these clusters and to analyze their neighborhood
 * in order to merge all points into a single cluster.
 * This method can be called many times, virtually every time the map function
 * emits a new cluster whose key equals that of another cluster.
 *
 * @param eventKey the key of the clusters in input
 * @param eventValues the iterable of clusters (these objects are of type BSONWritable)
 * @param eventContext the context in which map-reduce works
 */
@Override
public void reduce(final Text eventKey, final Iterable<BSONWritable> eventValues, final Context eventContext)
        throws IOException, InterruptedException {

    //System.out.println("Reducing clusters with key : " + eventKey + "...");

    // get the iterator
    Iterator<BSONWritable> iterator = eventValues.iterator();

    // alloc *new* cluster
    BSONWritable newCluster = new BSONWritable();

    int numPoints = 0;
    int k = 0;
    float avgLat = 0;
    float avgLon = 0;
    int numPointsAnalyzed = 0;

    // start loop to analyze every cluster
    while (iterator.hasNext()) {

        BSONObject aCluster = iterator.next();

        // on the first loop iteration, initialize the *new* cluster
        if (k == 0) {
            newCluster.put("loc", aCluster.get("loc"));
            newCluster.put("createdAt", aCluster.get("createdAt"));
            newCluster.put("hashtag", aCluster.get("hashtag"));
            newCluster.put("isEvent", aCluster.get("isEvent"));
        }

        // add points to *new* cluster
        numPoints += (Integer) aCluster.get("numPoints");

        // put all neighbor points to a ConcurrentHashMap
        Map<ObjectId, BSONObject> tmp = (Map<ObjectId, BSONObject>) aCluster.get("neighborPoints");
        Map<ObjectId, BSONObject> neighborPoints = new ConcurrentHashMap<ObjectId, BSONObject>();
        neighborPoints.putAll(tmp);

        // start loop for neighbor points         
        int i = 0;
        for (Iterator iteratorNeighborPoints = neighborPoints.entrySet().iterator(); iteratorNeighborPoints
                .hasNext();) {

            Map.Entry<ObjectId, BSONObject> p = (Entry<ObjectId, BSONObject>) iteratorNeighborPoints.next();

            // needs to re-query MongoDB because the point could have been visited
            // in the meantime by, for example, a map thread concurrent to this reduce thread
            BSONObject point = collection.findOne(new BasicDBObject("_id", p.getValue().get("_id")));
            boolean pointModified = false;

            if (point != null) {
                if ((Boolean) point.get("visited") == false) {

                    // mark as visited
                    point.put("visited", true);
                    pointModified = true;

                    // find near points
                    BasicDBObject findNearPoints = new BasicDBObject();
                    findNearPoints.put("loc", new BasicDBObject("$within", new BasicDBObject("$center",
                            new Object[] { point.get("loc"), new Double(radius / 111.12) })));
                    findNearPoints.put("hashtag", point.get("hashtag"));
                    DBCursor nearPoints = collection.find(findNearPoints);

                    if (nearPoints.size() >= minPointsToCreateCluster) {
                        // improve performance by adding only points that are unvisited OR unclusterized:
                        // a second query, BUT far fewer points to loop over
                        findNearPoints.put("$or", new BasicDBObject[] { new BasicDBObject("visited", false),
                                new BasicDBObject("clusterized", false) });
                        nearPoints = collection.find(findNearPoints);

                        toMap(neighborPoints, nearPoints.toArray());
                    }

                    // drop references to free a bit of memory
                    findNearPoints = null;
                    nearPoints = null;

                } // end if visited == false

                // add point to cluster
                if ((Boolean) point.get("clusterized") == false) {
                    // add the point to cluster
                    point.put("clusterized", true);
                    pointModified = true;
                    numPoints++;
                }

                // update new point in MongoDB
                if (pointModified)
                    collection.findAndModify(new BasicDBObject("_id", point.get("_id")),
                            new BasicDBObject(point.toMap()));

                // update average location
                if (((BasicBSONObject) point.get("loc")).get("lat") instanceof Double)
                    avgLat += ((Double) ((BasicBSONObject) point.get("loc")).get("lat")).floatValue();
                else
                    avgLat += ((Integer) ((BasicBSONObject) point.get("loc")).get("lat")).floatValue();
                if (((BasicBSONObject) point.get("loc")).get("lon") instanceof Double)
                    avgLon += ((Double) ((BasicBSONObject) point.get("loc")).get("lon")).floatValue();
                else
                    avgLon += ((Integer) ((BasicBSONObject) point.get("loc")).get("lon")).floatValue();

                point = null;
                i++;
                numPointsAnalyzed++;
            }
        } // end loop for neighbor points
        k++;

        aCluster = null;
        neighborPoints = null;
        System.gc();

    } // end loop for clusters

    if (numPointsAnalyzed > 0) {
        // update average location of new cluster with the weighted average
        // of points analyzed
        avgLat = avgLat / (float) numPointsAnalyzed;
        avgLon = avgLon / (float) numPointsAnalyzed;

        // if the location of analyzed points is significantly different from
        // the old cluster location, then that cluster is not an event!
        BSONObject loc = (BSONObject) newCluster.get("loc");
        LatLng oldLatLon = new LatLng((Double) loc.get("lat"), (Double) loc.get("lon"));
        LatLng newLatLon = new LatLng(avgLat, avgLon);
        double distance = oldLatLon.distance(newLatLon);

        if (distance < MAX_DISTANCE_OFFSET_NEW_CLUSTER_LOCATION)
            // mark as event
            newCluster.put("isEvent", true);
        else
            // mark as no-event
            newCluster.put("isEvent", false);

        // update new position (only if it is valid)
        if (avgLat >= -90.0f && avgLat <= 90.0f && avgLon >= -180.0f && avgLon <= 180.0f) {
            DecimalFormat df = new DecimalFormat("##.######");
            Map<String, Float> newLoc = new TreeMap<String, Float>();
            newLoc.put("lat", Float.parseFloat(df.format(avgLat)));
            newLoc.put("lon", Float.parseFloat(df.format(avgLon)));
            newCluster.put("loc", newLoc);
        }

    }

    // update new cluster object
    newCluster.put("numPoints", numPoints);
    newCluster.put("neighborPoints", new HashMap<ObjectId, BSONObject>());

    // write to context if and only if the new cluster has enough points
    if (numPoints > 30)
        eventContext.write(eventKey, newCluster);

    newCluster = null;

    // IN CASE OF MEMORY PROBLEMS: force the garbage collector.
    // It may not be elegant and is often not recommended,
    // but it works.
    System.gc();

}
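
In this reducer, cursor.size() supplies the DBSCAN core-point test: a point spawns further expansion only when its epsilon-neighborhood holds at least minPointsToCreateCluster points. The neighborhood itself comes from MongoDB's legacy $within/$center operator, whose radius is expressed in degrees, hence the division of a kilometer radius by 111.12 (roughly the kilometers in one degree of latitude). That query, isolated as a sketch with illustrative names (loc, radiusKm):

BasicDBObject near = new BasicDBObject("loc",
        new BasicDBObject("$within", new BasicDBObject("$center",
                new Object[] { loc, Double.valueOf(radiusKm / 111.12) })));
DBCursor nearPoints = collection.find(near);
boolean isCorePoint = nearPoints.size() >= minPointsToCreateCluster;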

From source file: eu.cassandra.csn.mongo.MongoQueries.java

License: Apache License

public static String[][] getRuns() {
    DBCursor cursor = DBConn.getConn("test").getCollection("runs").find();

    String[][] data = new String[cursor.size()][3];
    int counter = 0;
    while (cursor.hasNext()) {
        DBObject obj = cursor.next();
        if ((Integer) obj.get("percentage") == 100) {
            String c = String.valueOf(counter);
            String id = "";

            id = obj.get("_id").toString();

            int instCounter = DBConn.getMongo().getDB(id).getCollection("installations").find().count();
            if (instCounter > limit)
                instCounter = limit;

            data[counter][0] = c;
            data[counter][1] = id;
            data[counter][2] = String.valueOf(instCounter);
            counter++;
        }
    }
    cursor.close();
    return data;
}
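
Because the array is sized up front with cursor.size() but rows are only filled for runs at 100 percent, trailing entries of data may remain null. Callers must tolerate that, or the method can collect rows into a list first; a sketch of the list-based variant (illustrative, instCounter handling omitted):

List<String[]> rows = new ArrayList<String[]>();
while (cursor.hasNext()) {
    DBObject obj = cursor.next();
    if ((Integer) obj.get("percentage") == 100)
        rows.add(new String[] { String.valueOf(rows.size()), obj.get("_id").toString(), "0" });
}
cursor.close();
String[][] data = rows.toArray(new String[rows.size()][]);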

From source file: eu.cassandra.server.api.Runs.java

License: Apache License

/**
 * Create a run.
 * In order to create and start a run, we need to have the simulation 
 * parameter id passed as a JSON property via the POST request. After that
 * the procedure goes as follows:
 * <ol>
 * <li>Create a database to hold the run documents and results (dbname same as run_id)</li>
 * <li>Parse the smp_id from the JSON request</li>
 * <li>From smp_id get scn_id</li>
 * <li>From scn_id gather all scenario documents and create a full JSON scenario</li>
 * <li>If the scenario is dynamic, instantiate as documents all the virtual installations</li>
 * <li>Store the full scenario in a new MongoDB database</li>
 * <li>Create a thread with the JSON scenario</li>
 * <li>Run the thread</li>
 * <li>Store the run document</li>
 * </ol>
 */
@POST
public Response createRun(String message) {

    DBObject query = new BasicDBObject(); // A query

    try {
        // Create the new database
        ObjectId objid = ObjectId.get();
        String dbname = objid.toString();
        DB db = createDB(dbname);

        // Create the scenario document
        DBObject scenario = new BasicDBObject();

        // Simulation parameters
        DBObject jsonMessage = (DBObject) JSON.parse(message);
        String smp_id = (String) jsonMessage.get("smp_id");
        checkForNull(smp_id, "Simulation Parameters id not posted.");
        query.put("_id", new ObjectId(smp_id));
        DBObject simParams = DBConn.getConn().getCollection(MongoSimParam.COL_SIMPARAM).findOne(query);
        checkForNull(simParams, "The provided Simulation Parameters were not found in the DB.");
        db.getCollection(MongoSimParam.COL_SIMPARAM).insert(simParams);
        scenario.put("sim_params", simParams);

        // Scenario
        String scn_id = (String) simParams.get("scn_id");
        checkForNull(scn_id, "Scenario id not found in posted Simulation Parameters.");
        query.put("_id", new ObjectId(scn_id));
        DBObject scn = DBConn.getConn().getCollection(MongoScenarios.COL_SCENARIOS).findOne(query);
        checkForNull(scn, "The provided Scenario was not found in the DB.");
        db.getCollection(MongoScenarios.COL_SCENARIOS).insert(scn);
        scenario.put("scenario", scn);

        // Pricing Policy
        String prc_id = (String) simParams.get("prc_id");
        if (prc_id != null && prc_id.matches("[a-z0-9]{24}")) { // Optionally provided
            query.put("_id", new ObjectId(prc_id));
            DBObject pricingPolicy = DBConn.getConn().getCollection(MongoPricingPolicy.COL_PRICING)
                    .findOne(query);
            checkForNull(pricingPolicy, "The provided Pricing Policy was not found in the DB.");
            db.getCollection(MongoPricingPolicy.COL_PRICING).insert(pricingPolicy);
            scenario.put("pricing", pricingPolicy);
        }

        // Baseline Pricing Policy
        String base_prc_id = (String) simParams.get("base_prc_id");
        if (base_prc_id != null && base_prc_id.matches("[a-z0-9]{24}")) { // Optionally provided
            query.put("_id", new ObjectId(base_prc_id));
            DBObject basePricingPolicy = DBConn.getConn().getCollection(MongoPricingPolicy.COL_PRICING)
                    .findOne(query);
            checkForNull(basePricingPolicy, "The provided Baseline Pricing Policy was not found in the DB.");
            db.getCollection(MongoPricingPolicy.COL_PRICING).insert(basePricingPolicy);
            scenario.put("baseline_pricing", basePricingPolicy);
        }

        // Project
        String prj_id = (String) scn.get("project_id");
        checkForNull(prj_id, "Project id not found in posted Scenario.");
        query.put("_id", new ObjectId(prj_id));
        DBObject project = DBConn.getConn().getCollection(MongoProjects.COL_PROJECTS).findOne(query);
        checkForNull(project, "The provided Project was not found in the DB.");
        db.getCollection(MongoProjects.COL_PROJECTS).insert(project);
        scenario.put("project", project);

        // Demographics
        query = new BasicDBObject();
        query.put("scn_id", scn_id);
        String setup = (String) scn.get("setup");
        checkForNull(setup, "Setup property not set.");
        String name = (String) scn.get("name");
        boolean isDynamic = setup.equalsIgnoreCase("dynamic");
        if (isDynamic) {
            DBObject demog = DBConn.getConn().getCollection(MongoDemographics.COL_DEMOGRAPHICS).findOne(query);
            checkForNull(demog, "The provided Demographics were not found in the DB.");
            db.getCollection(MongoDemographics.COL_DEMOGRAPHICS).insert(demog);
            scenario.put("demog", demog);
        }

        // Installations
        query = new BasicDBObject();
        query.put("scenario_id", scn_id);
        DBCursor cursor = DBConn.getConn().getCollection(MongoInstallations.COL_INSTALLATIONS).find(query);
        checkForZero(cursor.size(), "No installations found");
        int countInst = 0;
        while (cursor.hasNext()) {
            countInst++;
            DBObject obj = cursor.next();
            if (!isDynamic)
                db.getCollection(MongoInstallations.COL_INSTALLATIONS).insert(obj);
            String inst_id = obj.get("_id").toString();
            // Thermal module
            query = new BasicDBObject();
            query.put("inst_id", inst_id);
            DBObject thermal = DBConn.getConn().getCollection(MongoThermal.COL_THERMAL).findOne(query);
            if (thermal != null) {
                db.getCollection(MongoThermal.COL_THERMAL).insert(thermal);
                obj.put("thermal", thermal);
            }

            // Persons
            query = new BasicDBObject();
            query.put("inst_id", inst_id);
            DBCursor persons = DBConn.getConn().getCollection(MongoPersons.COL_PERSONS).find(query);
            int personCount = 0;
            while (persons.hasNext()) {
                personCount++;
                DBObject person = persons.next();
                if (!isDynamic)
                    db.getCollection(MongoPersons.COL_PERSONS).insert(person);

                // Activities
                String pers_id = person.get("_id").toString();
                query = new BasicDBObject();
                query.put("pers_id", pers_id);
                DBCursor activities = DBConn.getConn().getCollection(MongoActivities.COL_ACTIVITIES)
                        .find(query);
                int countAct = 0;
                while (activities.hasNext()) {
                    countAct++;
                    DBObject activity = activities.next();
                    if (!isDynamic)
                        db.getCollection(MongoActivities.COL_ACTIVITIES).insert(activity);

                    // Activity Models
                    String act_id = activity.get("_id").toString();
                    query = new BasicDBObject();
                    query.put("act_id", act_id);
                    DBCursor activityModels = DBConn.getConn().getCollection(MongoActivityModels.COL_ACTMODELS)
                            .find(query);
                    int countActMod = 0;
                    while (activityModels.hasNext()) {
                        countActMod++;
                        DBObject activityModel = activityModels.next();
                        if (!isDynamic)
                            db.getCollection(MongoActivityModels.COL_ACTMODELS).insert(activityModel);

                        // Duration distribution
                        String dur_id = activityModel.get("duration").toString();
                        checkForNull(dur_id, "Activity Model with name '" + activityModel.get("name")
                                + "' does not have a duration distribution.");
                        query = new BasicDBObject();
                        query.put("_id", new ObjectId(dur_id));
                        DBObject durDist = DBConn.getConn().getCollection(MongoDistributions.COL_DISTRIBUTIONS)
                                .findOne(query);
                        checkForNull(durDist, "Duration distribution of '" + activityModel.get("name")
                                + "' not found in the DB.");
                        if (!isDynamic)
                            db.getCollection(MongoDistributions.COL_DISTRIBUTIONS).insert(durDist);
                        activityModel.put("duration", durDist);

                        // Start time distribution
                        String start_id = activityModel.get("startTime").toString();
                        checkForNull(start_id, "Activity Model with name '" + activityModel.get("name")
                                + "' does not have a start time distribution.");
                        query = new BasicDBObject();
                        query.put("_id", new ObjectId(start_id));
                        DBObject startDist = DBConn.getConn()
                                .getCollection(MongoDistributions.COL_DISTRIBUTIONS).findOne(query);
                        checkForNull(startDist, "Start distribution of '" + activityModel.get("name")
                                + "' not found in the DB.");
                        if (!isDynamic)
                            db.getCollection(MongoDistributions.COL_DISTRIBUTIONS).insert(startDist);
                        activityModel.put("start", startDist);

                        // Repetitions distribution
                        String rep_id = activityModel.get("repeatsNrOfTime").toString();
                        checkForNull(rep_id, "Activity Model with name '" + activityModel.get("name")
                                + "' does not have a number of times distribution.");
                        query = new BasicDBObject();
                        query.put("_id", new ObjectId(rep_id));
                        DBObject repDist = DBConn.getConn().getCollection(MongoDistributions.COL_DISTRIBUTIONS)
                                .findOne(query);
                        checkForNull(repDist, "Number of times distribution of '" + activityModel.get("name")
                                + "' not found in the DB.");
                        if (!isDynamic)
                            db.getCollection(MongoDistributions.COL_DISTRIBUTIONS).insert(repDist);
                        activityModel.put("repetitions", repDist);
                        activity.put("actmod" + countActMod, activityModel);
                    }
                    activity.put("actmodcount", new Integer(countActMod));
                    person.put("activity" + countAct, activity);
                }
                person.put("activitycount", new Integer(countAct));
                obj.put("person" + personCount, person);
            }
            obj.put("personcount", new Integer(personCount));
            // Appliances
            query = new BasicDBObject();
            query.put("inst_id", inst_id);
            DBCursor appliances = DBConn.getConn().getCollection(MongoAppliances.COL_APPLIANCES).find(query);
            int countApps = 0;
            while (appliances.hasNext()) {
                countApps++;
                DBObject appliance = appliances.next();
                if (!isDynamic)
                    db.getCollection(MongoAppliances.COL_APPLIANCES).insert(appliance);

                // Consumption model
                String app_id = appliance.get("_id").toString();
                query = new BasicDBObject();
                query.put("app_id", app_id);
                DBObject consModel = DBConn.getConn().getCollection(MongoConsumptionModels.COL_CONSMODELS)
                        .findOne(query);
                checkForNull(consModel,
                        "Consumption model of appliance '" + appliance.get("name") + "' not found in the DB.");
                if (!isDynamic)
                    db.getCollection(MongoConsumptionModels.COL_CONSMODELS).insert(consModel);
                appliance.put("consmod", consModel);
                obj.put("app" + countApps, appliance);
            }
            obj.put("appcount", new Integer(countApps));
            scenario.put("inst" + countInst, obj);
        }
        scenario.put("instcount", new Integer(countInst));
        Simulation sim = new Simulation(scenario.toString(), dbname);
        sim.setup(false);
        // Scenario building finished
        DBObject run = buildRunObj(objid, name, prj_id, "sim");
        DBConn.getConn().getCollection(MongoRuns.COL_RUNS).insert(run);
        String returnMsg = PrettyJSONPrinter.prettyPrint(jSON2Rrn.createJSON(run, "Sim creation successful"));
        logger.info(returnMsg);
        ThreadPoolExecutor executorPool = (ThreadPoolExecutor) context.getAttribute("MY_EXECUTOR");
        Utils.printExecutorSummary(executorPool);
        executorPool.execute(sim);
        return Utils.returnResponse(returnMsg);
    } catch (UnknownHostException | MongoException e1) {
        String returnMsg = "{ \"success\": false, \"message\": \"Sim creation failed\", \"errors\": { \"hostMongoException\": \""
                + e1.getMessage() + "\" } }";
        e1.printStackTrace();
        return Utils.returnResponse(returnMsg);
    } catch (Exception e) {
        String returnMsg = "{ \"success\": false, \"message\": \"Sim creation failed\", \"errors\": { \"generalException\": \""
                + e.getMessage() + "\" } }";
        e.printStackTrace();
        logger.error(Utils.stackTraceToString(e.getStackTrace()));
        return Utils.returnResponse(returnMsg);
    }
}
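
Every lookup above is validated with checkForNull or checkForZero, and the cursor.size() call is what feeds checkForZero so the run fails fast when a scenario has no installations. The helpers themselves are not part of this excerpt; a hypothetical sketch of their shape:

// Hypothetical: the real implementations are not shown in this excerpt.
private static void checkForNull(Object o, String message) throws Exception {
    if (o == null)
        throw new Exception(message);
}

private static void checkForZero(int n, String message) throws Exception {
    if (n == 0)
        throw new Exception(message);
}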

From source file: fr.cirad.mgdb.exporting.markeroriented.VcfExportHandler.java

License: Open Source License

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    Integer projectId = null;
    for (SampleId spId : sampleIDs) {
        if (projectId == null)
            projectId = spId.getProject();
        else if (projectId != spId.getProject()) {
            projectId = 0;
            break; // more than one project is involved: no header will be written
        }
    }

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();
    ZipOutputStream zos = new ZipOutputStream(outputStream);

    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    LinkedHashMap<SampleId, String> sampleIDToIndividualIdMap = new LinkedHashMap<SampleId, String>();
    ArrayList<String> individualList = new ArrayList<String>();
    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    for (int i = 0; i < sampleIDs.size(); i++) {
        String individualId = individuals.get(i).getId();
        sampleIDToIndividualIdMap.put(sampleIDs.get(i), individualId);
        if (!individualList.contains(individualId)) {
            individualList.add(individualId);
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".vcf"));

    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nQueryChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;

    VariantContextWriter writer = null;
    try {
        List<String> distinctSequenceNames = new ArrayList<String>();

        String sequenceSeqCollName = MongoTemplateManager.getMongoCollectionName(Sequence.class);
        if (mongoTemplate.collectionExists(sequenceSeqCollName)) {
            DBCursor markerCursorCopy = markerCursor.copy();
            markerCursorCopy.batchSize(nQueryChunkSize);
            while (markerCursorCopy.hasNext()) {
                int nLoadedMarkerCountInLoop = 0;
                boolean fStartingNewChunk = true;
                while (markerCursorCopy.hasNext()
                        && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                    DBObject exportVariant = markerCursorCopy.next();
                    String chr = (String) ((DBObject) exportVariant
                            .get(VariantData.FIELDNAME_REFERENCE_POSITION))
                                    .get(ReferencePosition.FIELDNAME_SEQUENCE);
                    if (!distinctSequenceNames.contains(chr))
                        distinctSequenceNames.add(chr);
                }
            }
            markerCursorCopy.close();
        }

        Collections.sort(distinctSequenceNames, new AlphaNumericStringComparator());
        SAMSequenceDictionary dict = createSAMSequenceDictionary(sModule, distinctSequenceNames);
        writer = new CustomVCFWriter(null, zos, dict, false, false, true);
        //         VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
        //         vcwb.unsetOption(Options.INDEX_ON_THE_FLY);
        //         vcwb.unsetOption(Options.DO_NOT_WRITE_GENOTYPES);
        //         vcwb.setOption(Options.USE_ASYNC_IOINDEX_ON_THE_FLY);
        //         vcwb.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
        //         vcwb.setReferenceDictionary(dict);
        //         writer = vcwb.build();
        //         writer = new AsyncVariantContextWriter(writer, 3000);

        progress.moveToNextStep(); // done with dictionary
        DBCursor headerCursor = mongoTemplate
                .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class))
                .find(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId));
        Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>();
        boolean fWriteCommandLine = true, fWriteEngineHeaders = true; // default values

        while (headerCursor.hasNext()) {
            DBVCFHeader dbVcfHeader = DBVCFHeader.fromDBObject(headerCursor.next());
            headerLines.addAll(dbVcfHeader.getHeaderLines());

            // Add sequence header lines (not stored in our vcf header collection)
            BasicDBObject projection = new BasicDBObject(SequenceStats.FIELDNAME_SEQUENCE_LENGTH, true);
            int nSequenceIndex = 0;
            for (String sequenceName : distinctSequenceNames) {
                String sequenceInfoCollName = MongoTemplateManager.getMongoCollectionName(SequenceStats.class);
                boolean fCollectionExists = mongoTemplate.collectionExists(sequenceInfoCollName);
                if (fCollectionExists) {
                    DBObject record = mongoTemplate.getCollection(sequenceInfoCollName).findOne(
                            new Query(Criteria.where("_id").is(sequenceName)).getQueryObject(), projection);
                    if (record == null) {
                        LOG.warn("Sequence '" + sequenceName + "' not found in collection "
                                + sequenceInfoCollName);
                        continue;
                    }

                    Map<String, String> sequenceLineData = new LinkedHashMap<String, String>();
                    sequenceLineData.put("ID", (String) record.get("_id"));
                    sequenceLineData.put("length",
                            ((Number) record.get(SequenceStats.FIELDNAME_SEQUENCE_LENGTH)).toString());
                    headerLines.add(new VCFContigHeaderLine(sequenceLineData, nSequenceIndex++));
                }
            }
            fWriteCommandLine = headerCursor.size() == 1 && dbVcfHeader.getWriteCommandLine(); // wouldn't make sense to include command lines for several runs
            if (!dbVcfHeader.getWriteEngineHeaders())
                fWriteEngineHeaders = false;
        }
        headerCursor.close();

        VCFHeader header = new VCFHeader(headerLines, individualList);
        header.setWriteCommandLine(fWriteCommandLine);
        header.setWriteEngineHeaders(fWriteEngineHeaders);
        writer.writeHeader(header);

        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        HashMap<SampleId, Comparable /*phID*/> phasingIDsBySample = new HashMap<SampleId, Comparable>();

        while (markerCursor.hasNext()) {
            if (progress.hasAborted())
                return;

            int nLoadedMarkerCountInLoop = 0;
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nQueryChunkSize);
            List<Comparable> currentMarkers = new ArrayList<Comparable>();
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                currentMarkers.add((Comparable) exportVariant.get("_id"));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) {
                VariantContext vc = variant.toVariantContext(variantsAndRuns.get(variant),
                        !ObjectId.isValid(variant.getId().toString()), sampleIDToIndividualIdMap,
                        phasingIDsBySample, nMinimumGenotypeQuality, nMinimumReadDepth, warningFileWriter,
                        markerSynonyms == null ? variant.getId() : markerSynonyms.get(variant.getId()));
                try {
                    writer.add(vc);
                } catch (Throwable t) {
                    Exception e = new Exception("Unable to convert to VariantContext: " + variant.getId(), t);
                    LOG.debug("error", e);
                    throw e;
                }

                if (nLoadedMarkerCountInLoop > currentMarkers.size())
                    LOG.error("Bug: writing variant number " + nLoadedMarkerCountInLoop + " (only "
                            + currentMarkers.size() + " variants expected)");
            }

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        progress.setCurrentStepProgress((short) 100);

    } catch (Exception e) {
        LOG.error("Error exporting", e);
        progress.setError(e.getMessage());
        return;
    } finally {
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            BufferedReader in = new BufferedReader(new FileReader(warningFile));
            String sLine;
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        if (writer != null)
            try {
                writer.close();
            } catch (Throwable ignored) {
            }
    }
}
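
The export walks markerCursor in fixed-size chunks: batchSize(nQueryChunkSize) keeps driver fetches aligned with the chunk, and each chunk of variant ids is resolved with one bulk genotype query instead of one query per marker. The pattern, isolated as a sketch (illustrative names; process() stands in for the bulk query):

cursor.batchSize(chunkSize);
List<Object> chunk = new ArrayList<Object>(chunkSize);
while (cursor.hasNext()) {
    chunk.add(cursor.next().get("_id"));
    if (chunk.size() == chunkSize || !cursor.hasNext()) {
        process(chunk); // one bulk query per chunk
        chunk.clear();
    }
}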

From source file: fr.cirad.web.controller.gigwa.base.AbstractVariantController.java

License: Open Source License

/**
 * Setup detail page.
 *
 * @param sModule the module
 * @param projectId the project id
 * @param variantId the variant id
 * @param selectedIndividuals the selected individuals
 * @return the model and view
 * @throws Exception the exception
 */
@RequestMapping(value = variantDetailsURL, method = RequestMethod.GET)
protected ModelAndView setupDetailPage(@RequestParam("module") String sModule,
        @RequestParam("project") int projectId, @RequestParam("variantId") String variantId,
        @RequestParam("individuals") String selectedIndividuals) throws Exception {
    ModelAndView mav = new ModelAndView();
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);

    GenotypingProject project = mongoTemplate.findById(projectId, GenotypingProject.class);
    mav.addObject("project", project);

    List<String> selectedIndividualList = Arrays.asList(selectedIndividuals.split(";"));
    HashMap<Integer, String> sampleToIndividualMap = new LinkedHashMap<Integer, String>();
    HashMap<String, Boolean> individualMap = new LinkedHashMap<String, Boolean>();
    for (String ind : listIndividualsInAlphaNumericOrder(sModule, projectId)) {
        for (Integer sampleIndex : project.getIndividualSampleIndexes(ind))
            sampleToIndividualMap.put(sampleIndex, ind);
        individualMap.put(ind, selectedIndividuals.length() == 0 || selectedIndividualList.contains(ind));
    }
    mav.addObject("individualMap", individualMap);

    HashMap<Integer, List<String>> sampleIDsByProject = new HashMap<Integer, List<String>>();
    sampleIDsByProject.put(projectId, selectedIndividualList);
    VariantData var = mongoTemplate.findById(variantId, VariantData.class);
    mav.addObject("variantType", var.getType());
    mav.addObject("refPos", var.getReferencePosition());

    Map<String /* run */, Map<String /* individual */, List<Comparable /* cell value */>>> dataByRun = new TreeMap<String, Map<String, List<Comparable>>>(
            new AlphaNumericComparator());
    Map<String /* run */, Map<String /* info field */, Object>> additionalInfoByRun = new TreeMap<String, Map<String, Object>>(
            new AlphaNumericComparator());
    Map<String /* run */, Map<String /* info field */, VCFInfoHeaderLine>> additionalInfoDescByRun = new HashMap<String, Map<String, VCFInfoHeaderLine>>();
    List<String> headerCols = new ArrayList<String>();
    List<String> headerColDescs = new ArrayList<String>();
    List<Criteria> crits = new ArrayList<Criteria>();
    crits.add(Criteria.where("_id." + VariantRunDataId.FIELDNAME_PROJECT_ID).is(projectId));
    crits.add(Criteria.where("_id." + VariantRunDataId.FIELDNAME_VARIANT_ID).is(var.getId()));
    List<VariantRunData> runs = mongoTemplate.find(
            new Query(new Criteria().andOperator(crits.toArray(new Criteria[crits.size()]))),
            VariantRunData.class);
    for (VariantRunData run : runs) {
        DBVCFHeader vcfHeader = null;
        BasicDBList andList = new BasicDBList();
        andList.add(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId));
        andList.add(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_RUN, run.getRunName()));
        DBCursor headerCursor = mongoTemplate
                .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class))
                .find(new BasicDBObject("$and", andList));
        if (headerCursor.size() > 0 && headerCols.isEmpty()) {
            vcfHeader = DBVCFHeader.fromDBObject(headerCursor.next());
            headerCursor.close();
        }
        Map<String /* individual */, List<Comparable /* cell value */>> genotypeRows = new TreeMap<String, List<Comparable>>(
                new AlphaNumericComparator());

        additionalInfoByRun.put(run.getRunName(), run.getAdditionalInfo());
        if (vcfHeader != null)
            additionalInfoDescByRun.put(run.getRunName(), vcfHeader.getmInfoMetaData());

        dataByRun.put(run.getRunName(), genotypeRows);
        for (Integer sample : run.getSampleGenotypes().keySet()) {
            SampleGenotype sg = run.getSampleGenotypes().get(sample);
            List<Comparable> genotypeRow = new ArrayList<Comparable>();
            genotypeRows.put(sampleToIndividualMap.get(sample), genotypeRow);
            genotypeRow.add(sg.getCode());

            for (String gtInfo : sg.getAdditionalInfo().keySet()) {
                if (!headerCols.contains(gtInfo)
                        /* exclude some fields that we don't want to show */ && !gtInfo
                                .equals(VariantData.GT_FIELD_PHASED_GT)
                        && !gtInfo.equals(VariantData.GT_FIELD_PHASED_ID)
                        && !gtInfo.equals(VariantRunData.FIELDNAME_ADDITIONAL_INFO_EFFECT_GENE)
                        && !gtInfo.equals(VariantRunData.FIELDNAME_ADDITIONAL_INFO_EFFECT_NAME)) {
                    headerCols.add(gtInfo);
                    headerColDescs.add(vcfHeader != null
                            ? ((VCFFormatHeaderLine) vcfHeader.getmFormatMetaData().get(gtInfo))
                                    .getDescription()
                            : "");
                }
                if (!headerCols.contains(gtInfo))
                    continue;

                int cellIndex = headerCols.indexOf(gtInfo);
                while (genotypeRow.size() < cellIndex + 2)
                    genotypeRow.add(null);
                genotypeRow.set(cellIndex + 1, sg.getAdditionalInfo().get(gtInfo));
            }
        }
    }

    mav.addObject("headerAdditionalInfo", headerCols);
    mav.addObject("headerAdditionalInfoDesc", headerColDescs);
    mav.addObject("runAdditionalInfo", additionalInfoByRun);
    mav.addObject("runAdditionalInfoDesc", additionalInfoDescByRun);
    mav.addObject("dataByRun", dataByRun);
    mav.addObject("knownAlleles", var.getKnownAlleleList());

    return mav;
}