Example usage for com.mongodb.client.model Filters and

List of usage examples for com.mongodb.client.model Filters and

Introduction

This page collects real-world example usages of the com.mongodb.client.model Filters.and method.

Prototype

public static Bson and(final Bson... filters) 

Source Link

Document

Creates a filter that performs a logical AND of the provided list of filters.

Usage

From source file:io.sip3.tapir.twig.mongo.query.SipSearchQuery.java

License:Apache License

@Override
public Bson filter() {
    // Gather the time-range filter plus the optional caller/callee filters,
    // dropping any that are null (i.e. not specified in this search), and
    // combine whatever remains with a logical AND.
    final List<Bson> activeFilters = Stream.of(between(), filter("caller", caller), filter("callee", callee))
            .filter(f -> f != null)
            .collect(Collectors.toList());

    return Filters.and(activeFilters);
}

From source file:it.av.fac.webserver.handlers.WikiPageFetcher.java

/**
 * Fetches the wiki page stored under the given identifier.
 *
 * <p>TODO: Add more query functionalities.
 *
 * @param page the {@code _id} of the page document to fetch
 * @return a JSON array containing every matching document (empty if none)
 */
public JSONArray fetchPage(String page) {
    JSONArray results = new JSONArray();

    // Single equality criterion on _id; wrapped in and() so further
    // criteria can be appended later without restructuring the query.
    List<Bson> criteria = new ArrayList<>();
    criteria.add(Filters.eq("_id", page));

    FindIterable<Document> matches = this.collection.find(Filters.and(criteria));

    // Serialize each matching document to JSON and collect it.
    matches.forEach((Consumer<Document>) doc -> results.put(new JSONObject(doc.toJson())));

    return results;
}

From source file:module.script.AddSeriesToSamples.java

License:Open Source License

/**
 * Adds the "TISSUE_SPECIFIC_GENES_HS" series to every sample of the
 * PRJNA270632 series that has an expression measure count, and persists
 * each modified document back to Mongo.
 */
public AddSeriesToSamples() {

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("sample");

    // Select all samples belonging to the PRJNA270632 series.
    Bson filters = Filters.and(Filters.in("series", "PRJNA270632"));

    List<Document> listDocuments = collection.find(filters).into(new ArrayList<Document>());

    for (Document doc : listDocuments) {

        Document expgroup = doc.get("exp_group", Document.class);

        // Only tag samples that carry an expression measure count.
        if (expgroup.get("exp_Mcount") != null) {

            List<String> projects = doc.get("series", ArrayList.class);

            // Build a fresh set per document. The original code reused one
            // mutable Set across iterations, so every processed document
            // ended up holding the same instance, which was then cleared
            // and refilled on the next iteration.
            Set<String> setProjects = new HashSet<String>(projects);
            setProjects.add("TISSUE_SPECIFIC_GENES_HS");
            doc.put("series", setProjects);
            System.out.println(doc.getString("_id") + " " + projects + " -> " + setProjects);

            collection.updateOne(Filters.eq("_id", doc.getString("_id")), new Document("$set", doc));
        }

    }

    mongoClient.close();
}

From source file:module.script.CorrectImportedData.java

License:Open Source License

/**
 * Corrects imported samples of the series identified by {@code gseNumber}:
 * sets their tissue stage to "fetal" (id 2), marks them as analyzed, and
 * persists each document back to Mongo.
 */
public CorrectImportedData() {

    // ===== Session Mongo =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("sample");

    // Select every sample of the series being corrected.
    // NOTE(review): gseNumber is a field declared elsewhere in this class.
    Bson filters = Filters.and(Filters.eq("main_gse_number", gseNumber));

    List<Document> listDocuments = collection.find(filters).into(new ArrayList<Document>());

    for (Document doc : listDocuments) {

        Document expgroup = (Document) doc.get("exp_group");

        // Force the tissue stage to "fetal".
        expgroup.append("id_tissue_stage", 2);
        expgroup.append("tissue_stage", "fetal");

        // Update Mongo document
        doc.put("exp_group", expgroup);
        doc.put("analyzed", true);

        System.out.println(expgroup);

        collection.updateOne(Filters.eq("_id", doc.getString("_id")), new Document("$set", doc));
    }

    mongoClient.close();
}

From source file:module.test.CustomExport.java

License:Open Source License

/**
 * Exports the exp_group and parameters sections of every sample matching
 * the hard-coded query into a two-sheet XLSX workbook.
 *
 * <p>Queries 1 and 2 are kept commented out as alternative selections;
 * Query 3 is the active one.
 */
public CustomExport() {

    // ===== Connection =====

    MongoClient mongoClient = MongoUtil.buildMongoClient();
    MongoDatabase db = mongoClient.getDatabase("epimed_experiments");

    MongoCollection<Document> collection = db.getCollection("samples");

    // ===== Find exp_group in the database =====

    // === Query 1: tumoral breast-cancer samples on platform GPL570 ===
    /*
    String queryName = "breast_cancer_GPL570";
    List<Bson> filters = new ArrayList<Bson>();
    filters.add(Filters.eq("exp_group.id_platform", "GPL570"));
    filters.add(Filters.eq("exp_group.id_topology_group", "C50"));
    filters.add(Filters.eq("exp_group.id_tissue_status", 3)); // tumoral
     */

    // === Query 2: normal breast samples on platform GPL570 ===
    /*
    String queryName = "breast_normal_GPL570";
    List<Bson> filters = new ArrayList<Bson>();
    filters.add(Filters.eq("exp_group.id_platform", "GPL570"));
    filters.add(Filters.eq("exp_group.id_topology_group", "C50"));
    filters.add(Filters.eq("exp_group.id_tissue_status", 1)); // normal
    */

    // === Query 3 (active): tumoral breast-cancer samples on GPL570 that
    // carry at least one survival-related field ===
    String queryName = "breast_cancer_with_survival_GPL570";
    List<Bson> filters = new ArrayList<Bson>();
    filters.add(Filters.eq("exp_group.id_platform", "GPL570"));
    filters.add(Filters.eq("exp_group.id_topology_group", "C50"));
    filters.add(Filters.eq("exp_group.id_tissue_status", 3)); // tumoral
    filters.add(Filters.or(Filters.ne("exp_group.os_months", null), Filters.ne("exp_group.dfss_months", null),
            Filters.ne("exp_group.relapsed", null), Filters.ne("exp_group.dead", null)));

    Bson filter = Filters.and(filters);
    // NOTE(review): collection.count(Bson) is deprecated in recent driver
    // versions in favour of countDocuments(Bson) — confirm driver version.
    Long nbSamples = collection.count(filter);
    List<String> listSeries = collection.distinct("exp_group.main_gse_number", filter, String.class)
            .into(new ArrayList<String>());
    // Encode the result sizes into the output file name.
    queryName = queryName + "_" + nbSamples + "_samples_" + listSeries.size() + "_series";

    // Fetch only the exp_group sub-document of each matching sample.
    List<Document> docExpGroup = collection.find(filter)
            .projection(Projections.fields(Projections.include("exp_group"), Projections.excludeId()))
            .into(new ArrayList<Document>());

    // Fetch only the parameters sub-document of each matching sample.
    List<Document> docParam = collection.find(filter)
            .projection(Projections.fields(Projections.include("parameters"), Projections.excludeId()))
            .into(new ArrayList<Document>());

    mongoClient.close();

    // ===== Load Exp Group into a matrix =====

    List<String> headerExpGroup = new ArrayList<String>();
    List<Object> dataExpGroup = new ArrayList<Object>();

    for (int i = 0; i < docExpGroup.size(); i++) {
        Map<String, String> expGroup = (Map<String, String>) docExpGroup.get(i).get("exp_group");
        // The header row is taken from the first document's key set;
        // assumes all exp_group documents share the same keys — TODO confirm.
        if (i == 0) {
            headerExpGroup.addAll(expGroup.keySet());
        }

        Object[] dataLine = new Object[headerExpGroup.size()];
        for (int j = 0; j < headerExpGroup.size(); j++) {
            dataLine[j] = expGroup.get(headerExpGroup.get(j));
        }
        dataExpGroup.add(dataLine);
    }

    // ===== Load Params into a matrix =====

    // Unlike exp_group, parameter keys may vary per document, so the header
    // is built as the sorted union of every document's keys; missing keys
    // yield null cells.
    Set<String> headerParamSet = new HashSet<String>();
    List<String> headerParam = new ArrayList<String>();
    List<Object> dataParam = new ArrayList<Object>();

    for (int i = 0; i < docParam.size(); i++) {
        Map<String, String> param = (Map<String, String>) docParam.get(i).get("parameters");
        headerParamSet.addAll(param.keySet());
    }
    headerParam.addAll(headerParamSet);
    Collections.sort(headerParam);

    for (int i = 0; i < docParam.size(); i++) {
        Map<String, String> param = (Map<String, String>) docParam.get(i).get("parameters");
        Object[] dataLine = new Object[headerParam.size()];
        for (int j = 0; j < headerParam.size(); j++) {
            dataLine[j] = param.get(headerParam.get(j));
        }
        dataParam.add(dataLine);

    }

    // === Output: one sheet per matrix, file name stamped with the date ===

    String fileName = this.getOutputDirectory() + this.getDirSeparator() + "EpiMed_database_" + queryName + "_"
            + dateFormat.format(new Date()) + ".xlsx";
    System.out.println(fileName);
    XSSFWorkbook workbook = fileService.createWorkbook();
    fileService.addSheet(workbook, "exp_group_" + dateFormat.format(new Date()), headerExpGroup, dataExpGroup);
    fileService.addSheet(workbook, "parameters_" + dateFormat.format(new Date()), headerParam, dataParam);
    fileService.writeWorkbook(workbook, fileName);

}

From source file:mongodb.clients.percunia.mongo.Criteria.java

License:Apache License

/**
 * Combines all accumulated restrictions into a single logical AND filter.
 *
 * @return a filter matching documents that satisfy every restriction
 */
public Bson getRestrictions() {
    final Bson conjunction = Filters.and(restrictions);
    return conjunction;
}

From source file:mongodb.clients.percunia.mongo.Restriction.java

License:Apache License

/**
 * ANDs the given restrictions together into one filter.
 *
 * @param restrictions the filters to combine
 * @return a filter matching documents that satisfy all restrictions
 */
public static Bson and(Bson... restrictions) {
    final Bson conjunction = Filters.and(restrictions);
    return conjunction;
}

From source file:org.apache.rya.indexing.entity.storage.mongo.MongoEntityStorage.java

License:Apache License

/**
 * Replaces {@code old} with {@code updated} atomically.
 *
 * <p>The replacement only succeeds when an entity with the old Subject and
 * the old version is still stored; otherwise the document was modified
 * concurrently and a {@link StaleUpdateException} is thrown.
 *
 * @param old the entity as last read; identifies the document to replace
 * @param updated the replacement; must share the old Subject and have a
 *         strictly greater version
 * @throws StaleUpdateException if no stored document matched the old
 *         Subject and version
 * @throws EntityStorageException if the Subjects differ, the version did
 *         not increase
 */
@Override
public void update(final Entity old, final Entity updated) throws StaleUpdateException, EntityStorageException {
    requireNonNull(old);
    requireNonNull(updated);

    // The updated entity must have the same Subject as the one it is replacing.
    if (!old.getSubject().equals(updated.getSubject())) {
        throw new EntityStorageException(
                "The old Entity and the updated Entity must have the same Subject. " + "Old Subject: "
                        + old.getSubject().getData() + ", Updated Subject: " + updated.getSubject().getData());
    }

    // Make sure the updated Entity has a higher version.
    if (old.getVersion() >= updated.getVersion()) {
        throw new EntityStorageException(
                "The old Entity's version must be less than the updated Entity's version." + " Old version: "
                        + old.getVersion() + " Updated version: " + updated.getVersion());
    }

    final Set<Bson> filters = new HashSet<>();

    // Must match the old entity's Subject.
    filters.add(makeSubjectFilter(old.getSubject()));

    // Must match the old entity's Version.
    filters.add(makeVersionFilter(old.getVersion()));

    // Do a find and replace; a null return means nothing matched, i.e. the
    // stored entity changed since it was read.
    final Bson oldEntityFilter = Filters.and(filters);
    final Document updatedDoc = ENTITY_CONVERTER.toDocument(updated);

    final MongoCollection<Document> collection = mongo.getDatabase(ryaInstanceName)
            .getCollection(COLLECTION_NAME);
    if (collection.findOneAndReplace(oldEntityFilter, updatedDoc) == null) {
        // Fixed: the message previously lacked the closing quote after the Subject.
        throw new StaleUpdateException(
                "Could not update the Entity with Subject '" + updated.getSubject().getData() + "'.");
    }
}

From source file:org.apache.rya.indexing.entity.storage.mongo.MongoEntityStorage.java

License:Apache License

/**
 * Searches for entities that are explicitly of {@code type} and match all
 * of the given {@code properties}, returning a lazily-converting cursor.
 *
 * NOTE(review): the {@code subject} parameter is never null-checked or
 * applied to the query in this method — confirm whether that is intended.
 */
@Override
public ConvertingCursor<TypedEntity> search(final Optional<RyaURI> subject, final Type type,
        final Set<Property> properties) throws EntityStorageException {
    requireNonNull(type);
    requireNonNull(properties);

    try {
        // Match the specified Property values.
        final Set<Bson> filters = properties.stream()
                .flatMap(property -> makePropertyFilters(type.getId(), property)).collect(Collectors.toSet());

        // Only match explicitly Typed entities.
        filters.add(makeExplicitTypeFilter(type.getId()));

        // Get a cursor over the Mongo Document that represent the search results.
        final MongoCursor<Document> cursor = mongo.getDatabase(ryaInstanceName).getCollection(COLLECTION_NAME)
                .find(Filters.and(filters)).iterator();

        // Define that Converter that converts from Document into TypedEntity.
        final Converter<TypedEntity> converter = document -> {
            try {
                final Entity entity = ENTITY_CONVERTER.fromDocument(document);
                final Optional<TypedEntity> typedEntity = entity.makeTypedEntity(type.getId());
                // A parsed entity that cannot be cast to the requested type is
                // a programming error, surfaced as an unchecked exception.
                if (!typedEntity.isPresent()) {
                    throw new RuntimeException("Entity with Subject '" + entity.getSubject()
                            + "' could not be cast into Type '" + type.getId() + "'.");
                }
                return typedEntity.get();

            } catch (final DocumentConverterException e) {
                throw new RuntimeException("Document '" + document + "' could not be parsed into an Entity.",
                        e);
            }
        };

        // Return a cursor that performs the conversion.
        return new ConvertingCursor<TypedEntity>(converter, cursor);

    } catch (final MongoException e) {
        throw new EntityStorageException("Could not search Entity.", e);
    }
}

From source file:org.apache.rya.indexing.geotemporal.mongo.MongoEventStorage.java

License:Apache License

/**
 * Replaces {@code old} with {@code updated} atomically.
 *
 * <p>The replacement only succeeds when an event with the old Subject is
 * still stored; otherwise the document was modified concurrently and a
 * {@link StaleUpdateException} is thrown.
 *
 * @param old the event as last read; identifies the document to replace
 * @param updated the replacement; must share the old Subject
 * @throws StaleUpdateException if no stored document matched the old Subject
 * @throws EventStorageException if the Subjects differ
 */
@Override
public void update(final Event old, final Event updated) throws StaleUpdateException, EventStorageException {
    requireNonNull(old);
    requireNonNull(updated);

    // The updated entity must have the same Subject as the one it is replacing.
    if (!old.getSubject().equals(updated.getSubject())) {
        throw new EventStorageException(
                "The old Event and the updated Event must have the same Subject. " + "Old Subject: "
                        + old.getSubject().getData() + ", Updated Subject: " + updated.getSubject().getData());
    }

    final Set<Bson> filters = new HashSet<>();

    // Must match the old entity's Subject.
    filters.add(makeSubjectFilter(old.getSubject()));

    // Do a find and replace; a null return means nothing matched, i.e. the
    // stored event changed since it was read.
    final Bson oldEntityFilter = Filters.and(filters);
    final Document updatedDoc = EVENT_CONVERTER.toDocument(updated);

    final MongoCollection<Document> collection = mongo.getDatabase(ryaInstanceName)
            .getCollection(COLLECTION_NAME);
    if (collection.findOneAndReplace(oldEntityFilter, updatedDoc) == null) {
        // Fixed: the message previously lacked the closing quote after the Subject.
        throw new StaleUpdateException(
                "Could not update the Event with Subject '" + updated.getSubject().getData() + "'.");
    }
}