Example usage for io.vertx.core.json JsonObject toString

List of usage examples for io.vertx.core.json JsonObject toString

Introduction

On this page you can find example usages of io.vertx.core.json JsonObject toString.

Prototype

@Override
public String toString()
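
The prototype above is JsonObject's override of Object.toString(); in Vert.x core it returns the JSON-encoded string of the object (equivalent to calling encode()). A minimal standalone sketch of that behaviour:

import io.vertx.core.json.JsonObject;

public class JsonObjectToStringExample {
    public static void main(String[] args) {
        JsonObject user = new JsonObject()
                .put("name", "Ada")
                .put("admin", true);

        // toString() produces the compact JSON encoding of the object
        String json = user.toString();
        System.out.println(json); // e.g. {"name":"Ada","admin":true}
    }
}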

Usage

From source file:org.entcore.common.storage.impl.GridfsStorage.java

License:Open Source License

private void writeBuffer(String id, Buffer buff, Long maxSize, String contentType, String filename,
        final JsonObject m, Handler<JsonObject> handler) {
    JsonObject save = new JsonObject();
    save.put("action", "save");
    save.put("content-type", contentType);
    save.put("filename", filename);
    if (id != null && !id.trim().isEmpty()) {
        save.put("_id", id);
    }
    final JsonObject metadata = (m != null) ? m
            : new JsonObject().put("content-type", contentType).put("filename", filename);
    if (metadata.getLong("size", 0l).equals(0l)) {
        metadata.put("size", buff.length());
    }
    if (maxSize != null && maxSize < metadata.getLong("size", 0l)) {
        handler.handle(new JsonObject().put("status", "error").put("message", "file.too.large"));
        return;
    }
    byte[] header = null;
    try {
        header = save.toString().getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
        JsonObject json = new JsonObject().put("status", "error").put("message", e.getMessage());
        handler.handle(json);
    }
    if (header != null) {
        buff.appendBytes(header).appendInt(header.length);
        eb.send(gridfsAddress, buff, handlerToAsyncHandler(new Handler<Message<JsonObject>>() {
            @Override
            public void handle(Message<JsonObject> message) {
                handler.handle(message.body().put("metadata", metadata));
            }
        }));
    }
}
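
These GridfsStorage examples all serialize the command JsonObject with save.toString().getBytes("UTF-8") and therefore have to handle the checked UnsupportedEncodingException. As a side note, the same header framing can be written without that exception by passing a Charset instead of a charset name; a minimal sketch, not part of the original source:

// Appends the JSON header bytes followed by their length, as the examples above do,
// using StandardCharsets.UTF_8 so no UnsupportedEncodingException needs to be caught.
private static Buffer appendJsonHeader(Buffer buff, JsonObject header) {
    byte[] headerBytes = header.toString().getBytes(java.nio.charset.StandardCharsets.UTF_8);
    return buff.appendBytes(headerBytes).appendInt(headerBytes.length);
}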

From source file:org.entcore.common.storage.impl.GridfsStorage.java

License:Open Source License

public void saveChunk(String id, Buffer buff, int n, String contentType, String filename, long fileSize,
        final Handler<JsonObject> handler) {
    JsonObject save = new JsonObject();
    save.put("action", "saveChunk");
    save.put("content-type", contentType);
    save.put("filename", filename);
    save.put("_id", id);
    save.put("n", n);
    save.put("length", fileSize);

    byte[] header = null;
    try {
        header = save.toString().getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
        JsonObject json = new JsonObject().put("status", "error").put("message", e.getMessage());
        handler.handle(json);
    }
    if (header != null) {
        buff.appendBytes(header).appendInt(header.length);
        eb.send(gridfsAddress, buff, handlerToAsyncHandler(new Handler<Message<JsonObject>>() {
            @Override
            public void handle(Message<JsonObject> event) {
                handler.handle(event.body());
            }
        }));
    }
}

From source file:org.entcore.common.storage.impl.GridfsStorage.java

License:Open Source License

@Override
public void readFile(String id, Handler<Buffer> handler) {
    JsonObject find = new JsonObject();
    find.put("action", "findone");
    find.put("query", new JsonObject("{ \"_id\": \"" + id + "\"}"));
    byte[] header = null;
    try {
        header = find.toString().getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
        handler.handle(Buffer.buffer());
    }
    if (header != null) {
        Buffer buf = Buffer.buffer(header);
        buf.appendInt(header.length);
        eb.send(gridfsAddress, buf, new Handler<AsyncResult<Message<Buffer>>>() {
            @Override
            public void handle(AsyncResult<Message<Buffer>> res) {
                if (res.succeeded()) {
                    handler.handle(res.result().body());
                } else {
                    handler.handle(null);
                }
            }
        });
    }
}

From source file:org.entcore.common.storage.impl.GridfsStorage.java

License:Open Source License

private static void gridfsReadChunkFile(final String id, final EventBus eb, final String gridfsAddress,
        final WriteStream writeStream, final Handler<Chunk> handler) {
    JsonObject find = new JsonObject();
    find.put("action", "countChunks");
    find.put("files_id", id);
    byte[] header = null;
    try {
        header = find.toString().getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
        log.error(e.getMessage(), e);
        handler.handle(null);
    }
    if (header != null) {
        Buffer buf = Buffer.buffer(header);
        buf.appendInt(header.length);
        eb.send(gridfsAddress, buf, new Handler<AsyncResult<Message<Object>>>() {
            @Override
            public void handle(AsyncResult<Message<Object>> res) {
                if (res.succeeded() && res.result().body() instanceof Long) {
                    Long number = (Long) res.result().body();
                    if (number == null || number == 0l) {
                        handler.handle(null);
                    } else {
                        final Handler[] handlers = new Handler[number.intValue()];
                        handlers[handlers.length - 1] = new Handler<Chunk>() {
                            @Override
                            public void handle(Chunk chunk) {
                                handler.handle(chunk);
                                handler.handle(new Chunk(-1, null));
                            }
                        };
                        for (int i = number.intValue() - 2; i >= 0; i--) {
                            final int j = i;
                            handlers[i] = new Handler<Chunk>() {
                                @Override
                                public void handle(final Chunk chunk) {
                                    if (writeStream != null && writeStream.writeQueueFull()) {
                                        writeStream.drainHandler(new Handler<Void>() {
                                            @Override
                                            public void handle(Void event) {
                                                log.debug("in drain handler");
                                                writeStream.drainHandler(null);
                                                handler.handle(chunk);
                                                getChunk(id, j + 1, eb, gridfsAddress, new Handler<Chunk>() {
                                                    @Override
                                                    public void handle(Chunk res) {
                                                        handlers[j + 1].handle(res);
                                                    }
                                                });
                                            }
                                        });
                                    } else {
                                        handler.handle(chunk);
                                        getChunk(id, j + 1, eb, gridfsAddress, new Handler<Chunk>() {
                                            @Override
                                            public void handle(Chunk res) {
                                                handlers[j + 1].handle(res);
                                            }
                                        });
                                    }
                                }
                            };
                        }
                        getChunk(id, 0, eb, gridfsAddress, handlers[0]);
                    }
                } else {
                    handler.handle(null);
                }
            }
        });
    }
}

From source file:org.entcore.common.storage.impl.GridfsStorage.java

License:Open Source License

public static void getChunk(String id, final int j, EventBus eb, String gridfsAddress,
        final Handler<Chunk> handler) {
    JsonObject find = new JsonObject();
    find.put("action", "getChunk");
    find.put("files_id", id);
    find.put("n", j);
    byte[] header = null;
    try {
        header = find.toString().getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
        handler.handle(null);
        return; // the header could not be encoded, so do not build or send the request
    }
    Buffer buf = Buffer.buffer(header);
    buf.appendInt(header.length);
    eb.send(gridfsAddress, buf, new Handler<AsyncResult<Message<Buffer>>>() {
        @Override
        public void handle(AsyncResult<Message<Buffer>> res) {
            if (res.succeeded()) {
                handler.handle(new Chunk(j, res.result().body()));
            } else {
                handler.handle(null);

            }
        }
    });
}

From source file:org.entcore.common.storage.impl.GridfsStorage.java

License:Open Source License

@Override
public void removeFile(String id, Handler<JsonObject> handler) {
    JsonArray ids = new fr.wseduc.webutils.collections.JsonArray().add(id);
    JsonObject find = new JsonObject();
    find.put("action", "remove");
    JsonObject query = new JsonObject();
    if (ids != null && ids.size() == 1) {
        query.put("_id", ids.getString(0));
    } else {
        query.put("_id", new JsonObject().put("$in", ids));
    }
    find.put("query", query);
    byte[] header = null;
    try {
        header = find.toString().getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
        handler.handle(new JsonObject().put("status", "error"));
    }
    if (header != null) {
        Buffer buf = Buffer.buffer(header);
        buf.appendInt(header.length);
        eb.send(gridfsAddress, buf, handlerToAsyncHandler(new Handler<Message<JsonObject>>() {
            @Override
            public void handle(Message<JsonObject> res) {
                if (handler != null) {
                    handler.handle(res.body());
                }
            }
        }));
    }
}

From source file:org.entcore.common.storage.impl.GridfsStorage.java

License:Open Source License

@Override
public void copyFile(String id, Handler<JsonObject> handler) {
    JsonObject find = new JsonObject();
    find.put("action", "copy");
    find.put("query", new JsonObject("{ \"_id\": \"" + id + "\"}"));
    byte[] header = null;
    try {
        header = find.toString().getBytes("UTF-8");
    } catch (UnsupportedEncodingException e) {
        handler.handle(new JsonObject().put("status", "error"));
    }
    if (header != null) {
        Buffer buf = Buffer.buffer(header);
        buf.appendInt(header.length);
        eb.send(gridfsAddress, buf, handlerToAsyncHandler(new Handler<Message<JsonObject>>() {
            @Override
            public void handle(Message<JsonObject> res) {
                handler.handle(res.body());
            }
        }));
    }
}

From source file:org.entcore.timeline.services.impl.DefaultTimelineMailerService.java

License:Open Source License

private void getRecipientsUsers(Date from, final Handler<JsonArray> handler) {
    final JsonObject aggregation = new JsonObject();
    JsonArray pipeline = new fr.wseduc.webutils.collections.JsonArray();
    aggregation.put("aggregate", "timeline").put("allowDiskUse", true).put("pipeline", pipeline);

    JsonObject matcher = MongoQueryBuilder.build(QueryBuilder.start("date").greaterThanEquals(from));
    JsonObject grouper = new JsonObject(
            "{ \"_id\" : \"notifiedUsers\", \"recipients\" : {\"$addToSet\" : \"$recipients.userId\"}}");

    pipeline.add(new JsonObject().put("$match", matcher));
    pipeline.add(new JsonObject().put("$unwind", "$recipients"));
    pipeline.add(new JsonObject().put("$group", grouper));

    mongo.command(aggregation.toString(), new Handler<Message<JsonObject>>() {
        @Override
        public void handle(Message<JsonObject> event) {
            if ("error".equals(event.body().getString("status", "error"))) {
                handler.handle(new fr.wseduc.webutils.collections.JsonArray());
            } else {
                JsonArray r = event.body().getJsonObject("result", new JsonObject()).getJsonArray("result");
                if (r != null && r.size() > 0) {
                    handler.handle(r.getJsonObject(0).getJsonArray("recipients",
                            new fr.wseduc.webutils.collections.JsonArray()));
                } else {
                    handler.handle(new fr.wseduc.webutils.collections.JsonArray());
                }
            }
        }

    });
}

From source file:org.entcore.timeline.services.impl.DefaultTimelineMailerService.java

License:Open Source License

/**
 * Retrieves an aggregated list of notifications from mongodb for a single user.
 *
 *  Notifications are grouped by type & event-type.
 * @param userId : Userid
 * @param from : Starting date in the past
 * @param handler: Handles the notifications
 */
private void getAggregatedUserNotifications(String userId, Date from, final Handler<JsonArray> handler) {
    final JsonObject aggregation = new JsonObject();
    JsonArray pipeline = new fr.wseduc.webutils.collections.JsonArray();
    aggregation.put("aggregate", "timeline").put("allowDiskUse", true).put("pipeline", pipeline);

    JsonObject matcher = MongoQueryBuilder.build(QueryBuilder.start("recipients")
            .elemMatch(QueryBuilder.start("userId").is(userId).get()).and("date").greaterThanEquals(from));
    JsonObject grouper = new JsonObject(
            "{ \"_id\" : { \"type\": \"$type\", \"event-type\": \"$event-type\"}, \"count\": { \"$sum\": 1 } }");
    JsonObject transformer = new JsonObject(
            "{ \"type\": \"$_id.type\", \"event-type\": \"$_id.event-type\", \"count\": 1, \"_id\": 0 }");

    pipeline.add(new JsonObject().put("$match", matcher));
    pipeline.add(new JsonObject().put("$group", grouper));
    pipeline.add(new JsonObject().put("$project", transformer));

    mongo.command(aggregation.toString(), new Handler<Message<JsonObject>>() {
        @Override
        public void handle(Message<JsonObject> event) {
            if ("error".equals(event.body().getString("status", "error"))) {
                handler.handle(new fr.wseduc.webutils.collections.JsonArray());
            } else {
                handler.handle(event.body().getJsonObject("result", new JsonObject()).getJsonArray("result",
                        new fr.wseduc.webutils.collections.JsonArray()));
            }
        }

    });
}

From source file:org.gooru.nucleus.handlers.resources.processors.repositories.activejdbc.dbhandlers.DBHelper.java

static JsonObject getCopiesOfAResource(AJEntityResource resource, String originalResourceId) {
    JsonObject returnValue = null;

    setPGObject(resource, AJEntityResource.CONTENT_FORMAT, AJEntityResource.CONTENT_FORMAT_TYPE,
            AJEntityResource.VALID_CONTENT_FORMAT_FOR_RESOURCE);

    LazyList<AJEntityResource> result = AJEntityResource.findBySQL(AJEntityResource.SQL_GETCOPIESOFARESOURCE,
            AJEntityResource.VALID_CONTENT_FORMAT_FOR_RESOURCE, originalResourceId);
    if (result.size() > 0) {
        JsonArray idArray = new JsonArray();
        JsonArray collectionIdArray = new JsonArray();
        String collectionId;
        for (AJEntityResource model : result) {
            idArray.add(model.get(AJEntityResource.RESOURCE_ID).toString());
            collectionId = model.getString(AJEntityResource.COLLECTION_ID);
            if (collectionId != null && !collectionId.isEmpty()) {
                collectionIdArray.add(collectionId);
            }
        }
        returnValue = new JsonObject().put("resource_copy_ids", idArray)
                .put(AJEntityResource.COLLECTION_ID, collectionIdArray).put("id", originalResourceId);
        LOGGER.debug("getCopiesOfAResource ! : {} ", returnValue.toString());
    }
    return returnValue;
}