Example usage for com.google.gson JsonObject addProperty

List of usage examples for com.google.gson JsonObject addProperty

Introduction

On this page you can find example usage of com.google.gson JsonObject addProperty.

Prototype

public void addProperty(String property, Character value) 

Source Link

Document

Convenience method to add a char member.

Usage

From source file:co.cask.cdap.common.zookeeper.coordination.DiscoverableCodec.java

License:Apache License

@Override
public JsonElement serialize(Discoverable src, Type typeOfSrc, JsonSerializationContext context) {
    // Encode the discoverable as a flat JSON object: service name plus host/port of its address.
    JsonObject result = new JsonObject();
    result.addProperty("service", src.getName());
    result.addProperty("hostname", src.getSocketAddress().getHostName());
    result.addProperty("port", src.getSocketAddress().getPort());
    return result;
}

From source file:co.cask.cdap.common.zookeeper.coordination.ResourceAssignmentTypeAdapter.java

License:Apache License

@Override
public JsonElement serialize(ResourceAssignment src, Type typeOfSrc, JsonSerializationContext context) {
    // Encodes a ResourceAssignment as:
    //   {"name": <name>, "assignments": [[<discoverable>, <partitionReplica>], ...]}
    JsonObject json = new JsonObject();
    json.addProperty("name", src.getName());

    // Each assignment entry becomes a two-element JSON array: [key, value].
    JsonArray assignments = new JsonArray();
    for (Map.Entry<Discoverable, PartitionReplica> entry : src.getAssignments().entries()) {
        JsonArray entryJson = new JsonArray();
        // Serialize the key with an explicit type so the registered Discoverable adapter is used.
        entryJson.add(context.serialize(entry.getKey(), Discoverable.class));
        entryJson.add(context.serialize(entry.getValue()));
        assignments.add(entryJson);
    }

    json.add("assignments", assignments);

    return json;
}

From source file:co.cask.cdap.common.zookeeper.coordination.ServiceDiscoveredCodec.java

License:Apache License

@Override
public JsonElement serialize(ServiceDiscovered serviceDiscovered, Type typeOfSrc,
        JsonSerializationContext context) {
    // Encode the discovered service as a JSON array of {host, port} objects,
    // one element per discoverable endpoint.
    JsonArray endpoints = new JsonArray();
    for (Discoverable discoverable : serviceDiscovered) {
        JsonObject endpoint = new JsonObject();
        endpoint.addProperty("host", discoverable.getSocketAddress().getHostName());
        endpoint.addProperty("port", discoverable.getSocketAddress().getPort());
        endpoints.add(endpoint);
    }
    return endpoints;
}

From source file:co.cask.cdap.data.tools.UpgradeTool.java

License:Apache License

// Upgrades a stored Metadata Dataset spec in place: adds the "scope" property and the 4.0
// "columnsToIndex" list to the spec and its embedded datasets, then re-registers the spec.
// No-op (with a log line) if the dataset does not exist.
private void upgradeMetadataDatasetSpec(MetadataScope scope, DatasetId metadataDatasetId) {
    DatasetSpecification oldMetadataDatasetSpec = datasetInstanceManager.get(metadataDatasetId);
    if (oldMetadataDatasetSpec == null) {
        LOG.info("Metadata Dataset {} not found. No upgrade necessary.", metadataDatasetId);
        return;
    }
    // Updating the type in the spec using Gson. This option was chosen over:
    // 1. Build a new DatasetSpecification using the DatasetSpecification Builder: This seems clean, but because
    // of the namespacing logic in the builder, you would need to change names of the embedded datasets first,
    // leading to unnecessary complex logic for this temporary code.
    // TODO: CDAP-7834: Adding new indexed columns should be supported by IndexedTable.
    // TODO: CDAP-7835: This should be moved out (probably to MetadataService) so it can be run after CDAP starts up.
    Gson gson = new Gson();
    // Round-trip the spec through its JSON tree so properties can be patched directly.
    JsonObject jsonObject = gson.toJsonTree(oldMetadataDatasetSpec, DatasetSpecification.class)
            .getAsJsonObject();
    JsonObject metadataDatasetProperties = jsonObject.get("properties").getAsJsonObject();
    metadataDatasetProperties.addProperty("scope", scope.name());
    // change the columnsToIndex since in 4.0 we added 4 more index columns
    JsonObject metadataIndexObject = jsonObject.get("datasetSpecs").getAsJsonObject().get("metadata_index")
            .getAsJsonObject();
    JsonObject properties = metadataIndexObject.get("properties").getAsJsonObject();
    properties.addProperty("columnsToIndex", MetadataDataset.COLUMNS_TO_INDEX);
    properties.addProperty("scope", scope.name());
    // The embedded "d" (data) and "i" (index) dataset specs need the same property updates.
    JsonObject dProperties = metadataIndexObject.get("datasetSpecs").getAsJsonObject().get("d")
            .getAsJsonObject().get("properties").getAsJsonObject();
    JsonObject iProperties = metadataIndexObject.get("datasetSpecs").getAsJsonObject().get("i")
            .getAsJsonObject().get("properties").getAsJsonObject();
    dProperties.addProperty("columnsToIndex", MetadataDataset.COLUMNS_TO_INDEX);
    dProperties.addProperty("scope", scope.name());
    iProperties.addProperty("columnsToIndex", MetadataDataset.COLUMNS_TO_INDEX);
    iProperties.addProperty("scope", scope.name());

    // Replace the registered spec: delete the old instance, then add the patched one under SYSTEM.
    DatasetSpecification newMetadataDatasetSpec = gson.fromJson(jsonObject, DatasetSpecification.class);
    datasetInstanceManager.delete(metadataDatasetId);
    datasetInstanceManager.add(NamespaceId.SYSTEM, newMetadataDatasetSpec);
    LOG.info("Found old Metadata Dataset Spec {}. Upgraded it to new spec {}.", oldMetadataDatasetSpec,
            newMetadataDatasetSpec);
}

From source file:co.cask.cdap.etl.mock.realtime.StructuredRecordCodec.java

License:Apache License

@Override
public JsonElement serialize(StructuredRecord src, Type typeOfSrc, JsonSerializationContext context) {
    // Encode the record as {"record": <json string>, "schema": <schema string>}.
    // The record itself is stored as a pre-rendered JSON string, not a nested object.
    JsonObject encoded = new JsonObject();
    try {
        encoded.addProperty("record", StructuredRecordStringConverter.toJsonString(src));
        encoded.addProperty("schema", src.getSchema().toString());
    } catch (IOException e) {
        // Gson serializers cannot throw checked exceptions; surface as unchecked.
        throw new RuntimeException(e);
    }
    return encoded;
}

From source file:co.cask.cdap.etl.spark.batch.DatasetInfoTypeAdapter.java

License:Apache License

@Override
public JsonElement serialize(DatasetInfo src, Type typeOfSrc, JsonSerializationContext context) {
    // Encode name and arguments unconditionally; splits only when present and non-empty.
    JsonObject result = new JsonObject();
    result.addProperty("datasetName", src.getDatasetName());
    result.add("datasetArgs", context.serialize(src.getDatasetArgs()));
    boolean hasSplits = src.getSplits() != null && !src.getSplits().isEmpty();
    if (hasSplits) {
        // Record the concrete split class so deserialization can reconstruct the right type.
        result.addProperty("datasetSplitClass", src.getSplits().get(0).getClass().getName());
        result.add("datasetSplits", context.serialize(src.getSplits()));
    }
    return result;
}

From source file:co.cask.cdap.etl.spark.batch.InputFormatProviderTypeAdapter.java

License:Apache License

@Override
public JsonElement serialize(InputFormatProvider src, Type typeOfSrc, JsonSerializationContext context) {
    // Encode the provider as its input format class name plus its configuration map.
    JsonObject result = new JsonObject();
    result.addProperty("inputFormatClass", src.getInputFormatClassName());
    result.add("inputFormatConfig", context.serialize(src.getInputFormatConfiguration()));
    return result;
}

From source file:co.cask.cdap.etl.spark.batch.OutputFormatProviderTypeAdapter.java

License:Apache License

@Override
public JsonElement serialize(OutputFormatProvider src, Type typeOfSrc, JsonSerializationContext context) {
    // Encode the provider as its output format class name plus its configuration map.
    JsonObject result = new JsonObject();
    result.addProperty("outputFormatClass", src.getOutputFormatClassName());
    result.add("outputFormatConfig", context.serialize(src.getOutputFormatConfiguration()));
    return result;
}

From source file:co.cask.cdap.explore.executor.AbstractExploreMetadataHttpHandler.java

License:Apache License

// Runs the given execution through the generic endpoint wrapper and replies with
// {"handle": <query handle>} as JSON on success.
protected void handleResponseEndpointExecution(HttpRequest request, HttpResponder responder,
        final EndpointCoreExecution<QueryHandle> execution) throws ExploreException, IOException {
    genericEndpointExecution(request, responder, new EndpointCoreExecution<Void>() {
        @Override
        public Void execute(HttpRequest request, HttpResponder responder)
                throws IllegalArgumentException, SQLException, ExploreException, IOException {
            QueryHandle handle = execution.execute(request, responder);
            // Wrap the handle in a one-field JSON object for the client.
            JsonObject body = new JsonObject();
            body.addProperty("handle", handle.getHandle());
            responder.sendJson(HttpResponseStatus.OK, body);
            return null;
        }
    });
}

From source file:co.cask.cdap.explore.executor.ExploreExecutorHttpHandler.java

License:Apache License

@POST
@Path("streams/{stream}/tables/{table}/enable")
public void enableStream(HttpRequest request, HttpResponder responder,
        @PathParam("namespace-id") String namespaceId, @PathParam("stream") String streamName,
        @PathParam("table") String tableName) throws Exception {
    Id.Stream streamId = Id.Stream.from(namespaceId, streamName);
    // The request body must contain a FormatSpecification; the reader is closed automatically.
    try (Reader bodyReader = new InputStreamReader(new ChannelBufferInputStream(request.getContent()))) {
        FormatSpecification format = GSON.fromJson(bodyReader, FormatSpecification.class);
        if (format == null) {
            throw new BadRequestException("Expected format in the body");
        }
        QueryHandle handle = exploreTableManager.enableStream(tableName, streamId, format);
        // Reply with {"handle": <query handle>} on success.
        JsonObject body = new JsonObject();
        body.addProperty("handle", handle.getHandle());
        responder.sendJson(HttpResponseStatus.OK, body);
    } catch (UnsupportedTypeException e) {
        // The stream's schema cannot be mapped to a table type; report as a client error.
        LOG.error("Exception while generating create statement for stream {}", streamName, e);
        responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
    }
}