List of usage examples for com.google.gson JsonSerializationContext serialize
public JsonElement serialize(Object src);
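Inside a custom Gson JsonSerializer, the context is used to delegate serialization of nested values back to Gson, as the examples below illustrate. The following is a minimal, self-contained sketch of that pattern, not taken from any of the listed sources; the Person class, its fields, and PersonSerializer are hypothetical names used only for illustration.

import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import java.lang.reflect.Type;
import java.util.List;

// Hypothetical value class used only to demonstrate the pattern.
class Person {
  String name;
  List<String> tags;
}

class PersonSerializer implements JsonSerializer<Person> {
  @Override
  public JsonElement serialize(Person src, Type typeOfSrc, JsonSerializationContext context) {
    JsonObject obj = new JsonObject();
    obj.addProperty("name", src.name);
    // Delegate the nested list back to Gson through the context.
    obj.add("tags", context.serialize(src.tags));
    return obj;
  }
}

Such a serializer would typically be registered with new GsonBuilder().registerTypeAdapter(Person.class, new PersonSerializer()).create().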
From source file:co.cask.cdap.api.dataset.lib.partitioned.ComparableCodec.java
License:Apache License
@Nullable
protected JsonElement serializeComparable(@Nullable Comparable comparable,
                                          JsonSerializationContext jsonSerializationContext) {
  if (comparable == null) {
    return null;
  }
  JsonArray jsonArray = new JsonArray();
  jsonArray.add(jsonSerializationContext.serialize(comparable.getClass().getName()));
  jsonArray.add(jsonSerializationContext.serialize(comparable));
  return jsonArray;
}
From source file:co.cask.cdap.etl.common.SetMultimapCodec.java
License:Apache License
@Override
public JsonElement serialize(SetMultimap<K, V> src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject obj = new JsonObject();
  obj.add("map", context.serialize(src.asMap()));
  return obj;
}
From source file:co.cask.cdap.etl.spark.batch.DatasetInfoTypeAdapter.java
License:Apache License
@Override
public JsonElement serialize(DatasetInfo src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject jsonObj = new JsonObject();
  jsonObj.addProperty("datasetName", src.getDatasetName());
  jsonObj.add("datasetArgs", context.serialize(src.getDatasetArgs()));
  if (src.getSplits() != null && !src.getSplits().isEmpty()) {
    jsonObj.addProperty("datasetSplitClass", src.getSplits().get(0).getClass().getName());
    jsonObj.add("datasetSplits", context.serialize(src.getSplits()));
  }
  return jsonObj;
}
From source file:co.cask.cdap.etl.spark.batch.InputFormatProviderTypeAdapter.java
License:Apache License
@Override
public JsonElement serialize(InputFormatProvider src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject jsonObj = new JsonObject();
  jsonObj.addProperty("inputFormatClass", src.getInputFormatClassName());
  jsonObj.add("inputFormatConfig", context.serialize(src.getInputFormatConfiguration()));
  return jsonObj;
}
From source file:co.cask.cdap.etl.spark.batch.OutputFormatProviderTypeAdapter.java
License:Apache License
@Override
public JsonElement serialize(OutputFormatProvider src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject jsonObj = new JsonObject();
  jsonObj.addProperty("outputFormatClass", src.getOutputFormatClassName());
  jsonObj.add("outputFormatConfig", context.serialize(src.getOutputFormatConfiguration()));
  return jsonObj;
}
From source file:co.cask.cdap.internal.app.ApplicationSpecificationCodec.java
License:Apache License
@Override
public JsonElement serialize(ApplicationSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject jsonObj = new JsonObject();
  jsonObj.add("name", new JsonPrimitive(src.getName()));
  if (src.getConfiguration() != null) {
    jsonObj.add("configuration", new JsonPrimitive(src.getConfiguration()));
  }
  jsonObj.add("artifactId", context.serialize(src.getArtifactId()));
  jsonObj.add("description", new JsonPrimitive(src.getDescription()));
  jsonObj.add("streams", serializeMap(src.getStreams(), context, StreamSpecification.class));
  jsonObj.add("datasetModules", serializeMap(src.getDatasetModules(), context, String.class));
  jsonObj.add("datasetInstances", serializeMap(src.getDatasets(), context, DatasetCreationSpec.class));
  jsonObj.add("flows", serializeMap(src.getFlows(), context, FlowSpecification.class));
  jsonObj.add("mapReduces", serializeMap(src.getMapReduce(), context, MapReduceSpecification.class));
  jsonObj.add("sparks", serializeMap(src.getSpark(), context, SparkSpecification.class));
  jsonObj.add("workflows", serializeMap(src.getWorkflows(), context, WorkflowSpecification.class));
  jsonObj.add("services", serializeMap(src.getServices(), context, ServiceSpecification.class));
  jsonObj.add("schedules", serializeMap(src.getSchedules(), context, ScheduleSpecification.class));
  jsonObj.add("workers", serializeMap(src.getWorkers(), context, WorkerSpecification.class));
  jsonObj.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));
  return jsonObj;
}
From source file:co.cask.cdap.proto.codec.EntityIdTypeAdapter.java
License:Apache License
@Override
public JsonElement serialize(EntityId src, Type typeOfSrc, JsonSerializationContext context) {
  return context.serialize(src);
}
From source file:co.cask.cdap.proto.codec.IdTypeAdapter.java
License:Apache License
@Override
public JsonElement serialize(Id src, Type typeOfSrc, JsonSerializationContext context) {
  return context.serialize(src.toEntityId());
}
From source file:co.cask.cdap.proto.codec.MapReduceSpecificationCodec.java
License:Apache License
@Override
public JsonElement serialize(MapReduceSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject jsonObj = new JsonObject();
  jsonObj.addProperty("className", src.getClassName());
  jsonObj.addProperty("name", src.getName());
  jsonObj.addProperty("description", src.getDescription());
  if (src.getDriverResources() != null) {
    jsonObj.add("driverResources", context.serialize(src.getDriverResources()));
  }
  if (src.getMapperResources() != null) {
    jsonObj.add("mapperResources", context.serialize(src.getMapperResources()));
  }
  if (src.getReducerResources() != null) {
    jsonObj.add("reducerResources", context.serialize(src.getReducerResources()));
  }
  if (src.getInputDataSet() != null) {
    jsonObj.addProperty("inputDataSet", src.getInputDataSet());
  }
  if (src.getOutputDataSet() != null) {
    jsonObj.addProperty("outputDataSet", src.getOutputDataSet());
  }
  jsonObj.add("datasets", serializeSet(src.getDataSets(), context, String.class));
  jsonObj.add("properties", serializeMap(src.getProperties(), context, String.class));
  return jsonObj;
}
From source file:co.cask.cdap.proto.codec.NamespacedIdCodec.java
License:Apache License
@Override
public JsonElement serialize(Id.NamespacedId src, Type typeOfSrc, JsonSerializationContext context) {
  JsonObject jsonObj = new JsonObject();
  jsonObj.add("type", new JsonPrimitive(src.getIdType()));
  jsonObj.add("id", context.serialize(src));
  return jsonObj;
}