List of usage examples for com.google.gson.JsonElement.getAsJsonObject()
public JsonObject getAsJsonObject()
From source file: co.cask.cdap.api.dataset.lib.ConditionCodec.java
License: Apache License
@Override public PartitionFilter.Condition deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext deserializationContext) throws JsonParseException { JsonObject jsonObject = jsonElement.getAsJsonObject(); boolean isSingleValue = jsonObject.get("isSingleValue").getAsBoolean(); if (isSingleValue) { return new PartitionFilter.Condition<>(jsonObject.get("fieldName").getAsString(), deserializeComparable(jsonObject.get("lower"), deserializationContext)); } else {//from w w w . j ava 2 s. com return new PartitionFilter.Condition<>(jsonObject.get("fieldName").getAsString(), deserializeComparable(jsonObject.get("lower"), deserializationContext), deserializeComparable(jsonObject.get("upper"), deserializationContext)); } }
From source file: co.cask.cdap.api.dataset.lib.partitioned.PartitionKeyCodec.java
License: Apache License
@Override public PartitionKey deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException { JsonObject jsonObject = jsonElement.getAsJsonObject(); PartitionKey.Builder builder = PartitionKey.builder(); for (Map.Entry<String, JsonElement> entry : jsonObject.entrySet()) { JsonArray jsonArray = entry.getValue().getAsJsonArray(); builder.addField(entry.getKey(), deserializeComparable(jsonArray, jsonDeserializationContext)); }//from ww w . java 2s .c om return builder.build(); }
From source file: co.cask.cdap.common.conf.PluginClassDeserializer.java
License: Apache License
@Override public PluginClass deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { if (!json.isJsonObject()) { throw new JsonParseException("PluginClass should be a JSON Object"); }//from ww w . j av a2 s.com JsonObject jsonObj = json.getAsJsonObject(); String type = jsonObj.has("type") ? jsonObj.get("type").getAsString() : Plugin.DEFAULT_TYPE; String name = getRequired(jsonObj, "name").getAsString(); String description = jsonObj.has("description") ? jsonObj.get("description").getAsString() : ""; String className = getRequired(jsonObj, "className").getAsString(); Set<String> endpointsSet = new HashSet<>(); if (jsonObj.has("endpoints")) { endpointsSet = context.deserialize(jsonObj.get("endpoints"), ENDPOINTS_TYPE); } Map<String, PluginPropertyField> properties = jsonObj.has("properties") ? context.<Map<String, PluginPropertyField>>deserialize(jsonObj.get("properties"), PROPERTIES_TYPE) : ImmutableMap.<String, PluginPropertyField>of(); return new PluginClass(type, name, description, className, null, properties, endpointsSet); }
From source file: co.cask.cdap.common.zookeeper.coordination.DiscoverableCodec.java
License: Apache License
/**
 * Deserializes a {@link Discoverable} from a JSON object carrying "service",
 * "hostname" and "port" fields.
 *
 * @param json JSON object holding the serialized discoverable
 * @param typeOfT ignored; required by the {@link JsonDeserializer} contract
 * @param context unused; all fields are read directly from the object
 * @return an anonymous {@link Discoverable} exposing the decoded name and address
 * @throws JsonParseException if the element or its fields are malformed
 */
@Override
public Discoverable deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
    throws JsonParseException {
  JsonObject obj = json.getAsJsonObject();
  final String serviceName = obj.get("service").getAsString();
  // NOTE(review): InetSocketAddress here triggers a DNS resolution attempt of the
  // hostname at deserialization time — presumably intentional; confirm with callers.
  final InetSocketAddress socketAddress =
      new InetSocketAddress(obj.get("hostname").getAsString(), obj.get("port").getAsInt());
  return new Discoverable() {
    @Override
    public String getName() {
      return serviceName;
    }

    @Override
    public InetSocketAddress getSocketAddress() {
      return socketAddress;
    }
  };
}
From source file: co.cask.cdap.common.zookeeper.coordination.ResourceAssignmentTypeAdapter.java
License: Apache License
@Override public ResourceAssignment deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { if (!json.isJsonObject()) { throw new JsonParseException("Expect a json object, got " + json); }// ww w . j a va2s.co m JsonObject jsonObj = json.getAsJsonObject(); String name = jsonObj.get("name").getAsString(); Multimap<Discoverable, PartitionReplica> assignments = TreeMultimap .create(DiscoverableComparator.COMPARATOR, PartitionReplica.COMPARATOR); JsonArray assignmentsJson = context.deserialize(jsonObj.get("assignments"), JsonArray.class); for (JsonElement element : assignmentsJson) { if (!element.isJsonArray()) { throw new JsonParseException("Expect a json array, got " + element); } JsonArray entryJson = element.getAsJsonArray(); if (entryJson.size() != 2) { throw new JsonParseException("Expect json array of size = 2, got " + entryJson.size()); } Discoverable key = context.deserialize(entryJson.get(0), Discoverable.class); PartitionReplica value = context.deserialize(entryJson.get(1), PartitionReplica.class); assignments.put(key, value); } return new ResourceAssignment(name, assignments); }
From source file: co.cask.cdap.etl.common.SetMultimapCodec.java
License: Apache License
/**
 * Deserializes a {@link SetMultimap} that was serialized as a JSON object with a
 * single "map" field holding a {@code Map<K, Collection<V>>}.
 *
 * @param json JSON object wrapping the serialized map
 * @param typeOfT ignored; required by the {@link JsonDeserializer} contract
 * @param context context used to deserialize the inner map
 * @return a mutable {@link HashMultimap} with the same key/value pairs
 * @throws JsonParseException if the element or inner map is malformed
 */
@Override
public SetMultimap<K, V> deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
    throws JsonParseException {
  Map<K, Collection<V>> entries = context.deserialize(json.getAsJsonObject().get("map"), mapType);
  SetMultimap<K, V> result = HashMultimap.create();
  for (Map.Entry<K, Collection<V>> entry : entries.entrySet()) {
    result.putAll(entry.getKey(), entry.getValue());
  }
  return result;
}
From source file: co.cask.cdap.etl.mock.realtime.StructuredRecordCodec.java
License: Apache License
@Override public StructuredRecord deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { JsonObject obj = json.getAsJsonObject(); try {// w ww.ja va2 s .c o m Schema schema = Schema.parseJson(obj.get("schema").getAsString()); return StructuredRecordStringConverter.fromJsonString(obj.get("record").getAsString(), schema); } catch (IOException e) { throw new RuntimeException(e); } }
From source file: co.cask.cdap.etl.spark.batch.DatasetInfoTypeAdapter.java
License: Apache License
@Override public DatasetInfo deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { JsonObject obj = json.getAsJsonObject(); String datasetName = obj.get("datasetName").getAsString(); Map<String, String> datasetArgs = context.deserialize(obj.get("datasetArgs"), mapType); if (obj.get("datasetSplitClass") == null) { return new DatasetInfo(datasetName, datasetArgs, null); }//from www .j a v a2 s .c om String datasetSplitClass = obj.get("datasetSplitClass").getAsString(); ClassLoader classLoader = Objects.firstNonNull(Thread.currentThread().getContextClassLoader(), SparkBatchSourceFactory.class.getClassLoader()); try { Class<?> splitClass = classLoader.loadClass(datasetSplitClass); List<Split> splits = context.deserialize(obj.get("datasetSplits"), getListType(splitClass)); return new DatasetInfo(datasetName, datasetArgs, splits); } catch (ClassNotFoundException e) { throw new JsonParseException("Unable to deserialize splits", e); } }
From source file: co.cask.cdap.etl.spark.batch.InputFormatProviderTypeAdapter.java
License: Apache License
@Override public InputFormatProvider deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { JsonObject obj = json.getAsJsonObject(); // if inputFormat is not present, return empty InputFormatProvider if (obj.get("inputFormatClass") == null) { return new SparkBatchSourceFactory.BasicInputFormatProvider(); }/*from www . j av a 2 s.c o m*/ String className = obj.get("inputFormatClass").getAsString(); Map<String, String> conf = context.deserialize(obj.get("inputFormatConfig"), mapType); return new SparkBatchSourceFactory.BasicInputFormatProvider(className, conf); }
From source file: co.cask.cdap.etl.spark.batch.OutputFormatProviderTypeAdapter.java
License: Apache License
@Override public OutputFormatProvider deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException { JsonObject obj = json.getAsJsonObject(); // if outputformat is not present, return empty OutputFormatProvider if (obj.get("outputFormatClass") == null) { return new SparkBatchSinkFactory.BasicOutputFormatProvider(); }/*from w ww . j a v a2s . com*/ String className = obj.get("outputFormatClass").getAsString(); Map<String, String> conf = context.deserialize(obj.get("outputFormatConfig"), mapType); return new SparkBatchSinkFactory.BasicOutputFormatProvider(className, conf); }