List of usage examples for the com.google.gson.JsonArray constructor
public JsonArray()
From source file:ch.iterate.openstack.swift.Client.java
License:Open Source License
/** * @param container The name of the container * @param name The name of the object * @param entity The name of the request entity (make sure to set the Content-Type * @param metadata The metadata for the object * @param md5sum The 32 character hex encoded MD5 sum of the data * @param objectSize The total size in bytes of the object to be stored * @param segmentSize Optional size in bytes of the object segments to be stored (forces large object support) default 4G * @param dynamicLargeObject Optional setting to use dynamic large objects, False/null will use static large objects if required * @param segmentContainer Optional name of container to store file segments, defaults to storing chunks in the same container as the file sill appear * @param segmentFolder Optional name of folder for storing file segments, defaults to ".chunks/" * @param leaveSegments Optional setting to leave segments of large objects in place when the manifest is overwrtten/changed * @return The ETAG if the save was successful, null otherwise * @throws GenericException There was a protocol level error talking to CloudFiles *//*www .j a va 2 s .com*/ public String storeObject(Region region, String container, String name, HttpEntity entity, Map<String, String> metadata, String md5sum, Long objectSize, Long segmentSize, Boolean dynamicLargeObject, String segmentContainer, String segmentFolder, Boolean leaveSegments) throws IOException, InterruptedException { /* * Default values for large object support. We also use the defaults combined with the inputs * to determine whether to store as a large object. */ /* * The maximum size of a single object (5GiB). */ long singleObjectSizeLimit = (long) (5 * Math.pow(1024, 3)); /* * The default minimum segment size (1MiB). */ long minSegmentSize = 1024L * 1024L; /* * Set the segment size. * * Defaults to 4GiB segments, and will not permit smaller than 1MiB segments. */ long actualSegmentSize = (segmentSize == null) ? 
(long) (4 * Math.pow(1024, 3)) : Math.max(segmentSize, minSegmentSize); /* * Determines if we will store using large objects - we may do this for 3 reasons: * * - A segmentSize has been specified and the object size is greater than the minimum segment size * - If an objectSize is provided and is larger than the single object size limit of 5GiB * - A segmentSize has been specified, but no objectSize given (we take this as a request for segmentation) * * The last case may fail if the user does not provide at least as much data as the minimum segment * size configured on the server, and will always produce a large object structure (even if only one * small segment is required). */ objectSize = (objectSize == null) ? -1 : objectSize; boolean useLargeObject = ((segmentSize != null) && (objectSize > actualSegmentSize)) || (objectSize > singleObjectSizeLimit) || ((segmentSize != null) && (objectSize == -1)); if (!useLargeObject) { return storeObject(region, container, name, entity, metadata, md5sum); } else { /* * We need to upload a large object as defined by the method * parameters. For now this is done sequentially, but a parallel * version using appropriate random access to the underlying data * may be desirable. * * We make the assumption that the given file size will not be * greater than int.MAX_VALUE * segmentSize * */ leaveSegments = (leaveSegments == null) ? Boolean.FALSE : leaveSegments; dynamicLargeObject = (dynamicLargeObject == null) ? Boolean.FALSE : dynamicLargeObject; segmentFolder = (segmentFolder == null) ? ".file-segments" : segmentFolder; segmentContainer = (segmentContainer == null) ? container : segmentContainer; /* * If we have chosen not to leave existing large object segments in place (default) * then we need to collect information about any existing file segments so that we can * deal with them after we complete the upload of the new manifest. 
* * We should only delete existing segments after a successful upload of a new manifest file * because this constitutes an object update and the older file should remain available * until the new file can be downloaded. */ Map<String, List<StorageObject>> oldSegmentsToRemove = null; if (!leaveSegments) { oldSegmentsToRemove = listObjectSegments(region, container, name); } /* * Upload the new segments and manifest */ int segmentNumber = 1; long timeStamp = System.currentTimeMillis() / 1000L; String segmentBase = String.format("%s/%d/%d", segmentFolder, timeStamp, objectSize); /* * Create subInputStream from the OutputStream we will pass to the * HttpEntity for writing content. */ final PipedInputStream contentInStream = new PipedInputStream(64 * 1024); final PipedOutputStream contentOutStream = new PipedOutputStream(contentInStream); SubInputStream segmentStream = new SubInputStream(contentInStream, actualSegmentSize, false); /* * Fork the call to entity.writeTo() that allows us to grab any exceptions raised */ final HttpEntity e = entity; final Callable<Boolean> writer = new Callable<Boolean>() { public Boolean call() throws Exception { e.writeTo(contentOutStream); return Boolean.TRUE; } }; ExecutorService writeExecutor = Executors.newSingleThreadExecutor(); final Future<Boolean> future = writeExecutor.submit(writer); /* * Check the future for exceptions after we've finished uploading segments */ Map<String, List<StorageObject>> newSegmentsAdded = new HashMap<String, List<StorageObject>>(); List<StorageObject> newSegments = new LinkedList<StorageObject>(); JsonArray manifestSLO = new JsonArray(); boolean finished = false; /* * Upload each segment of the file by reading sections of the content input stream * until the entire underlying stream is complete */ while (!finished) { String segmentName = String.format("%s/%08d", segmentBase, segmentNumber); String etag; try { etag = storeObject(region, segmentContainer, segmentStream, "application/octet-stream", 
segmentName, new HashMap<String, String>()); } catch (IOException ex) { // Finished storing the object ex.printStackTrace(); throw ex; } String segmentPath = segmentContainer + "/" + segmentName; long bytesUploaded = segmentStream.getBytesProduced(); /* * Create the appropriate manifest structure if we're making a static large * object. * * ETAG returned by the simple upload * total size of segment uploaded * path of segment */ if (!dynamicLargeObject) { JsonObject segmentJSON = new JsonObject(); segmentJSON.addProperty("path", segmentPath); segmentJSON.addProperty("etag", etag); segmentJSON.addProperty("size_bytes", bytesUploaded); manifestSLO.add(segmentJSON); newSegments.add(new StorageObject(segmentName)); } segmentNumber++; if (!finished) { finished = segmentStream.endSourceReached(); } newSegmentsAdded.put(segmentContainer, newSegments); segmentStream.readMoreBytes(actualSegmentSize); } /* * Attempts to retrieve the return value from the write operation * Any exceptions raised can then be handled appropriately */ try { future.get(); } catch (InterruptedException ex) { /* * The write was interrupted... should we delete the segments? 
* For now we'll leave orphaned segments, but we should re-visit this later */ } catch (ExecutionException ex) { /* * This should always be an IOException or a RuntimeException * because the call to entity.writeTo() only throws IOException */ Throwable t = ex.getCause(); if (t instanceof IOException) { throw (IOException) t; } else { throw (RuntimeException) t; } } /* * Create an appropriate manifest depending on our DLO/SLO choice */ String manifestEtag; if (dynamicLargeObject) { /* * Empty manifest with header detailing the shared prefix of object segments */ long manifestTimeStamp = System.currentTimeMillis() / 1000L; metadata.put(Constants.X_OBJECT_META + "mtime", String.format("%s", manifestTimeStamp)); manifestEtag = createDLOManifestObject(region, container, entity.getContentType().getValue(), name, segmentBase, metadata); } else { /* * Manifest containing json list specifying details of the object segments. */ manifestEtag = createSLOManifestObject(region, container, entity.getContentType().getValue(), name, manifestSLO.toString(), metadata); } /* * Delete stale segments of overwritten large object if requested. */ if (!leaveSegments) { /* * Before deleting old segments, remove any objects from the delete list * that are also part of a new static large object that were updated during the upload. */ if (!(oldSegmentsToRemove == null)) { for (String c : oldSegmentsToRemove.keySet()) { List<StorageObject> rmv = oldSegmentsToRemove.get(c); if (newSegmentsAdded.containsKey(c)) { rmv.removeAll(newSegmentsAdded.get(c)); } List<String> rmvNames = new LinkedList<String>(); for (StorageObject s : rmv) { rmvNames.add(s.getName()); } deleteObjects(region, c, rmvNames); } } } return manifestEtag; } }
From source file:ch.jamiete.hilda.admin.commands.AdminAllowCommand.java
License:Apache License
@Override public void execute(final Message message, final String[] arguments, final String label) { if (arguments.length == 1 && arguments[0].equalsIgnoreCase("list")) { final List<String> strings = new ArrayList<>(); for (String str : this.hilda.getAllowedServers()) { Guild guild = this.hilda.getBot().getGuildById(str); if (guild != null) { strings.add(this.name(guild)); }//from w w w . j a v a 2s . c o m } if (strings.isEmpty()) { this.reply(message, "The whitelist function is not enabled!"); } else { final MessageBuilder mb = new MessageBuilder(); mb.append("I'm currently allowing "); if (strings.size() != this.hilda.getBot().getGuilds().size()) { mb.append("only "); } mb.append(strings.size()).append(strings.size() == 1 ? "server" : "servers").append(": "); mb.append(Util.getAsList(strings)); mb.append("."); this.reply(message, mb.build()); } return; } if (arguments.length == 0) { this.usage(message, "<id.../list>"); } final AllowDirection direction = AllowDirection.valueOf(label.toUpperCase()); final List<String> ids = new ArrayList<>(); final List<String> success = new ArrayList<>(); final List<String> fail = new ArrayList<>(); for (final String arg : arguments) { Guild guild = this.hilda.getBot().getGuildById(arg); if (guild == null) { fail.add(arg); } else { ids.add(arg); success.add(this.name(guild)); } } if (!success.isEmpty()) { final Configuration cfg = this.hilda.getConfigurationManager().getConfiguration(this.plugin, "allowedservers"); JsonArray array = cfg.get().getAsJsonArray("servers"); if (array == null) { array = new JsonArray(); } for (final String id : ids) { if (direction == AllowDirection.ALLOW) { this.hilda.addAllowedServer(id); if (!array.contains(new JsonPrimitive(id))) { array.add(id); } } if (direction == AllowDirection.DISALLOW) { this.hilda.removeAllowedServer(id); array.remove(new JsonPrimitive(id)); } } cfg.get().add("servers", array); cfg.save(); } final MessageBuilder mb = new MessageBuilder(); mb.append("OK, "); if 
(!success.isEmpty()) { mb.append("I'm ").append(direction == AllowDirection.ALLOW ? "now" : "no longer").append(" allowing "); mb.append(Util.getAsList(success)); } if (!fail.isEmpty()) { if (!success.isEmpty()) { mb.append(", however "); } mb.append("I couldn't find any servers matching "); mb.append(Util.getAsList(fail)); } mb.append("."); mb.buildAll(SplitPolicy.SPACE).forEach(m -> message.getChannel().sendMessage(m).queue()); }
From source file:ch.jamiete.hilda.admin.commands.AdminIgnoreCommand.java
License:Apache License
/**
 * Handles the ignore/unignore command for users: lists currently ignored users, or
 * adds/removes the mentioned users / given IDs depending on the invoked label.
 */
@Override
public void execute(final Message message, final String[] arguments, final String label) {
    // "list" subcommand: report which users are currently ignored.
    if (arguments.length == 1 && arguments[0].equalsIgnoreCase("list")) {
        final List<String> strings = this.hilda.getCommandManager().getIgnoredUsers();
        if (strings.isEmpty()) {
            // FIX: the original said "channels" even though this command manages ignored users.
            this.reply(message, "I'm not ignoring any users!");
        } else {
            final MessageBuilder mb = new MessageBuilder();
            mb.append("I'm currently ignoring ");
            mb.append(Util.getAsList(this.getPieces(strings)));
            mb.append(".");
            this.reply(message, mb.build());
        }
        return;
    }
    if (arguments.length == 0) {
        this.usage(message, "<@user.../id...>");
        // FIX: the original fell through after showing usage and sent a spurious "OK, ..." reply.
        return;
    }
    // The invoked label (ignore/unignore) decides whether we add or remove.
    final IgnoreDirection direction = IgnoreDirection.valueOf(label.toUpperCase());
    final List<String> ids = new ArrayList<>();
    // Accept both @mentions and raw user IDs; mention tokens start with "<@".
    if (!message.getMentionedUsers().isEmpty()) {
        message.getMentionedUsers().forEach(u -> ids.add(u.getId()));
    }
    for (final String arg : arguments) {
        if (!arg.startsWith("<@")) {
            ids.add(arg);
        }
    }
    // Persist the change to the "ignoredusers" configuration.
    final Configuration cfg = this.hilda.getConfigurationManager().getConfiguration(this.plugin,
            "ignoredusers");
    JsonArray array = cfg.get().getAsJsonArray("users");
    if (array == null) {
        array = new JsonArray();
    }
    for (final String id : ids) {
        if (direction == IgnoreDirection.IGNORE) {
            this.hilda.getCommandManager().addIgnoredUser(id);
            if (!array.contains(new JsonPrimitive(id))) {
                array.add(id);
            }
        }
        if (direction == IgnoreDirection.UNIGNORE) {
            this.hilda.getCommandManager().removeIgnoredUser(id);
            array.remove(new JsonPrimitive(id));
        }
    }
    cfg.get().add("users", array);
    cfg.save();
    // Confirm the change to the invoker.
    final MessageBuilder mb = new MessageBuilder();
    mb.append("OK, I'm ").append(direction == IgnoreDirection.IGNORE ? "now" : "no longer")
            .append(" ignoring ");
    mb.append(Util.getAsList(this.getPieces(ids)));
    mb.append(".");
    mb.buildAll(SplitPolicy.SPACE).forEach(m -> message.getChannel().sendMessage(m).queue());
}
From source file:ch.jamiete.hilda.configuration.Configuration.java
License:Apache License
/**
 * Returns the JSON array stored under the given key, or an empty array when the key is
 * absent (never null).
 */
public JsonArray getArray(final String name) {
    final JsonArray stored = this.json.getAsJsonArray(name);
    if (stored != null) {
        return stored;
    }
    return new JsonArray();
}
From source file:ch.jamiete.hilda.moderatortools.commands.IgnoreCommand.java
License:Apache License
/**
 * Handles the channel ignore/unignore command: lists currently ignored channels, or
 * toggles ignoring for the mentioned channels (defaulting to the current channel when
 * no arguments are given), persisting the result per guild.
 */
@Override public void execute(final Message message, final String[] arguments, final String label) {
    // "list" subcommand: resolve stored channel IDs to live channels and report them.
    if (arguments.length == 1 && arguments[0].equalsIgnoreCase("list")) {
        final MessageBuilder mb = new MessageBuilder();
        final List<String> strings = this.hilda.getCommandManager().getIgnoredChannels();
        final List<TextChannel> ignored = new ArrayList<TextChannel>();
        for (final String s : strings) {
            final TextChannel c = message.getGuild().getTextChannelById(s);
            // Skip IDs that no longer resolve to a channel in this guild.
            if (c != null) {
                ignored.add(c);
            }
        }
        if (ignored.isEmpty()) {
            this.reply(message, "I'm not ignoring any channels!");
        } else {
            mb.append("I'm currently ignoring ");
            for (final TextChannel c : ignored) {
                mb.append(c.getAsMention());
                mb.append(", ");
            }
            // Drop the trailing ", " left by the loop.
            mb.replaceLast(", ", "");
            mb.append(".");
            this.reply(message, mb.build());
        }
        return;
    }
    // The invoked label (ignore/unignore) decides whether we add or remove.
    final IgnoreDirection direction = IgnoreDirection.valueOf(label.toUpperCase());
    final List<TextChannel> channels = new ArrayList<TextChannel>();
    // With no arguments, act on the channel the command was sent in.
    if (arguments.length == 0) {
        channels.add(message.getTextChannel());
    }
    if (!message.getMentionedChannels().isEmpty()) {
        channels.addAll(message.getMentionedChannels());
    }
    // Persist per guild under the "ignore-<guildId>" configuration.
    final Configuration cfg = this.hilda.getConfigurationManager().getConfiguration(this.plugin,
            "ignore-" + message.getGuild().getId());
    JsonArray array = cfg.get().getAsJsonArray("channels");
    if (array == null) {
        array = new JsonArray();
    }
    for (final TextChannel channel : channels) {
        if (direction == IgnoreDirection.IGNORE) {
            this.hilda.getCommandManager().addIgnoredChannel(channel.getId());
            // Avoid duplicate IDs in the stored array.
            if (!array.contains(new JsonPrimitive(channel.getId()))) {
                array.add(channel.getId());
            }
        }
        if (direction == IgnoreDirection.UNIGNORE) {
            this.hilda.getCommandManager().removeIgnoredChannel(channel.getId());
            array.remove(new JsonPrimitive(channel.getId()));
        }
    }
    cfg.get().add("channels", array);
    cfg.save();
    // Confirm the change to the invoker.
    final MessageBuilder mb = new MessageBuilder();
    mb.append("OK, I'm ").append(direction == IgnoreDirection.IGNORE ? "now" : "no longer")
            .append(" ignoring ");
    mb.append(Util.getChannelsAsString(channels));
    mb.append(".");
    mb.buildAll().forEach(m -> message.getChannel().sendMessage(m).queue());
}
From source file:classes.analysis.Analysis.java
License:Open Source License
/**
 * Builds an Analysis instance from a Galaxy provenance record.
 *
 * The "provenance" field of analysisData is expected to be a JSON-encoded string holding
 * an array of step objects, each with "inputs", "outputs" and "tool_id" members — TODO
 * confirm against the Galaxy export format used by the caller.
 */
private static Analysis parseAnalysisGalaxyData(String origin, String emsuser, JsonObject analysisData) {
    JsonParser parser = new JsonParser();
    // Provenance is stored as a string and must be parsed a second time.
    JsonArray provenance = (JsonArray) parser.parse(analysisData.get("provenance").getAsString());
    // STEP 1. Find the associations between the steps (inputs and outputs).
    // Maps each output id to the step (JsonElement) that produced it.
    HashMap<String, JsonElement> outputs = new HashMap<String, JsonElement>();
    JsonObject stepJSONobject;
    for (JsonElement step_json : provenance) {
        stepJSONobject = step_json.getAsJsonObject();
        for (JsonElement output : stepJSONobject.getAsJsonArray("outputs")) {
            outputs.put(output.getAsJsonObject().get("id").getAsString(), step_json);
        }
        // "upload1" steps are raw data uploads; everything else is initially tagged
        // processed_data and may be demoted to intermediate_data below.
        if ("upload1".equalsIgnoreCase(stepJSONobject.get("tool_id").getAsString())) {
            stepJSONobject.remove("step_type");
            stepJSONobject.add("step_type", new JsonPrimitive("external_source"));
        } else {
            stepJSONobject.add("step_type", new JsonPrimitive("processed_data"));
        }
    }
    // Second pass: link each step's inputs back to the producing step and reclassify
    // producers that feed other steps as intermediate_data (unless external sources).
    for (JsonElement step_json : provenance) {
        stepJSONobject = step_json.getAsJsonObject();
        for (JsonElement input : stepJSONobject.getAsJsonArray("inputs")) {
            String id = input.getAsJsonObject().get("id").getAsString();
            if (outputs.containsKey(id)) {
                if (!"external_source"
                        .equalsIgnoreCase(outputs.get(id).getAsJsonObject().get("step_type").getAsString())) {
                    outputs.get(id).getAsJsonObject().remove("step_type");
                    outputs.get(id).getAsJsonObject().add("step_type", new JsonPrimitive("intermediate_data"));
                }
                if (!stepJSONobject.has("used_data")) {
                    stepJSONobject.add("used_data", new JsonArray());
                }
                // "STxxxx." prefix is presumably a study/step id placeholder — verify upstream.
                ((JsonArray) stepJSONobject.get("used_data")).add(new JsonPrimitive(
                        "STxxxx." + outputs.get(id).getAsJsonObject().get("id").getAsString()));
            }
        }
    }
    // STEP 2. Create the instances for the steps, dispatching on the step_type set above.
    ArrayList<NonProcessedData> nonProcessedDataList = new ArrayList<NonProcessedData>();
    ArrayList<ProcessedData> processedDataList = new ArrayList<ProcessedData>();
    for (JsonElement step_json : provenance) {
        stepJSONobject = step_json.getAsJsonObject();
        if ("external_source".equalsIgnoreCase(stepJSONobject.get("step_type").getAsString())) {
            nonProcessedDataList.add(ExternalData.parseStepGalaxyData(stepJSONobject, analysisData, emsuser));
        } else if ("intermediate_data".equalsIgnoreCase(stepJSONobject.get("step_type").getAsString())) {
            nonProcessedDataList
                    .add(IntermediateData.parseStepGalaxyData(stepJSONobject, analysisData, emsuser));
        } else if ("processed_data".equalsIgnoreCase(stepJSONobject.get("step_type").getAsString())) {
            processedDataList.add(ProcessedData.parseStepGalaxyData(stepJSONobject, analysisData, emsuser));
        } else {
            throw new InstantiationError("Unknown step type");
        }
    }
    Collections.sort(nonProcessedDataList);
    Collections.sort(processedDataList);
    // STEP 3. Create the instance of analysis and populate it from the parsed steps.
    Analysis analysis = new Analysis();
    analysis.setAnalysisName(analysisData.get("ems_analysis_name").getAsString());
    analysis.setAnalysisType("Galaxy workflow");
    analysis.setNonProcessedData(nonProcessedDataList.toArray(new NonProcessedData[] {}));
    analysis.setProcessedData(processedDataList.toArray(new ProcessedData[] {}));
    analysis.setTags(new String[] { "imported" });
    analysis.setStatus("pending");
    return analysis;
}
From source file:co.aikar.timings.TimingsHistory.java
License:MIT License
/**
 * Snapshots the current timings state: minute reports, per-timing entries, and a JSON
 * summary of entities/block entities loaded per level. Relies on static state from
 * TimingsManager and the enclosing class (timedTicks, MINUTE_REPORTS, levels, levelMap,
 * entityMap, blockEntityMap, levelIdPool) — not visible here, so behavior notes are
 * based on how they are used below.
 */
TimingsHistory() {
    // Timestamps are stored in seconds.
    this.endTime = System.currentTimeMillis() / 1000;
    this.startTime = TimingsManager.historyStart / 1000;
    // If we are mid-minute (1200 ticks presumably = one minute at 20 tps — confirm),
    // append an extra, freshly-captured MinuteReport for the partial minute.
    if (timedTicks % 1200 != 0 || MINUTE_REPORTS.isEmpty()) {
        this.minuteReports = MINUTE_REPORTS.toArray(new MinuteReport[MINUTE_REPORTS.size() + 1]);
        this.minuteReports[this.minuteReports.length - 1] = new MinuteReport();
    } else {
        this.minuteReports = MINUTE_REPORTS.toArray(new MinuteReport[MINUTE_REPORTS.size()]);
    }
    // Total ticks covered by this history window.
    long ticks = 0;
    for (MinuteReport mr : this.minuteReports) {
        ticks += mr.ticksRecord.timed;
    }
    this.totalTicks = ticks;
    this.totalTime = fullServerTickTimer.record.totalTime;
    // One history entry per registered timing.
    this.entries = new TimingsHistoryEntry[TimingsManager.TIMINGS.size()];
    int i = 0;
    for (Timing timing : TimingsManager.TIMINGS) {
        this.entries[i++] = new TimingsHistoryEntry(timing);
    }
    // Scratch maps reused per chunk: network/block id -> count.
    final Map<Integer, AtomicInteger> entityCounts = new HashMap<>();
    final Map<Integer, AtomicInteger> blockEntityCounts = new HashMap<>();
    final Gson GSON = new Gson();
    // Information about all loaded entities/block entities, keyed per level and chunk.
    for (Level level : Server.getInstance().getLevels().values()) {
        JsonArray jsonLevel = new JsonArray();
        for (BaseFullChunk chunk : level.getChunks().values()) {
            entityCounts.clear();
            blockEntityCounts.clear();
            // Count entities by network id and remember a display name per id.
            for (Entity entity : chunk.getEntities().values()) {
                if (!entityCounts.containsKey(entity.getNetworkId()))
                    entityCounts.put(entity.getNetworkId(), new AtomicInteger(0));
                entityCounts.get(entity.getNetworkId()).incrementAndGet();
                entityMap.put(entity.getNetworkId(), entity.getClass().getSimpleName());
            }
            // Count block entities by block id, likewise recording display names.
            for (BlockEntity blockEntity : chunk.getBlockEntities().values()) {
                if (!blockEntityCounts.containsKey(blockEntity.getBlock().getId()))
                    blockEntityCounts.put(blockEntity.getBlock().getId(), new AtomicInteger(0));
                blockEntityCounts.get(blockEntity.getBlock().getId()).incrementAndGet();
                blockEntityMap.put(blockEntity.getBlock().getId(), blockEntity.getClass().getSimpleName());
            }
            // Skip chunks with nothing to report.
            if (blockEntityCounts.isEmpty() && entityCounts.isEmpty()) {
                continue;
            }
            // Per-chunk record: [x, z, {entity counts}, {block entity counts}].
            JsonArray jsonChunk = new JsonArray();
            jsonChunk.add(chunk.getX());
            jsonChunk.add(chunk.getZ());
            jsonChunk.add(GSON
                    .toJsonTree(JsonUtil.mapToObject(entityCounts.entrySet(),
                            (entry) -> new JsonUtil.JSONPair(entry.getKey(), entry.getValue().get())))
                    .getAsJsonObject());
            jsonChunk.add(GSON
                    .toJsonTree(JsonUtil.mapToObject(blockEntityCounts.entrySet(),
                            (entry) -> new JsonUtil.JSONPair(entry.getKey(), entry.getValue().get())))
                    .getAsJsonObject());
            jsonLevel.add(jsonChunk);
        }
        // Levels are keyed by a compact numeric id allocated on first sight.
        if (!levelMap.containsKey(level.getName()))
            levelMap.put(level.getName(), levelIdPool++);
        levels.add(String.valueOf(levelMap.get(level.getName())), jsonLevel);
    }
}
From source file:co.cask.cdap.api.dataset.lib.partitioned.ComparableCodec.java
License:Apache License
/**
 * Serializes a Comparable as a two-element JSON array: [concrete class name, value],
 * so the class can be recovered on deserialization. Returns null for null input.
 */
@Nullable
protected JsonElement serializeComparable(@Nullable Comparable comparable,
        JsonSerializationContext jsonSerializationContext) {
    if (comparable == null) {
        return null;
    }
    final JsonArray pair = new JsonArray();
    pair.add(jsonSerializationContext.serialize(comparable.getClass().getName()));
    pair.add(jsonSerializationContext.serialize(comparable));
    return pair;
}
From source file:co.cask.cdap.common.zookeeper.coordination.ResourceAssignmentTypeAdapter.java
License:Apache License
/**
 * Serializes a ResourceAssignment as {"name": ..., "assignments": [[discoverable, replica], ...]}.
 * Each assignment entry is a two-element array of the serialized Discoverable and its
 * PartitionReplica.
 */
@Override
public JsonElement serialize(ResourceAssignment src, Type typeOfSrc, JsonSerializationContext context) {
    JsonObject json = new JsonObject();
    json.addProperty("name", src.getName());
    // FIX: removed a dead statement — the original called src.getAssignments().entries()
    // and discarded the result before iterating it below.
    JsonArray assignments = new JsonArray();
    for (Map.Entry<Discoverable, PartitionReplica> entry : src.getAssignments().entries()) {
        JsonArray entryJson = new JsonArray();
        entryJson.add(context.serialize(entry.getKey(), Discoverable.class));
        entryJson.add(context.serialize(entry.getValue()));
        assignments.add(entryJson);
    }
    json.add("assignments", assignments);
    return json;
}
From source file:co.cask.cdap.common.zookeeper.coordination.ServiceDiscoveredCodec.java
License:Apache License
/**
 * Serializes a ServiceDiscovered as a JSON array of {"host": ..., "port": ...} objects,
 * one per discoverable endpoint.
 */
@Override
public JsonElement serialize(ServiceDiscovered serviceDiscovered, Type typeOfSrc,
        JsonSerializationContext context) {
    final JsonArray endpoints = new JsonArray();
    for (Discoverable discoverable : serviceDiscovered) {
        final JsonObject endpoint = new JsonObject();
        final java.net.InetSocketAddress address = discoverable.getSocketAddress();
        endpoint.addProperty("host", address.getHostName());
        endpoint.addProperty("port", address.getPort());
        endpoints.add(endpoint);
    }
    return endpoints;
}