Usage examples for io.vertx.core.json.JsonObject#encode()
public String encode()
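Before the full source-file examples below, here is a minimal, self-contained sketch of what encode() does: it serializes a JsonObject into its compact JSON string form. The class name JsonObjectEncodeExample and the field values are illustrative only and are not taken from the projects listed below.

import io.vertx.core.json.JsonObject;

public class JsonObjectEncodeExample {
    public static void main(String[] args) {
        // Build a small document and serialize it with encode().
        JsonObject json = new JsonObject()
                .put("code", 200)
                .put("message", "Success");

        // encode() returns the compact JSON form, e.g. {"code":200,"message":"Success"}
        String encoded = json.encode();
        System.out.println(encoded);
    }
}

The handlers below follow the same pattern at a larger scale: each builds a JsonObject response body and passes jsonResponse.encode() to HttpServerResponse.write().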
From source file: org.sfs.nodes.compute.container.ExportContainer.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    SfsVertx sfsVertx = vertxContext.vertx();
    Context context = sfsVertx.getOrCreateContext();
    aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdmin(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderExists(X_SFS_DEST_DIRECTORY))
        .map(new ValidateHeaderIsBoolean(X_SFS_COMPRESS))
        .map(new ValidateHeaderIsBase64Encoded(X_SFS_SECRET))
        .map(new ValidateHeaderBetweenLong(X_SFS_KEEP_ALIVE_TIMEOUT, 10000, 300000))
        .map(aVoid -> fromSfsRequest(httpServerRequest))
        .map(new ValidateContainerPath())
        .flatMap(new LoadAccountAndContainer(vertxContext))
        .flatMap(persistentContainer -> {
            MultiMap headers = httpServerRequest.headers();
            String destDirectory = headers.get(X_SFS_DEST_DIRECTORY);
            boolean compress = "true".equalsIgnoreCase(headers.get(X_SFS_COMPRESS));
            byte[] secret = headers.contains(X_SFS_SECRET) ? base64().decode(headers.get(X_SFS_SECRET)) : null;
            return aVoid().flatMap(aVoid -> {
                ObservableFuture<Boolean> handler = RxHelper.observableFuture();
                vertxContext.vertx().fileSystem().exists(destDirectory, handler.toHandler());
                return handler.map(destDirectoryExists -> {
                    if (!TRUE.equals(destDirectoryExists)) {
                        JsonObject jsonObject = new JsonObject()
                            .put("message", format("%s does not exist", destDirectory));
                        throw new HttpRequestValidationException(HTTP_BAD_REQUEST, jsonObject);
                    } else {
                        return (Void) null;
                    }
                });
            }).flatMap(oVoid -> {
                ObservableFuture<List<String>> handler = RxHelper.observableFuture();
                vertxContext.vertx().fileSystem().readDir(destDirectory, handler.toHandler());
                return handler.map(listing -> {
                    if (listing.size() > 0) {
                        JsonObject jsonObject = new JsonObject()
                            .put("message", format("%s is not empty", destDirectory));
                        throw new HttpRequestValidationException(HTTP_BAD_REQUEST, jsonObject);
                    } else {
                        return (Void) null;
                    }
                });
            }).flatMap(aVoid -> {
                LOGGER.info("Exporting container " + persistentContainer.getId() + " to " + destDirectory);
                JournalFile dumpFile = new JournalFile(get(destDirectory).resolve(DUMP_FILE_NAME));
                return dumpFile.open(vertxContext.vertx())
                    .flatMap(aVoid1 -> dumpFile.enableWrites(vertxContext.vertx()))
                    .map(aVoid1 -> dumpFile);
            }).flatMap(dumpFile -> {
                httpServerRequest.startProxyKeepAlive();
                Elasticsearch elasticsearch = vertxContext.verticle().elasticsearch();
                String containerId = persistentContainer.getId();
                String objectIndex = elasticsearch.objectIndex(persistentContainer.getName());
                long now = System.currentTimeMillis() - VerifyRepairAllContainerObjects.CONSISTENCY_THRESHOLD;
                Calendar consistencyThreshold = Calendar.getInstance();
                consistencyThreshold.setTimeInMillis(now);
                TermQueryBuilder containerIdQuery = termQuery("container_id", containerId);
                ScanAndScrollStreamProducer producer =
                    new ScanAndScrollStreamProducer(vertxContext, containerIdQuery)
                        .setIndeces(objectIndex)
                        .setTypes(elasticsearch.defaultType())
                        .setReturnVersion(true);
                DumpFileWriter fileWriter = new DumpFileWriter(vertxContext, persistentContainer, dumpFile);
                if (compress) {
                    fileWriter.enableDataCompression();
                }
                if (secret != null) {
                    fileWriter.enableDataEncryption(secret);
                }
                return pump(producer, fileWriter).map(aVoid -> dumpFile);
            }).flatMap(journalFile -> journalFile.disableWrites(vertxContext.vertx()).map(aVoid -> journalFile))
              .flatMap(journalFile -> journalFile.force(vertxContext.vertx(), true).map(aVoid -> journalFile))
              .flatMap(journalFile -> journalFile.close(vertxContext.vertx()).map(aVoid -> journalFile))
              .flatMap(journalFile -> RxHelper.executeBlocking(context, sfsVertx.getBackgroundPool(), () -> {
                  try {
                      write(get(destDirectory).resolve(".successful"), new byte[0], CREATE_NEW, WRITE);
                      return (Void) null;
                  } catch (IOException e) {
                      throw new RuntimeException(e);
                  }
              }))
              .doOnNext(aVoid -> LOGGER.info("Done exporting container " + persistentContainer.getId() + " to " + destDirectory))
              .onErrorResumeNext(throwable -> {
                  LOGGER.info("Failed exporting container " + persistentContainer.getId() + " to " + destDirectory, throwable);
                  return Observable.error(throwable);
              });
        })
        .map(new ToVoid<>())
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject jsonResponse = new JsonObject();
                jsonResponse.put("code", HTTP_OK);
                HttpServerResponse httpResponse = httpServerRequest.response();
                httpResponse.write(jsonResponse.encode(), UTF_8.toString()).write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.container.ImportContainer.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdmin(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderExists(X_SFS_SRC_DIRECTORY))
        .map(new ValidateHeaderBetweenLong(X_SFS_KEEP_ALIVE_TIMEOUT, 10000, 300000))
        .map(aVoid -> fromSfsRequest(httpServerRequest))
        .map(new ValidateContainerPath())
        .flatMap(new LoadAccountAndContainer(vertxContext))
        .flatMap(new ValidateContainerIsEmpty(vertxContext))
        .flatMap(targetPersistentContainer -> {
            MultiMap headers = httpServerRequest.headers();
            String importDirectory = headers.get(X_SFS_SRC_DIRECTORY);
            String unparsedSkipPositions = headers.get(X_SFS_IMPORT_SKIP_POSITIONS);
            Set<Long> skipPositions;
            if (!isNullOrEmpty(unparsedSkipPositions)) {
                skipPositions = from(on(',').trimResults().split(unparsedSkipPositions))
                    .transform(input -> tryParse(input))
                    .filter(notNull())
                    .toSet();
            } else {
                skipPositions = new HashSet<>(0);
            }
            return aVoid().flatMap(aVoid -> {
                ObservableFuture<Boolean> handler = RxHelper.observableFuture();
                vertxContext.vertx().fileSystem().exists(importDirectory, handler.toHandler());
                return handler.map(destDirectoryExists -> {
                    if (!TRUE.equals(destDirectoryExists)) {
                        JsonObject jsonObject = new JsonObject()
                            .put("message", format("%s does not exist", importDirectory));
                        throw new HttpRequestValidationException(HTTP_BAD_REQUEST, jsonObject);
                    } else {
                        return (Void) null;
                    }
                });
            }).flatMap(oVoid -> {
                ObservableFuture<List<String>> handler = RxHelper.observableFuture();
                vertxContext.vertx().fileSystem().readDir(importDirectory, handler.toHandler());
                return handler.map(listing -> {
                    if (listing.size() <= 0) {
                        JsonObject jsonObject = new JsonObject()
                            .put("message", format("%s is empty", importDirectory));
                        throw new HttpRequestValidationException(HTTP_BAD_REQUEST, jsonObject);
                    } else {
                        return (Void) null;
                    }
                });
            }).flatMap(aVoid -> {
                LOGGER.info("Importing into container " + targetPersistentContainer.getId() + " from " + importDirectory);
                JournalFile journalFile = new JournalFile(get(importDirectory).resolve(DUMP_FILE_NAME));
                return journalFile.open(vertxContext.vertx()).map(aVoid1 -> journalFile);
            }).flatMap(journalFile -> {
                SfsVertx sfsVertx = vertxContext.vertx();
                return journalFile.getFirstEntry(sfsVertx)
                    .map(entryOptional -> {
                        checkState(entryOptional.isPresent(), "First dump file entry is corrupt");
                        return entryOptional.get();
                    })
                    .flatMap(entry -> entry.getMetadata(sfsVertx)
                        .map(buffer -> {
                            try {
                                return parseFrom(buffer.getBytes());
                            } catch (InvalidProtocolBufferException e) {
                                throw new RuntimeException(e);
                            }
                        })
                        .flatMap(firstHeader -> {
                            if (firstHeader.getEncrypted()) {
                                return just(httpServerRequest)
                                    .map(new ValidateHeaderExists(X_SFS_SECRET))
                                    .map(new ValidateHeaderIsBase64Encoded(X_SFS_SECRET))
                                    .map(new ToVoid<>())
                                    .map(aVoid -> {
                                        String cipherName = firstHeader.getCipherName();
                                        checkState(!isNullOrEmpty(cipherName), "Encryption is enabled by cipher name is not specified");
                                        AlgorithmDef algorithmDef = fromNameIfExists(cipherName);
                                        checkState(algorithmDef != null, "Algorithm %s not found", cipherName);
                                        return new ImportStartState(journalFile, entry.getNextHeaderPosition(),
                                            algorithmDef, base64().decode(headers.get(X_SFS_SECRET)));
                                    });
                            } else {
                                return just(new ImportStartState(journalFile, entry.getNextHeaderPosition(), null, null));
                            }
                        }));
            }).flatMap(importStartState -> {
                JournalFile journalFile = importStartState.getJournalFile();
                long startPosition = importStartState.getStartPosition();
                boolean encrypted = importStartState.getAlgorithmDef() != null;
                byte[] secret = importStartState.getSecret();
                AlgorithmDef algorithmDef = importStartState.getAlgorithmDef();
                httpServerRequest.startProxyKeepAlive();
                SfsVertx sfsVertx = vertxContext.vertx();
                return journalFile.scan(sfsVertx, startPosition, entry -> {
                    // skip over any positions that should be skipped
                    if (skipPositions.contains(entry.getHeaderPosition())) {
                        return just(true);
                    }
                    return entry.getMetadata(sfsVertx).flatMap(buffer -> {
                        try {
                            Header header = Header.parseFrom(buffer.getBytes());
                            Type type = header.getType();
                            checkState(VERSION_01.equals(type), "Type was %s, expected %s", type, VERSION_01);
                            byte[] cipherDataSalt = header.getCipherDataSalt() != null
                                ? header.getCipherDataSalt().toByteArray() : null;
                            byte[] cipherMetadataSalt = header.getCipherMetadataSalt() != null
                                ? header.getCipherMetadataSalt().toByteArray() : null;
                            CompressionType metadataCompressionType = header.getMetadataCompressionType();
                            checkState(NONE.equals(metadataCompressionType) || DEFLATE.equals(metadataCompressionType),
                                "Metadata compression type was %s, expected %s", metadataCompressionType, DEFLATE);
                            CompressionType dataCompressionType = header.getDataCompressionType();
                            checkState(NONE.equals(dataCompressionType) || DEFLATE.equals(dataCompressionType),
                                "Data compression type was %s, expected %s", dataCompressionType, DEFLATE);
                            byte[] marshaledExportObject = header.getData().toByteArray();
                            if (encrypted) {
                                checkState(cipherMetadataSalt != null && cipherMetadataSalt.length > 0);
                                Algorithm algorithm = algorithmDef.create(secret, cipherMetadataSalt);
                                marshaledExportObject = algorithm.decrypt(marshaledExportObject);
                            }
                            if (DEFLATE.equals(metadataCompressionType)) {
                                ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
                                try (InflaterOutputStream inflaterOutputStream = new InflaterOutputStream(byteArrayOutputStream)) {
                                    inflaterOutputStream.write(marshaledExportObject);
                                } catch (IOException e) {
                                    throw new RuntimeException(e);
                                }
                                marshaledExportObject = byteArrayOutputStream.toByteArray();
                            }
                            Version01 exportObject = Version01.parseFrom(marshaledExportObject);
                            ObjectPath originalObjectPath = fromPaths(exportObject.getObjectId());
                            String originalAccountName = originalObjectPath.accountName().get();
                            String originalContainerName = originalObjectPath.containerName().get();
                            String originalObjectName = originalObjectPath.objectName().get();
                            ObjectPath targetObjectPath = fromPaths(targetPersistentContainer.getId(), originalObjectName);
                            ValidatePath validatePath = new ValidateObjectPath();
                            validatePath.call(targetObjectPath);
                            String targetObjectId = targetObjectPath.objectPath().get();
                            String targetAccountName = targetObjectPath.accountName().get();
                            String targetContainerName = targetObjectPath.containerName().get();
                            return just(targetObjectId)
                                .flatMap(new LoadObject(vertxContext, targetPersistentContainer))
                                .map(oPersistentObject -> {
                                    if (oPersistentObject.isPresent()) {
                                        PersistentObject persistentObject = oPersistentObject.get();
                                        return persistentObject.newVersion().merge(exportObject);
                                    } else {
                                        final TransientObject transientObject =
                                            new TransientObject(targetPersistentContainer, targetObjectId)
                                                .setOwnerGuid(exportObject.getOwnerGuid());
                                        return transientObject.newVersion().merge(exportObject);
                                    }
                                })
                                .flatMap(transientVersion -> {
                                    long length = transientVersion.getContentLength().get();
                                    if (length > 0 && !transientVersion.isDeleted()) {
                                        return aVoid().flatMap(aVoid -> {
                                            PipedReadStream pipedReadStream = new PipedReadStream();
                                            BufferEndableWriteStream bufferStreamConsumer = new PipedEndableWriteStream(pipedReadStream);
                                            if (DEFLATE.equals(dataCompressionType)) {
                                                bufferStreamConsumer = new InflaterEndableWriteStream(bufferStreamConsumer);
                                            }
                                            if (encrypted) {
                                                checkState(cipherDataSalt != null && cipherDataSalt.length > 0);
                                                Algorithm algorithm = algorithmDef.create(secret, cipherDataSalt);
                                                bufferStreamConsumer = algorithm.decrypt(bufferStreamConsumer);
                                            }
                                            Observable<Void> oProducer = entry.produceData(sfsVertx, bufferStreamConsumer);
                                            Observable<TransientSegment> oConsumer = just(transientVersion)
                                                .flatMap(new WriteNewSegment(vertxContext, pipedReadStream));
                                            return combineSinglesDelayError(oProducer, oConsumer,
                                                (aVoid1, transientSegment) -> transientSegment);
                                        }).map(transientSegment -> transientSegment.getParent());
                                    } else {
                                        return just(transientVersion);
                                    }
                                })
                                .doOnNext(transientVersion -> {
                                    Optional<String> oObjectManifest = transientVersion.getObjectManifest();
                                    if (oObjectManifest.isPresent()) {
                                        String objectManifest = oObjectManifest.get();
                                        int indexOfObjectName = objectManifest.indexOf(DELIMITER);
                                        if (indexOfObjectName > 0) {
                                            String containerName = objectManifest.substring(0, indexOfObjectName);
                                            // only adjust the object manifest if the manifest references objects
                                            // in the container that was exported
                                            if (Objects.equals(containerName, originalContainerName)) {
                                                objectManifest = targetContainerName + DELIMITER + objectManifest.substring(indexOfObjectName + 1);
                                                transientVersion.setObjectManifest(objectManifest);
                                            }
                                        }
                                    }
                                })
                                .flatMap(new PersistOrUpdateVersion(vertxContext))
                                .flatMap(transientVersion -> {
                                    long length = transientVersion.getContentLength().get();
                                    if (length > 0 && !transientVersion.getSegments().isEmpty()) {
                                        TransientSegment latestSegment = transientVersion.getNewestSegment().get();
                                        return just(latestSegment)
                                            .flatMap(new AcknowledgeSegment(httpServerRequest.vertxContext()))
                                            .map(modified -> latestSegment.getParent());
                                    } else {
                                        return just(transientVersion);
                                    }
                                })
                                .flatMap(transientVersion -> {
                                    final long versionId = transientVersion.getId();
                                    XObject xObject = transientVersion.getParent();
                                    return just((PersistentObject) xObject)
                                        .map(persistentObject -> persistentObject.setUpdateTs(getInstance()))
                                        .flatMap(new UpdateObject(httpServerRequest.vertxContext()))
                                        .map(new ValidateOptimisticObjectLock())
                                        .map(persistentObject -> persistentObject.getVersion(versionId).get());
                                })
                                .map(version -> TRUE);
                        } catch (InvalidProtocolBufferException e) {
                            throw new RuntimeException(e);
                        }
                    }).onErrorResumeNext(throwable ->
                        error(new IgnorePositionRuntimeException(throwable, entry.getHeaderPosition())));
                });
            }).doOnNext(aVoid -> LOGGER.info("Done importing into container " + targetPersistentContainer.getId() + " from " + importDirectory))
              .map(new ToVoid<>())
              .map(aVoid -> {
                  JsonObject jsonResponse = new JsonObject();
                  jsonResponse.put("code", HTTP_OK);
                  return jsonResponse;
              })
              .onErrorResumeNext(throwable -> {
                  LOGGER.info("Failed importing into container " + targetPersistentContainer.getId() + " from " + importDirectory, throwable);
                  Optional<IgnorePositionRuntimeException> oIgnorePosition =
                      unwrapCause(IgnorePositionRuntimeException.class, throwable);
                  if (oIgnorePosition.isPresent()) {
                      IgnorePositionRuntimeException ignorePositionRuntimeException = oIgnorePosition.get();
                      LOGGER.error("Handling Exception", ignorePositionRuntimeException);
                      long positionToIgnore = ignorePositionRuntimeException.getPosition();
                      JsonObject jsonResponse = new JsonObject();
                      skipPositions.add(positionToIgnore);
                      String joined = Joiner.on(',').join(skipPositions);
                      jsonResponse.put("code", HTTP_INTERNAL_ERROR);
                      jsonResponse.put("message", format(
                          "If you would like to ignore this position set the %s header with the value %s",
                          X_SFS_IMPORT_SKIP_POSITIONS, joined));
                      jsonResponse.put(X_SFS_IMPORT_SKIP_POSITIONS, joined);
                      return just(jsonResponse);
                  } else {
                      return error(throwable);
                  }
              });
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<JsonObject>(httpServerRequest) {
            @Override
            public void onNext(JsonObject jsonResponse) {
                HttpServerResponse httpResponse = httpServerRequest.response();
                httpResponse.write(jsonResponse.encode(), UTF_8.toString()).write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.container.VerifyRepairAllContainersExecute.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    Defer.aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdminOrSystem(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ToVoid<>())
        .flatMap(aVoid -> {
            ClusterInfo clusterInfo = vertxContext.verticle().getClusterInfo();
            Nodes nodes = vertxContext.verticle().nodes();
            MultiMap headers = httpServerRequest.headers();
            long timeout = headers.contains(Jobs.Parameters.TIMEOUT)
                ? Long.parseLong(headers.get(Jobs.Parameters.TIMEOUT))
                : 100;
            String unparsedForceRemoveVolumes = headers.contains(Jobs.Parameters.FORCE_REMOVE_VOLUMES)
                ? headers.get(Jobs.Parameters.FORCE_REMOVE_VOLUMES)
                : null;
            MultiMap params = MultiMap.caseInsensitiveMultiMap();
            if (unparsedForceRemoveVolumes != null) {
                params.add(Jobs.Parameters.FORCE_REMOVE_VOLUMES, unparsedForceRemoveVolumes);
            }
            TransientServiceDef transientServiceDef = clusterInfo.getCurrentMasterNode();
            MasterNode masterNode = nodes.remoteMasterNode(vertxContext, transientServiceDef);
            httpServerRequest.startProxyKeepAlive();
            return masterNode.executeJob(Jobs.ID.VERIFY_REPAIR_ALL_CONTAINERS_OBJECTS, params, timeout, TimeUnit.MILLISECONDS);
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject responseJson = new JsonObject().put("code", HTTP_OK).put("message", "Success");
                httpServerRequest.response()
                    .write(responseJson.encode(), StandardCharsets.UTF_8.toString())
                    .write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.container.VerifyRepairAllContainersStop.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    Defer.aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdminOrSystem(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderExists(Jobs.Parameters.TIMEOUT))
        .map(new ValidateHeaderBetweenLong(Jobs.Parameters.TIMEOUT, 100, Long.MAX_VALUE))
        .map(new ToVoid<>())
        .flatMap(aVoid -> {
            ClusterInfo clusterInfo = vertxContext.verticle().getClusterInfo();
            Nodes nodes = vertxContext.verticle().nodes();
            MultiMap headers = httpServerRequest.headers();
            long timeout = headers.contains(Jobs.Parameters.TIMEOUT)
                ? Long.parseLong(headers.get(Jobs.Parameters.TIMEOUT))
                : 100;
            TransientServiceDef transientServiceDef = clusterInfo.getCurrentMasterNode();
            MasterNode masterNode = nodes.remoteMasterNode(vertxContext, transientServiceDef);
            httpServerRequest.startProxyKeepAlive();
            return masterNode.stopJob(Jobs.ID.VERIFY_REPAIR_ALL_CONTAINERS_OBJECTS, timeout, TimeUnit.MILLISECONDS);
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject responseJson = new JsonObject().put("code", HTTP_OK).put("message", "Success");
                httpServerRequest.response()
                    .write(responseJson.encode(), StandardCharsets.UTF_8.toString())
                    .write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.container.VerifyRepairAllContainersWait.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    Defer.aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdminOrSystem(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderExists(Jobs.Parameters.TIMEOUT))
        .map(new ValidateHeaderBetweenLong(Jobs.Parameters.TIMEOUT, 100, Long.MAX_VALUE))
        .map(new ToVoid<>())
        .flatMap(aVoid -> {
            ClusterInfo clusterInfo = vertxContext.verticle().getClusterInfo();
            Nodes nodes = vertxContext.verticle().nodes();
            MultiMap headers = httpServerRequest.headers();
            long timeout = headers.contains(Jobs.Parameters.TIMEOUT)
                ? Long.parseLong(headers.get(Jobs.Parameters.TIMEOUT))
                : 100;
            TransientServiceDef transientServiceDef = clusterInfo.getCurrentMasterNode();
            MasterNode masterNode = nodes.remoteMasterNode(vertxContext, transientServiceDef);
            httpServerRequest.startProxyKeepAlive();
            return masterNode.waitForJob(Jobs.ID.VERIFY_REPAIR_ALL_CONTAINERS_OBJECTS, timeout, TimeUnit.MILLISECONDS);
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject responseJson = new JsonObject().put("code", HTTP_OK).put("message", "Success");
                httpServerRequest.response()
                    .write(responseJson.encode(), StandardCharsets.UTF_8.toString())
                    .write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.container.VerifyRepairContainerExecute.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    Defer.aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdminOrSystem(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderBetweenLong(Jobs.Parameters.TIMEOUT, 100, Long.MAX_VALUE))
        .map(new ToVoid<>())
        .map(aVoid -> ObjectPath.fromSfsRequest(httpServerRequest))
        .map(new ValidateContainerPath())
        .flatMap(objectPath -> {
            ClusterInfo clusterInfo = vertxContext.verticle().getClusterInfo();
            Nodes nodes = vertxContext.verticle().nodes();
            MultiMap headers = httpServerRequest.headers();
            long timeout = headers.contains(Jobs.Parameters.TIMEOUT)
                ? Long.parseLong(headers.get(Jobs.Parameters.TIMEOUT))
                : 100;
            String unparsedForceRemoveVolumes = headers.contains(Jobs.Parameters.FORCE_REMOVE_VOLUMES)
                ? headers.get(Jobs.Parameters.FORCE_REMOVE_VOLUMES)
                : null;
            MultiMap params = MultiMap.caseInsensitiveMultiMap();
            if (unparsedForceRemoveVolumes != null) {
                params.add(Jobs.Parameters.FORCE_REMOVE_VOLUMES, unparsedForceRemoveVolumes);
            }
            params.set(Jobs.Parameters.CONTAINER_ID, objectPath.containerPath().get());
            TransientServiceDef transientServiceDef = clusterInfo.getCurrentMasterNode();
            MasterNode masterNode = nodes.remoteMasterNode(vertxContext, transientServiceDef);
            httpServerRequest.startProxyKeepAlive();
            return masterNode.executeJob(Jobs.ID.VERIFY_REPAIR_CONTAINER_OBJECTS, params, timeout, TimeUnit.MILLISECONDS);
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject responseJson = new JsonObject().put("code", HTTP_OK).put("message", "Success");
                httpServerRequest.response()
                    .write(responseJson.encode(), StandardCharsets.UTF_8.toString())
                    .write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.container.VerifyRepairContainerStop.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    Defer.aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdminOrSystem(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderExists(Jobs.Parameters.TIMEOUT))
        .map(new ValidateHeaderBetweenLong(Jobs.Parameters.TIMEOUT, 100, Long.MAX_VALUE))
        .map(new ToVoid<>())
        .map(aVoid -> ObjectPath.fromSfsRequest(httpServerRequest))
        .map(new ValidateContainerPath())
        .flatMap(objectPath -> {
            ClusterInfo clusterInfo = vertxContext.verticle().getClusterInfo();
            Nodes nodes = vertxContext.verticle().nodes();
            MultiMap headers = httpServerRequest.headers();
            long timeout = headers.contains(Jobs.Parameters.TIMEOUT)
                ? Long.parseLong(headers.get(Jobs.Parameters.TIMEOUT))
                : 100;
            TransientServiceDef transientServiceDef = clusterInfo.getCurrentMasterNode();
            MasterNode masterNode = nodes.remoteMasterNode(vertxContext, transientServiceDef);
            httpServerRequest.startProxyKeepAlive();
            return masterNode.stopJob(Jobs.ID.VERIFY_REPAIR_CONTAINER_OBJECTS, timeout, TimeUnit.MILLISECONDS);
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject responseJson = new JsonObject().put("code", HTTP_OK).put("message", "Success");
                httpServerRequest.response()
                    .write(responseJson.encode(), StandardCharsets.UTF_8.toString())
                    .write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.container.VerifyRepairContainerWait.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    Defer.aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdminOrSystem(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderExists(Jobs.Parameters.TIMEOUT))
        .map(new ValidateHeaderBetweenLong(Jobs.Parameters.TIMEOUT, 100, Long.MAX_VALUE))
        .map(new ToVoid<>())
        .map(aVoid -> ObjectPath.fromSfsRequest(httpServerRequest))
        .map(new ValidateContainerPath())
        .flatMap(objectPath -> {
            ClusterInfo clusterInfo = vertxContext.verticle().getClusterInfo();
            Nodes nodes = vertxContext.verticle().nodes();
            MultiMap headers = httpServerRequest.headers();
            long timeout = headers.contains(Jobs.Parameters.TIMEOUT)
                ? Long.parseLong(headers.get(Jobs.Parameters.TIMEOUT))
                : 100;
            TransientServiceDef transientServiceDef = clusterInfo.getCurrentMasterNode();
            MasterNode masterNode = nodes.remoteMasterNode(vertxContext, transientServiceDef);
            httpServerRequest.startProxyKeepAlive();
            return masterNode.waitForJob(Jobs.ID.VERIFY_REPAIR_CONTAINER_OBJECTS, timeout, TimeUnit.MILLISECONDS);
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject responseJson = new JsonObject().put("code", HTTP_OK).put("message", "Success");
                httpServerRequest.response()
                    .write(responseJson.encode(), StandardCharsets.UTF_8.toString())
                    .write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.containerkeys.ReEncryptContainerKeys.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    Defer.aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdminOrSystem(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderBetweenLong(Jobs.Parameters.TIMEOUT, 100, Long.MAX_VALUE))
        .map(new ToVoid<>())
        .flatMap(aVoid -> {
            ClusterInfo clusterInfo = vertxContext.verticle().getClusterInfo();
            Nodes nodes = vertxContext.verticle().nodes();
            MultiMap headers = httpServerRequest.headers();
            long timeout = headers.contains(Jobs.Parameters.TIMEOUT)
                ? Long.parseLong(headers.get(Jobs.Parameters.TIMEOUT))
                : 100;
            MultiMap params = MultiMap.caseInsensitiveMultiMap();
            TransientServiceDef transientServiceDef = clusterInfo.getCurrentMasterNode();
            MasterNode masterNode = nodes.remoteMasterNode(vertxContext, transientServiceDef);
            httpServerRequest.startProxyKeepAlive();
            return masterNode.executeJob(Jobs.ID.RE_ENCRYPT_CONTAINER_KEYS, params, timeout, TimeUnit.MILLISECONDS);
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject responseJson = new JsonObject().put("code", HTTP_OK).put("message", "Success");
                httpServerRequest.response()
                    .write(responseJson.encode(), StandardCharsets.UTF_8.toString())
                    .write(DELIMITER_BUFFER);
            }
        });
}
From source file: org.sfs.nodes.compute.masterkey.ReEncryptMasterKeys.java
License: Apache License

@Override
public void handle(final SfsRequest httpServerRequest) {
    VertxContext<Server> vertxContext = httpServerRequest.vertxContext();
    Defer.aVoid().flatMap(new Authenticate(httpServerRequest))
        .flatMap(new ValidateActionAdminOrSystem(httpServerRequest))
        .map(aVoid -> httpServerRequest)
        .map(new ValidateHeaderBetweenLong(Jobs.Parameters.TIMEOUT, 100, Long.MAX_VALUE))
        .map(new ToVoid<>())
        .flatMap(aVoid -> {
            ClusterInfo clusterInfo = vertxContext.verticle().getClusterInfo();
            Nodes nodes = vertxContext.verticle().nodes();
            MultiMap headers = httpServerRequest.headers();
            long timeout = headers.contains(Jobs.Parameters.TIMEOUT)
                ? Long.parseLong(headers.get(Jobs.Parameters.TIMEOUT))
                : 100;
            MultiMap params = MultiMap.caseInsensitiveMultiMap();
            TransientServiceDef transientServiceDef = clusterInfo.getCurrentMasterNode();
            MasterNode masterNode = nodes.remoteMasterNode(vertxContext, transientServiceDef);
            httpServerRequest.startProxyKeepAlive();
            return masterNode.executeJob(Jobs.ID.RE_ENCRYPT_MASTER_KEYS, params, timeout, TimeUnit.MILLISECONDS);
        })
        .single()
        .subscribe(new ConnectionCloseTerminus<Void>(httpServerRequest) {
            @Override
            public void onNext(Void aVoid) {
                JsonObject responseJson = new JsonObject().put("code", HTTP_OK).put("message", "Success");
                httpServerRequest.response()
                    .write(responseJson.encode(), StandardCharsets.UTF_8.toString())
                    .write(DELIMITER_BUFFER);
            }
        });
}