List of usage examples for javax.json JsonReader.readObject()
JsonObject readObject();
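A JsonReader parses one complete JSON document; calling readObject() returns the root value as a JsonObject and fails if the document is not a JSON object. Before the project-level examples below, here is a minimal, self-contained sketch of the typical pattern; the sample payload and field names are illustrative only and do not come from the examples that follow.

import java.io.StringReader;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;

public class ReadObjectExample {
    public static void main(String[] args) {
        // illustrative payload, not taken from the examples below
        String json = "{\"name\":\"Alice\",\"age\":30}";
        // try-with-resources closes the reader even if readObject() throws
        try (JsonReader reader = Json.createReader(new StringReader(json))) {
            JsonObject obj = reader.readObject();      // parses the whole document as a JSON object
            System.out.println(obj.getString("name")); // Alice
            System.out.println(obj.getInt("age"));     // 30
        }
    }
}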
From source file:com.oncore.calorders.rest.service.extension.OrderHistoryFacadeRESTExtension.java
/**
 * Creates an order, containing the ordered products and related services.
 *
 * @param orderjson The order, represented as a JSON string
 * @throws DataAccessException
 */
@POST
@Path("createOrder")
@Consumes({ MediaType.APPLICATION_JSON })
public void createOrder(String orderjson) throws DataAccessException {
    try {
        JsonReader reader = Json.createReader(new StringReader(orderjson));
        JsonObject orderObject = reader.readObject();
        reader.close();
        OrderHistory order = new OrderHistory();
        order.setUpdateTs(new Date());
        order.setUpdateUserId(orderObject.getString("updateUserId", null));
        order.setCreateTs(new Date());
        order.setCreateUserId(orderObject.getString("createUserId", null));
        OrdStatusCd ordStatusCd = this.ordStatusCdFacadeREST.find(orderObject.getString("orderStatusCd", null));
        if (ordStatusCd == null) {
            throw new DataAccessException(ErrorCode.DATAACCESSERROR.toString());
        } else {
            order.setOrdStatusCd(ordStatusCd);
        }
        Party party = this.partyFacadeRESTExtension.find(Integer.valueOf(orderObject.getString("partyUid")));
        if (party == null) {
            throw new DataAccessException(ErrorCode.DATAACCESSERROR.toString());
        } else {
            order.setPtyUidFk(party);
            order.setDepUidFk(party.getGroupPartyAssocCollection().iterator().next().getGrpUidFk().getDepUidFk());
        }
        order.setOrderProductAssocCollection(new ArrayList<OrderProductAssoc>());
        JsonArray productList = orderObject.getJsonArray("products");
        for (int i = 0; i < productList.size(); i++) {
            JsonObject productObject = productList.getJsonObject(i);
            OrderProductAssoc orderProductAssoc = new OrderProductAssoc();
            Product product = this.productFacadeRESTExtension.find(productObject.getInt("prdUid"));
            orderProductAssoc.setPrdUidFk(product);
            orderProductAssoc.setOrdUidFk(order);
            orderProductAssoc.setUpdateTs(new Date());
            orderProductAssoc.setUpdateUserId(productObject.getString("updateUserId", null));
            orderProductAssoc.setCreateTs(new Date());
            orderProductAssoc.setCreateUserId(productObject.getString("createUserId", null));
            orderProductAssoc.setOpaQuantity(productObject.getInt("quantity"));
            orderProductAssoc.setOpaPrice(product.getPrdCntrUnitPrice()
                    .multiply(BigDecimal.valueOf(productObject.getInt("quantity"))));
            order.getOrderProductAssocCollection().add(orderProductAssoc);
        }
        super.create(order);
    } catch (Exception ex) {
        Logger.error(LOG, FormatHelper.getStackTrace(ex));
        throw new DataAccessException(ex, ErrorCode.DATAACCESSERROR);
    }
}
From source file:de.tu_dortmund.ub.data.dswarm.Task.java
/**
 * Get the resource id of the resource for the data model of the prototype project.
 *
 * @param dataModelID
 * @return
 * @throws Exception
 */
private String getProjectResourceID(String dataModelID) throws Exception {
    String resourceID = null;
    CloseableHttpClient httpclient = HttpClients.createDefault();
    try {
        // Fetch the mappings from the project with 'projectID'
        HttpGet httpGet = new HttpGet(config.getProperty("engine.dswarm.api") + "datamodels/" + dataModelID);
        CloseableHttpResponse httpResponse = httpclient.execute(httpGet);
        logger.info("[" + config.getProperty("service.name") + "] " + "request : " + httpGet.getRequestLine());
        try {
            int statusCode = httpResponse.getStatusLine().getStatusCode();
            HttpEntity httpEntity = httpResponse.getEntity();
            switch (statusCode) {
            case 200: {
                StringWriter writer = new StringWriter();
                IOUtils.copy(httpEntity.getContent(), writer, "UTF-8");
                String responseJson = writer.toString();
                logger.info("[" + config.getProperty("service.name") + "] responseJson : " + responseJson);
                JsonReader jsonReader = Json.createReader(IOUtils.toInputStream(responseJson, "UTF-8"));
                JsonObject jsonObject = jsonReader.readObject();
                JsonArray resources = jsonObject.getJsonObject("configuration").getJsonArray("resources");
                resourceID = resources.getJsonObject(0).getJsonString("uuid").getString();
                logger.info("[" + config.getProperty("service.name") + "] resourceID : " + resourceID);
                break;
            }
            default: {
                logger.error("[" + config.getProperty("service.name") + "] " + statusCode + " : "
                        + httpResponse.getStatusLine().getReasonPhrase());
            }
            }
            EntityUtils.consume(httpEntity);
        } finally {
            httpResponse.close();
        }
    } finally {
        httpclient.close();
    }
    return resourceID;
}
From source file:edu.harvard.iq.dataverse.api.imports.ImportServiceBean.java
public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse,
        String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
    String status = "";
    Long createdId = null;
    DatasetDTO dsDTO = null;
    try {
        dsDTO = importDDIService.doImport(importType, xmlToParse);
    } catch (XMLStreamException e) {
        throw new ImportException("XMLStreamException" + e);
    }
    // convert DTO to Json,
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    String json = gson.toJson(dsDTO);
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call parse Json to read it into a dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(!importType.equals(ImportType.NEW));
        Dataset ds = parser.parseDataset(obj);
        // For ImportType.NEW, if the user supplies a global identifier, and it's not a protocol
        // we support, it will be rejected.
        if (importType.equals(ImportType.NEW)) {
            if (ds.getGlobalId() != null && !ds.getProtocol()
                    .equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
                throw new ImportException(
                        "Could not register id " + ds.getGlobalId() + ", protocol not supported");
            }
        }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
                // For migration and harvest, add NA for missing required values
                for (ConstraintViolation<DatasetField> v : violations) {
                    DatasetField f = v.getRootBean();
                    f.setSingleValue(DatasetField.NA_VALUE);
                }
            } else {
                // when importing a new dataset, the import will fail
                // if required values are missing.
                String errMsg = "Error importing data:";
                for (ConstraintViolation<DatasetField> v : violations) {
                    errMsg += " " + v.getMessage();
                }
                throw new ImportException(errMsg);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST))
                        && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, fileName);
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    if (importType.equals(ImportType.HARVEST)) {
                        String msg = "Data modified - File: " + fileName + "; Field: "
                                + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; "
                                + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'"
                                + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    } else {
                        String msg = " Validation error for ";
                        if (converted) {
                            msg += "converted ";
                        }
                        msg += "value: " + f.getValue() + ", " + f.getValidationMessage();
                        throw new ImportException(msg);
                    }
                }
            }
        }
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            if (importType.equals(ImportType.HARVEST)) {
                // For harvested datasets, there should always only be one version.
                // We will replace the current version with the imported version.
                if (existingDs.getVersions().size() != 1) {
                    throw new ImportException("Error importing Harvested Dataset, existing dataset has "
                            + existingDs.getVersions().size() + " versions");
                }
                engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest));
                Dataset managedDs = engineSvc
                        .submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
                status = " updated dataset, id=" + managedDs.getId() + ".";
            } else {
                // If we are adding a new version to an existing dataset,
                // check that the version number isn't already in the dataset
                for (DatasetVersion dsv : existingDs.getVersions()) {
                    if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
                        throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber()
                                + " already exists in dataset " + existingDs.getGlobalId());
                    }
                }
                DatasetVersion dsv = engineSvc.submit(
                        new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
                status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId();
                createdId = dsv.getId();
            }
        } else {
            Dataset managedDs = engineSvc
                    .submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
            status = " created dataset, id=" + managedDs.getId() + ".";
            createdId = managedDs.getId();
        }
    } catch (JsonParseException ex) {
        logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
        throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
    } catch (CommandException ex) {
        logger.log(Level.INFO, "Error executing Create dataset command: {0}", ex.getMessage());
        throw new ImportException("Error executing dataverse command: " + ex.getMessage(), ex);
    }
    return Json.createObjectBuilder().add("message", status);
}
From source file:com.floreantpos.ui.views.payment.SettleTicketDialog.java
public void submitMyKalaDiscount() {
    if (ticket.hasProperty(LOYALTY_ID)) {
        POSMessageDialog.showError(Application.getPosWindow(), Messages.getString("SettleTicketDialog.18")); //$NON-NLS-1$
        return;
    }
    try {
        String loyaltyid = JOptionPane.showInputDialog(Messages.getString("SettleTicketDialog.19")); //$NON-NLS-1$
        if (StringUtils.isEmpty(loyaltyid)) {
            return;
        }
        ticket.addProperty(LOYALTY_ID, loyaltyid);
        String transactionURL = buildLoyaltyApiURL(ticket, loyaltyid);
        String string = IOUtils.toString(new URL(transactionURL).openStream());
        JsonReader reader = Json.createReader(new StringReader(string));
        JsonObject object = reader.readObject();
        JsonArray jsonArray = (JsonArray) object.get("discounts"); //$NON-NLS-1$
        for (int i = 0; i < jsonArray.size(); i++) {
            JsonObject jsonObject = (JsonObject) jsonArray.get(i);
            addCoupon(ticket, jsonObject);
        }
        updateModel();
        OrderController.saveOrder(ticket);
        POSMessageDialog.showMessage(Application.getPosWindow(), Messages.getString("SettleTicketDialog.21")); //$NON-NLS-1$
        paymentView.updateView();
    } catch (Exception e) {
        POSMessageDialog.showError(Application.getPosWindow(), Messages.getString("SettleTicketDialog.22"), e); //$NON-NLS-1$
    }
}
From source file:csg.files.CSGFiles.java
private JsonObject loadJSONFile(String jsonFilePath) throws IOException {
    InputStream is = new FileInputStream(jsonFilePath);
    JsonReader jsonReader = Json.createReader(is);
    JsonObject json = jsonReader.readObject();
    jsonReader.close();
    is.close();
    return json;
}
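The helper above closes the reader and the stream by hand. Because javax.json's JsonReader (and the underlying InputStream) are AutoCloseable, a try-with-resources form is a common alternative; the following is a minimal sketch of that variant, assuming the same signature and imports as the original helper, not code from the csg.files.CSGFiles project.

// Hypothetical try-with-resources variant of the helper above: the stream and
// reader are closed automatically, even when readObject() throws.
private JsonObject loadJSONFile(String jsonFilePath) throws IOException {
    try (InputStream is = new FileInputStream(jsonFilePath);
         JsonReader jsonReader = Json.createReader(is)) {
        return jsonReader.readObject();
    }
}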
From source file:edu.harvard.iq.dataverse.api.imports.ImportServiceBean.java
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient,
        String harvestIdentifier, String metadataFormat, File metadataFile, PrintWriter cleanupLog)
        throws ImportException, IOException {
    if (harvestingClient == null || harvestingClient.getDataverse() == null) {
        throw new ImportException(
                "importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient.");
    }
    Dataverse owner = harvestingClient.getDataverse();
    Dataset importedDataset = null;
    DatasetDTO dsDTO = null;
    String json = null;
    // TODO:
    // At the moment (4.5; the first official "export/harvest release"), there
    // are 3 supported metadata formats: DDI, DC and native Dataverse metadata
    // encoded in JSON. The 2 XML formats are handled by custom implementations;
    // each of the 2 implementations uses its own parsing approach. (see the
    // ImportDDIServiceBean and ImportGenericServiceBean for details).
    // TODO: Need to create a system of standardized import plugins - similar to Stephen
    // Kraffmiller's export modules; replace the logic below with clean
    // programmatic lookup of the import plugin needed.
    if ("ddi".equalsIgnoreCase(metadataFormat) || "oai_ddi".equals(metadataFormat)
            || metadataFormat.toLowerCase().matches("^oai_ddi.*")) {
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            // TODO:
            // import type should be configurable - it should be possible to
            // select whether you want to harvest with or without files,
            // ImportType.HARVEST vs. ImportType.HARVEST_WITH_FILES
            logger.fine("importing DDI " + metadataFile.getAbsolutePath());
            dsDTO = importDDIService.doImport(ImportType.HARVEST_WITH_FILES, xmlToParse);
        } catch (IOException | XMLStreamException | ImportException e) {
            throw new ImportException(
                    "Failed to process DDI XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) {
        logger.fine("importing DC " + metadataFile.getAbsolutePath());
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            dsDTO = importGenericService.processOAIDCxml(xmlToParse);
        } catch (IOException | XMLStreamException e) {
            throw new ImportException(
                    "Failed to process Dublin Core XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dataverse_json".equals(metadataFormat)) {
        // This is Dataverse metadata already formatted in JSON.
        // Simply read it into a string, and pass to the final import further down:
        logger.fine("Attempting to import custom dataverse metadata from file " + metadataFile.getAbsolutePath());
        json = new String(Files.readAllBytes(metadataFile.toPath()));
    } else {
        throw new ImportException("Unsupported import metadata format: " + metadataFormat);
    }
    if (json == null) {
        if (dsDTO != null) {
            // convert DTO to Json,
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            json = gson.toJson(dsDTO);
            logger.fine("JSON produced for the metadata harvested: " + json);
        } else {
            throw new ImportException(
                    "Failed to transform XML metadata format " + metadataFormat + " into a DatasetDTO");
        }
    }
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call parse Json to read it into a dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(true);
        Dataset ds = parser.parseDataset(obj);
        // For ImportType.NEW, if the metadata contains a global identifier, and it's not a protocol
        // we support, it should be rejected.
        // (TODO: ! - add some way of keeping track of supported protocols!)
        //if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
        //    throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
        //}
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            // For migration and harvest, add NA for missing required values
            for (ConstraintViolation<DatasetField> v : violations) {
                DatasetField f = v.getRootBean();
                f.setSingleValue(DatasetField.NA_VALUE);
            }
        }
        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                // TODO: Is this scrubbing something we want to continue doing?
                if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName());
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    String msg = "Data modified - File: " + metadataFile.getName() + "; Field: "
                            + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; "
                            + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'"
                            + DatasetField.NA_VALUE + "'";
                    cleanupLog.println(msg);
                    f.setValue(DatasetField.NA_VALUE);
                }
            }
        }
        // A Global ID is required, in order for us to be able to harvest and import
        // this dataset:
        if (StringUtils.isEmpty(ds.getGlobalId())) {
            throw new ImportException("The harvested metadata record with the OAI server identifier "
                    + harvestIdentifier
                    + " does not contain a global unique identifier that we could recognize, skipping.");
        }
        ds.setHarvestedFrom(harvestingClient);
        ds.setHarvestIdentifier(harvestIdentifier);
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            // If this dataset already exists IN ANOTHER DATAVERSE
            // we are just going to skip it!
            if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) {
                throw new ImportException("The dataset with the global id " + ds.getGlobalId()
                        + " already exists, in the dataverse " + existingDs.getOwner().getAlias() + ", skipping.");
            }
            // And if we already have a dataset with this same id, in this same
            // dataverse, but it is LOCAL dataset (can happen!), we're going to
            // skip it also:
            if (!existingDs.isHarvested()) {
                throw new ImportException("A LOCAL dataset with the global id " + ds.getGlobalId()
                        + " already exists in this dataverse; skipping.");
            }
            // For harvested datasets, there should always only be one version.
            // We will replace the current version with the imported version.
            if (existingDs.getVersions().size() != 1) {
                throw new ImportException("Error importing Harvested Dataset, existing dataset has "
                        + existingDs.getVersions().size() + " versions");
            }
            // Purge all the SOLR documents associated with this client from the
            // index server:
            indexService.deleteHarvestedDocuments(existingDs);
            // files from harvested datasets are removed unceremoniously,
            // directly in the database. no need to bother calling the
            // DeleteFileCommand on them.
            for (DataFile harvestedFile : existingDs.getFiles()) {
                DataFile merged = em.merge(harvestedFile);
                em.remove(merged);
                harvestedFile = null;
            }
            // TODO:
            // Verify what happens with the indexed files in SOLR?
            // are they going to be overwritten by the reindexing of the dataset?
            existingDs.setFiles(null);
            Dataset merged = em.merge(existingDs);
            engineSvc.submit(new DestroyDatasetCommand(merged, dataverseRequest));
            importedDataset = engineSvc
                    .submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        } else {
            importedDataset = engineSvc
                    .submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        }
    } catch (JsonParseException | ImportException | CommandException ex) {
        logger.fine("Failed to import harvested dataset: " + ex.getClass() + ": " + ex.getMessage());
        FileOutputStream savedJsonFileStream = new FileOutputStream(
                new File(metadataFile.getAbsolutePath() + ".json"));
        byte[] jsonBytes = json.getBytes();
        int i = 0;
        while (i < jsonBytes.length) {
            int chunkSize = i + 8192 <= jsonBytes.length ? 8192 : jsonBytes.length - i;
            savedJsonFileStream.write(jsonBytes, i, chunkSize);
            i += chunkSize;
            savedJsonFileStream.flush();
        }
        savedJsonFileStream.close();
        logger.info("JSON produced saved in " + metadataFile.getAbsolutePath() + ".json");
        throw new ImportException(
                "Failed to import harvested dataset: " + ex.getClass() + " (" + ex.getMessage() + ")", ex);
    }
    return importedDataset;
}
From source file:de.tu_dortmund.ub.data.dswarm.Task.java
@Override
public String call() {
    // init logger
    PropertyConfigurator.configure(config.getProperty("service.log4j-conf"));
    logger.info("[" + config.getProperty("service.name") + "] " + "Starting 'Task' ...");
    // init IDs of the prototype project
    String dataModelID = config.getProperty("prototype.dataModelID");
    String projectID = config.getProperty("prototype.projectID");
    String outputDataModelID = config.getProperty("prototype.outputDataModelID");
    // init process values
    String inputResourceID = null;
    String message = null;
    try {
        // get the resource id of the current data model >> updateResourceID replaces resourceID
        String updateResourceID = null;
        try {
            updateResourceID = getProjectResourceID(dataModelID);
        } catch (Exception e1) {
            e1.printStackTrace();
        }
        logger.info("[" + config.getProperty("service.name") + "] updateResourceID = " + updateResourceID);
        // upload resource and update an InputDataModel
        String inputResourceJson = uploadFileAndUpdateResource(updateResourceID, resource,
                "resource for project '" + resource,
                config.getProperty("project.name") + "' - case " + cnt);
        JsonReader jsonReader = Json.createReader(IOUtils.toInputStream(inputResourceJson, "UTF-8"));
        inputResourceID = jsonReader.readObject().getString("uuid");
        logger.info("[" + config.getProperty("service.name") + "] inputResourceID = " + inputResourceID);
        if (updateResourceID != null) {
            // update the datamodel (will use its (update) resource)
            updateDataModel(dataModelID);
            // configuration and processing of the task
            String jsonResponse = executeTask(dataModelID, projectID, outputDataModelID);
            if (jsonResponse != null) {
                if (Boolean.parseBoolean(config.getProperty("results.persistInFolder"))) {
                    if (Boolean.parseBoolean(config.getProperty("results.writeDMPJson"))) {
                        // save DMP results in files
                        FileUtils.writeStringToFile(new File(config.getProperty("results.folder")
                                + File.separatorChar + dataModelID + "." + cnt + ".json"), jsonResponse);
                    }
                    // build rdf graph
                    ValueFactory factory = ValueFactoryImpl.getInstance();
                    Graph graph = new LinkedHashModel();
                    URI graphUri = factory.createURI(config.getProperty("results.rdf.graph"));
                    URI subject = null;
                    URI predicate = null;
                    URI object = null;
                    Literal literal = null;
                    Statement statement = null;
                    JsonReader dmpJsonResult = Json.createReader(IOUtils.toInputStream(jsonResponse, "UTF-8"));
                    JsonArray records = dmpJsonResult.readArray();
                    for (JsonObject record : records.getValuesAs(JsonObject.class)) {
                        subject = factory
                                .createURI(record.getJsonString("__record_id").toString().replaceAll("\"", ""));
                        for (JsonObject triple : record.getJsonArray("__record_data")
                                .getValuesAs(JsonObject.class)) {
                            for (String key : triple.keySet()) {
                                if (key.endsWith("rdf-syntax-ns#type")) {
                                    predicate = RDF.TYPE;
                                    object = factory.createURI(
                                            triple.getJsonString(key).toString().replaceAll("\"", ""));
                                    statement = factory.createStatement(subject, predicate, object, graphUri);
                                    graph.add(statement);
                                } else {
                                    predicate = factory.createURI(key);
                                    switch (triple.get(key).getValueType().toString()) {
                                    case "STRING": {
                                        try {
                                            object = factory.createURI(
                                                    triple.getJsonString(key).toString().replaceAll("\"", ""));
                                            statement = factory.createStatement(subject, predicate, object, graphUri);
                                            graph.add(statement);
                                        } catch (Exception e) {
                                            literal = factory.createLiteral(
                                                    triple.getJsonString(key).toString().replaceAll("\"", ""));
                                            statement = factory.createStatement(subject, predicate, literal, graphUri);
                                            graph.add(statement);
                                        }
                                        break;
                                    }
                                    case "ARRAY": {
                                        for (JsonString value : triple.getJsonArray(key)
                                                .getValuesAs(JsonString.class)) {
                                            try {
                                                object = factory.createURI(value.toString().replaceAll("\"", ""));
                                                statement = factory.createStatement(subject, predicate, object, graphUri);
                                                graph.add(statement);
                                            } catch (Exception e) {
                                                literal = factory.createLiteral(value.toString().replaceAll("\"", ""));
                                                statement = factory.createStatement(subject, predicate, literal, graphUri);
                                                graph.add(statement);
                                            }
                                        }
                                        break;
                                    }
                                    default: {
                                        logger.info("Unhandled ValueType: " + triple.get(key).getValueType());
                                    }
                                    }
                                }
                            }
                        }
                    }
                    if (graph.size() > 0) {
                        // save rdf data as 'results.rdf.format' in 'results.folder'
                        RDFFormat format = null;
                        switch (config.getProperty("results.rdf.format")) {
                        case "xml": {
                            format = RDFFormat.RDFXML;
                            break;
                        }
                        case "nquads": {
                            format = RDFFormat.NQUADS;
                            break;
                        }
                        case "jsonld": {
                            format = RDFFormat.JSONLD;
                            break;
                        }
                        case "ttl": {
                            format = RDFFormat.TURTLE;
                            break;
                        }
                        default: {
                            format = RDFFormat.RDFXML;
                        }
                        }
                        try {
                            FileOutputStream out = new FileOutputStream(config.getProperty("results.folder")
                                    + File.separatorChar + dataModelID + "." + cnt + ".rdf."
                                    + config.getProperty("results.rdf.format"));
                            RDFWriter writer = Rio.createWriter(format, out);
                            writer.startRDF();
                            for (Statement st : graph) {
                                writer.handleStatement(st);
                            }
                            writer.endRDF();
                            out.close();
                        } catch (RDFHandlerException | IOException e) {
                            e.printStackTrace();
                        }
                        message = "'" + resource + "' transformed. results in '"
                                + config.getProperty("results.folder") + File.separatorChar + dataModelID + "."
                                + cnt + ".rdf." + config.getProperty("results.rdf.format") + "'";
                    } else {
                        message = "'" + resource + "' transformed but result is empty.";
                    }
                }
            } else {
                message = "'" + resource + "' not transformed: error in task execution.";
            }
        }
    } catch (Exception e) {
        logger.error("[" + config.getProperty("service.name") + "] Processing resource '" + resource
                + "' failed with a " + e.getClass().getSimpleName());
        e.printStackTrace();
    }
    return message;
}
From source file:de.tu_dortmund.ub.data.dswarm.Task.java
/**
 * Configuration and processing of the task.
 *
 * @param inputDataModelID
 * @param projectID
 * @param outputDataModelID
 * @return
 */
private String executeTask(String inputDataModelID, String projectID, String outputDataModelID) throws Exception {
    String jsonResponse = null;
    CloseableHttpClient httpclient = HttpClients.createDefault();
    try {
        // Fetch the mappings from the project with 'projectID'
        HttpGet httpGet = new HttpGet(config.getProperty("engine.dswarm.api") + "projects/" + projectID);
        CloseableHttpResponse httpResponse = httpclient.execute(httpGet);
        logger.info("[" + config.getProperty("service.name") + "] " + "request : " + httpGet.getRequestLine());
        String mappings = "";
        try {
            int statusCode = httpResponse.getStatusLine().getStatusCode();
            HttpEntity httpEntity = httpResponse.getEntity();
            switch (statusCode) {
            case 200: {
                StringWriter writer = new StringWriter();
                IOUtils.copy(httpEntity.getContent(), writer, "UTF-8");
                String responseJson = writer.toString();
                logger.info("[" + config.getProperty("service.name") + "] responseJson : " + responseJson);
                JsonReader jsonReader = Json.createReader(IOUtils.toInputStream(responseJson, "UTF-8"));
                JsonObject jsonObject = jsonReader.readObject();
                mappings = jsonObject.getJsonArray("mappings").toString();
                logger.info("[" + config.getProperty("service.name") + "] mappings : " + mappings);
                break;
            }
            default: {
                logger.error("[" + config.getProperty("service.name") + "] " + statusCode + " : "
                        + httpResponse.getStatusLine().getReasonPhrase());
            }
            }
            EntityUtils.consume(httpEntity);
        } finally {
            httpResponse.close();
        }
        // Fetch the InputDataModel
        String inputDataModel = "";
        httpGet = new HttpGet(config.getProperty("engine.dswarm.api") + "datamodels/" + inputDataModelID);
        httpResponse = httpclient.execute(httpGet);
        logger.info("[" + config.getProperty("service.name") + "] " + "request : " + httpGet.getRequestLine());
        try {
            int statusCode = httpResponse.getStatusLine().getStatusCode();
            HttpEntity httpEntity = httpResponse.getEntity();
            switch (statusCode) {
            case 200: {
                StringWriter writer = new StringWriter();
                IOUtils.copy(httpEntity.getContent(), writer, "UTF-8");
                inputDataModel = writer.toString();
                logger.info("[" + config.getProperty("service.name") + "] inputDataModel : " + inputDataModel);
                JsonReader jsonReader = Json.createReader(IOUtils.toInputStream(inputDataModel, "UTF-8"));
                JsonObject jsonObject = jsonReader.readObject();
                String inputResourceID = jsonObject.getJsonObject("data_resource").getString("uuid");
                logger.info("[" + config.getProperty("service.name") + "] mappings : " + mappings);
                break;
            }
            default: {
                logger.error("[" + config.getProperty("service.name") + "] " + statusCode + " : "
                        + httpResponse.getStatusLine().getReasonPhrase());
            }
            }
            EntityUtils.consume(httpEntity);
        } finally {
            httpResponse.close();
        }
        // Fetch the OutputDataModel
        String outputDataModel = "";
        httpGet = new HttpGet(config.getProperty("engine.dswarm.api") + "datamodels/" + outputDataModelID);
        httpResponse = httpclient.execute(httpGet);
        logger.info("[" + config.getProperty("service.name") + "] " + "request : " + httpGet.getRequestLine());
        try {
            int statusCode = httpResponse.getStatusLine().getStatusCode();
            HttpEntity httpEntity = httpResponse.getEntity();
            switch (statusCode) {
            case 200: {
                StringWriter writer = new StringWriter();
                IOUtils.copy(httpEntity.getContent(), writer, "UTF-8");
                outputDataModel = writer.toString();
                logger.info("[" + config.getProperty("service.name") + "] outputDataModel : " + outputDataModel);
                break;
            }
            default: {
                logger.error("[" + config.getProperty("service.name") + "] " + statusCode + " : "
                        + httpResponse.getStatusLine().getReasonPhrase());
            }
            }
            EntityUtils.consume(httpEntity);
        } finally {
            httpResponse.close();
        }
        // Build the task JSON
        String task = "{";
        task += "\"name\":\"" + "Task Batch-Prozess 'CrossRef'" + "\",";
        task += "\"description\":\"" + "Task Batch-Prozess 'CrossRef' zum InputDataModel '" + inputDataModelID + "'\",";
        task += "\"job\": { " + "\"mappings\": " + mappings + "," + "\"uuid\": \"" + UUID.randomUUID() + "\"" + " },";
        task += "\"input_data_model\":" + inputDataModel + ",";
        task += "\"output_data_model\":" + outputDataModel;
        task += "}";
        logger.info("[" + config.getProperty("service.name") + "] task : " + task);
        // POST /dmp/tasks/
        HttpPost httpPost = new HttpPost(config.getProperty("engine.dswarm.api") + "tasks?persist="
                + config.getProperty("results.persistInDMP"));
        StringEntity stringEntity = new StringEntity(task, ContentType.create("application/json", Consts.UTF_8));
        httpPost.setEntity(stringEntity);
        logger.info("[" + config.getProperty("service.name") + "] " + "request : " + httpPost.getRequestLine());
        httpResponse = httpclient.execute(httpPost);
        try {
            int statusCode = httpResponse.getStatusLine().getStatusCode();
            HttpEntity httpEntity = httpResponse.getEntity();
            switch (statusCode) {
            case 200: {
                logger.info("[" + config.getProperty("service.name") + "] " + statusCode + " : "
                        + httpResponse.getStatusLine().getReasonPhrase());
                StringWriter writer = new StringWriter();
                IOUtils.copy(httpEntity.getContent(), writer, "UTF-8");
                jsonResponse = writer.toString();
                logger.info("[" + config.getProperty("service.name") + "] jsonResponse : " + jsonResponse);
                break;
            }
            default: {
                logger.error("[" + config.getProperty("service.name") + "] " + statusCode + " : "
                        + httpResponse.getStatusLine().getReasonPhrase());
            }
            }
            EntityUtils.consume(httpEntity);
        } finally {
            httpResponse.close();
        }
    } finally {
        httpclient.close();
    }
    return jsonResponse;
}
From source file:io.bibleget.BibleGetDB.java
public boolean renewMetaData() {
    if (instance.connect()) {
        try {
            DatabaseMetaData dbMeta;
            dbMeta = instance.conn.getMetaData();
            try (ResultSet rs3 = dbMeta.getTables(null, null, "METADATA", null)) {
                if (rs3.next()) {
                    //System.out.println("Table METADATA exists...");
                    try (Statement stmt = instance.conn.createStatement()) {
                        HTTPCaller myHTTPCaller = new HTTPCaller();
                        String myResponse;
                        myResponse = myHTTPCaller.getMetaData("biblebooks");
                        if (myResponse != null) {
                            JsonReader jsonReader = Json.createReader(new StringReader(myResponse));
                            JsonObject json = jsonReader.readObject();
                            JsonArray arrayJson = json.getJsonArray("results");
                            if (arrayJson != null) {
                                ListIterator pIterator = arrayJson.listIterator();
                                while (pIterator.hasNext()) {
                                    try (Statement stmt1 = instance.conn.createStatement()) {
                                        int index = pIterator.nextIndex();
                                        JsonArray currentJson = (JsonArray) pIterator.next();
                                        String biblebooks_str = currentJson.toString(); //.replaceAll("\"", "\\\\\"");
                                        //System.out.println("BibleGetDB line 267: BIBLEBOOKS"+Integer.toString(index)+"='"+biblebooks_str+"'");
                                        String stmt_str = "UPDATE METADATA SET BIBLEBOOKS" + Integer.toString(index)
                                                + "='" + biblebooks_str + "' WHERE ID=0";
                                        //System.out.println("executing update: "+stmt_str);
                                        int update = stmt1.executeUpdate(stmt_str);
                                        //System.out.println("executeUpdate resulted in: "+Integer.toString(update));
                                        stmt1.close();
                                    }
                                }
                            }
                            arrayJson = json.getJsonArray("languages");
                            if (arrayJson != null) {
                                try (Statement stmt2 = instance.conn.createStatement()) {
                                    String languages_str = arrayJson.toString(); //.replaceAll("\"", "\\\\\"");
                                    String stmt_str = "UPDATE METADATA SET LANGUAGES='" + languages_str + "' WHERE ID=0";
                                    int update = stmt2.executeUpdate(stmt_str);
                                    stmt2.close();
                                }
                            }
                        }
                        myResponse = myHTTPCaller.getMetaData("bibleversions");
                        if (myResponse != null) {
                            JsonReader jsonReader = Json.createReader(new StringReader(myResponse));
                            JsonObject json = jsonReader.readObject();
                            JsonObject objJson = json.getJsonObject("validversions_fullname");
                            if (objJson != null) {
                                String bibleversions_str = objJson.toString(); //.replaceAll("\"", "\\\\\"");
                                try (Statement stmt3 = instance.conn.createStatement()) {
                                    String stmt_str = "UPDATE METADATA SET VERSIONS='" + bibleversions_str + "' WHERE ID=0";
                                    int update = stmt3.executeUpdate(stmt_str);
                                    stmt3.close();
                                }
                                Set<String> versionsabbrev = objJson.keySet();
                                if (!versionsabbrev.isEmpty()) {
                                    String versionsabbrev_str = "";
                                    for (String s : versionsabbrev) {
                                        versionsabbrev_str += ("".equals(versionsabbrev_str) ? "" : ",") + s;
                                    }
                                    myResponse = myHTTPCaller.getMetaData("versionindex&versions=" + versionsabbrev_str);
                                    if (myResponse != null) {
                                        jsonReader = Json.createReader(new StringReader(myResponse));
                                        json = jsonReader.readObject();
                                        objJson = json.getJsonObject("indexes");
                                        if (objJson != null) {
                                            for (String name : objJson.keySet()) {
                                                JsonObjectBuilder tempBld = Json.createObjectBuilder();
                                                JsonObject book_num = objJson.getJsonObject(name);
                                                tempBld.add("book_num", book_num.getJsonArray("book_num"));
                                                tempBld.add("chapter_limit", book_num.getJsonArray("chapter_limit"));
                                                tempBld.add("verse_limit", book_num.getJsonArray("verse_limit"));
                                                JsonObject temp = tempBld.build();
                                                String versionindex_str = temp.toString(); //.replaceAll("\"", "\\\\\"");
                                                //add new column to METADATA table name+"IDX" VARCHAR(5000)
                                                //update METADATA table SET name+"IDX" = versionindex_str
                                                try (ResultSet rs1 = dbMeta.getColumns(null, null, "METADATA", name + "IDX")) {
                                                    boolean updateFlag = false;
                                                    if (rs1.next()) {
                                                        //column already exists
                                                        updateFlag = true;
                                                    } else {
                                                        try (Statement stmt4 = instance.conn.createStatement()) {
                                                            String sql = "ALTER TABLE METADATA ADD COLUMN " + name + "IDX VARCHAR(5000)";
                                                            boolean colAdded = stmt4.execute(sql);
                                                            if (colAdded == false) {
                                                                int count = stmt4.getUpdateCount();
                                                                if (count == -1) {
                                                                    //System.out.println("The result is a ResultSet object or there are no more results.");
                                                                } else if (count == 0) {
                                                                    //0 rows affected
                                                                    updateFlag = true;
                                                                }
                                                            }
                                                            stmt4.close();
                                                        }
                                                    }
                                                    if (updateFlag) {
                                                        try (Statement stmt5 = instance.conn.createStatement()) {
                                                            String sql1 = "UPDATE METADATA SET " + name + "IDX='"
                                                                    + versionindex_str + "' WHERE ID=0";
                                                            boolean rowsUpdated = stmt5.execute(sql1);
                                                            stmt5.close();
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                        stmt.close();
                    }
                }
                rs3.close();
            }
            instance.disconnect();
        } catch (SQLException ex) {
            Logger.getLogger(BibleGetDB.class.getName()).log(Level.SEVERE, null, ex);
            return false;
        }
        return true;
    }
    return false;
}
From source file:com.wikia.webdriver.pageobjectsfactory.pageobject.WikiBasePageObject.java
/**
 * This method should be called after a click-tracking test, in order to verify
 * whether the expected events were tracked.
 */
public void compareTrackedEventsTo(List<JsonObject> expectedEventsList) {
    jsActions.execute(ClickTrackingScriptsProvider.EVENTS_CAPTURE_INSTALLATION);
    List<JsonObject> trackedEventsArrayList = new ArrayList<JsonObject>();
    List<JsonObject> trackedEventsList;
    JavascriptExecutor js = (JavascriptExecutor) driver;
    // prepare list of tracked events
    Object event = js.executeScript("return selenium_popEvent()");
    StringReader reader = new StringReader(event.toString());
    JsonReader jsonReader = Json.createReader(reader);
    while (!(event == null)) {
        reader = new StringReader(event.toString());
        jsonReader = Json.createReader(reader);
        trackedEventsArrayList.add(jsonReader.readObject());
        // take next tracked event
        event = js.executeScript("return selenium_popEvent()");
    }
    trackedEventsList = trackedEventsArrayList;
    ClickTrackingSupport support = new ClickTrackingSupport();
    support.compare(expectedEventsList, trackedEventsList);
}