List of usage examples for org.apache.commons.io IOUtils toInputStream
public static InputStream toInputStream(String input, String encoding) throws IOException
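Before the project-specific examples below, here is a minimal, self-contained sketch of the call itself (the class name and sample strings are illustrative, not taken from any of the listed source files). It shows the String-encoding overload from the signature above next to the Charset-based overload that newer Commons IO releases also provide.

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;

public class ToInputStreamExample {

    public static void main(String[] args) throws IOException {
        // Encoding passed by name, matching the signature above (declares IOException).
        InputStream byName = IOUtils.toInputStream("<root>hello</root>", "UTF-8");

        // Charset-based overload: no checked exception to handle, since the
        // Charset object is already validated.
        InputStream byCharset = IOUtils.toInputStream("<root>hello</root>", StandardCharsets.UTF_8);

        // Round-trip one of the streams back to a String to show the content survives.
        System.out.println(IOUtils.toString(byCharset, StandardCharsets.UTF_8));

        byName.close();
        byCharset.close();
    }
}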
From source file:com.cognifide.aet.rest.XUnitServlet.java
private InputStream generateXML(Testsuites xUnitModel) throws JAXBException, IOException {
    Writer writer = new StringWriter();
    writer.write(XML_HEADER);
    prepareJaxbMarshaller().marshal(xUnitModel, writer);
    return IOUtils.toInputStream(writer.toString(), Charsets.UTF_8);
}
From source file:ch.entwine.weblounge.common.impl.content.page.LazyPageImpl.java
/**
 * Loads the complete page.
 */
protected void loadPage() {
    try {
        // Get a hold of the page reader
        PageReader reader = (readerRef != null) ? readerRef.get() : null;
        if (reader == null) {
            reader = new PageReader();
            // No need to keep the reference, since we're done after this
        }

        // Load the page
        page = reader.read(IOUtils.toInputStream(pageXml, "utf-8"), uri.getSite());
        isHeaderLoaded = true;
        isBodyLoaded = true;
        cleanupAfterLoading();
    } catch (Throwable e) {
        logger.error("Failed to lazy-load body of {}", uri);
        throw new IllegalStateException("Failed to lazy-load body of " + uri, e);
    }
}
From source file:cat.calidos.morfeu.model.injection.URIToParsedModule.java
@Produces @Named("FetchedTransformedContent") public static InputStream fetchedTransformedContent(@Named("FetchableContentURI") URI uri, @Named("FetchedRawContent") InputStream fetchedRawContent, YAMLMapper mapper, Producer<Model> model) throws FetchingException, TransformException { // get the yaml and apply the transformation from yaml to xml try {/*from ww w . j a v a 2 s . com*/ log.trace("Converting yaml to xml '{}'", uri); JsonNode yaml = mapper.readTree(fetchedRawContent); Map<String, Object> values = new HashMap<String, Object>(2); List<CellModel> rootCellModels = model.get().get().getRootCellModels(); values.put("cellmodels", rootCellModels); values.put("yaml", yaml); values.put("case", "yaml-to-xml"); //rootCellModels.stream().map(cm -> cm.getName()).forEach(name -> log.trace("CellModel:{}",name)); String transformedContent = DaggerViewComponent.builder() .withTemplate("templates/transform/content-yaml-to-xml.twig").withValue(values).build() .render(); log.trace("Transformed yaml to xml '{}'", transformedContent); return IOUtils.toInputStream(transformedContent, Config.DEFAULT_CHARSET); } catch (IOException e) { log.error("Could not fetch yaml '{}' ({}", uri, e); throw new FetchingException("Problem when fetching yaml '" + uri + "'", e); } catch (InterruptedException | ExecutionException e) { log.error("Could not transform yaml to xml '{}' ({}", uri, e); throw new TransformException("Problem when transforming yaml to xml '" + uri + "'", e); } }
From source file:de.tu_dortmund.ub.data.dswarm.Task.java
@Override
public String call() {

    // init logger
    PropertyConfigurator.configure(config.getProperty("service.log4j-conf"));

    logger.info("[" + config.getProperty("service.name") + "] " + "Starting 'Task' ...");

    // init IDs of the prototype project
    String dataModelID = config.getProperty("prototype.dataModelID");
    String projectID = config.getProperty("prototype.projectID");
    String outputDataModelID = config.getProperty("prototype.outputDataModelID");

    // init process values
    String inputResourceID = null;
    String message = null;

    try {

        // get the resource id of the current data model >> updateResourceID replaces resourceID
        String updateResourceID = null;
        try {
            updateResourceID = getProjectResourceID(dataModelID);
        } catch (Exception e1) {
            e1.printStackTrace();
        }
        logger.info("[" + config.getProperty("service.name") + "] updateResourceID = " + updateResourceID);

        // upload resource and update a InputDataModel
        String inputResourceJson = uploadFileAndUpdateResource(updateResourceID, resource,
                "resource for project '" + resource,
                config.getProperty("project.name") + "' - case " + cnt);
        JsonReader jsonReader = Json.createReader(IOUtils.toInputStream(inputResourceJson, "UTF-8"));
        inputResourceID = jsonReader.readObject().getString("uuid");
        logger.info("[" + config.getProperty("service.name") + "] inputResourceID = " + inputResourceID);

        if (updateResourceID != null) {

            // update the datamodel (will use it's (update) resource)
            updateDataModel(dataModelID);

            // configuration and processing of the task
            String jsonResponse = executeTask(dataModelID, projectID, outputDataModelID);

            if (jsonResponse != null) {

                if (Boolean.parseBoolean(config.getProperty("results.persistInFolder"))) {

                    if (Boolean.parseBoolean(config.getProperty("results.writeDMPJson"))) {
                        // save DMP results in files
                        FileUtils.writeStringToFile(new File(config.getProperty("results.folder")
                                + File.separatorChar + dataModelID + "." + cnt + ".json"), jsonResponse);
                    }

                    // build rdf graph
                    ValueFactory factory = ValueFactoryImpl.getInstance();
                    Graph graph = new LinkedHashModel();
                    URI graphUri = factory.createURI(config.getProperty("results.rdf.graph"));
                    URI subject = null;
                    URI predicate = null;
                    URI object = null;
                    Literal literal = null;
                    Statement statement = null;

                    JsonReader dmpJsonResult = Json.createReader(IOUtils.toInputStream(jsonResponse, "UTF-8"));
                    JsonArray records = dmpJsonResult.readArray();

                    for (JsonObject record : records.getValuesAs(JsonObject.class)) {

                        subject = factory
                                .createURI(record.getJsonString("__record_id").toString().replaceAll("\"", ""));

                        for (JsonObject triple : record.getJsonArray("__record_data")
                                .getValuesAs(JsonObject.class)) {

                            for (String key : triple.keySet()) {

                                if (key.endsWith("rdf-syntax-ns#type")) {
                                    predicate = RDF.TYPE;
                                    object = factory.createURI(
                                            triple.getJsonString(key).toString().replaceAll("\"", ""));
                                    statement = factory.createStatement(subject, predicate, object, graphUri);
                                    graph.add(statement);
                                } else {
                                    predicate = factory.createURI(key);

                                    switch (triple.get(key).getValueType().toString()) {
                                        case "STRING": {
                                            try {
                                                object = factory.createURI(
                                                        triple.getJsonString(key).toString().replaceAll("\"", ""));
                                                statement = factory.createStatement(subject, predicate, object, graphUri);
                                                graph.add(statement);
                                            } catch (Exception e) {
                                                literal = factory.createLiteral(
                                                        triple.getJsonString(key).toString().replaceAll("\"", ""));
                                                statement = factory.createStatement(subject, predicate, literal, graphUri);
                                                graph.add(statement);
                                            }
                                            break;
                                        }
                                        case "ARRAY": {
                                            for (JsonString value : triple.getJsonArray(key)
                                                    .getValuesAs(JsonString.class)) {
                                                try {
                                                    object = factory.createURI(value.toString().replaceAll("\"", ""));
                                                    statement = factory.createStatement(subject, predicate, object, graphUri);
                                                    graph.add(statement);
                                                } catch (Exception e) {
                                                    literal = factory.createLiteral(value.toString().replaceAll("\"", ""));
                                                    statement = factory.createStatement(subject, predicate, literal, graphUri);
                                                    graph.add(statement);
                                                }
                                            }
                                            break;
                                        }
                                        default: {
                                            logger.info("Unhandled ValueType: " + triple.get(key).getValueType());
                                        }
                                    }
                                }
                            }
                        }
                    }

                    if (graph.size() > 0) {

                        // save rdf data as 'results.rdf.format' in 'results.folder'
                        RDFFormat format = null;
                        switch (config.getProperty("results.rdf.format")) {
                            case "xml": {
                                format = RDFFormat.RDFXML;
                                break;
                            }
                            case "nquads": {
                                format = RDFFormat.NQUADS;
                                break;
                            }
                            case "jsonld": {
                                format = RDFFormat.JSONLD;
                                break;
                            }
                            case "ttl": {
                                format = RDFFormat.TURTLE;
                                break;
                            }
                            default: {
                                format = RDFFormat.RDFXML;
                            }
                        }

                        try {
                            FileOutputStream out = new FileOutputStream(config.getProperty("results.folder")
                                    + File.separatorChar + dataModelID + "." + cnt + ".rdf."
                                    + config.getProperty("results.rdf.format"));
                            RDFWriter writer = Rio.createWriter(format, out);

                            writer.startRDF();
                            for (Statement st : graph) {
                                writer.handleStatement(st);
                            }
                            writer.endRDF();

                            out.close();
                        } catch (RDFHandlerException | IOException e) {
                            e.printStackTrace();
                        }

                        message = "'" + resource + "' transformed. results in '"
                                + config.getProperty("results.folder") + File.separatorChar + dataModelID + "."
                                + cnt + ".rdf." + config.getProperty("results.rdf.format") + "'";
                    } else {
                        message = "'" + resource + "' transformed but result is empty.";
                    }
                }
            } else {
                message = "'" + resource + "' not transformed: error in task execution.";
            }
        }
    } catch (Exception e) {
        logger.error("[" + config.getProperty("service.name") + "] Processing resource '" + resource
                + "' failed with a " + e.getClass().getSimpleName());
        e.printStackTrace();
    }

    return message;
}
From source file:de.tu_dortmund.ub.data.dswarm.Ingest.java
public String call() {

    final String serviceName = config.getProperty(TPUStatics.SERVICE_NAME_IDENTIFIER);
    final String engineDswarmAPI = config.getProperty(TPUStatics.ENGINE_DSWARM_API_IDENTIFIER);

    LOG.info(String.format("[%s] Starting 'Ingest (Task)' no. '%d' ...", serviceName, cnt));

    final String dataModelID = this.dataModelID;
    final String updateResourceID = resourceID;

    try {
        // build a InputDataModel for the resource
        final String name = String.format("resource for project '%s'", resource);
        final String description = String.format("'%s' - case %d", projectName, cnt);
        final String inputResourceJson = uploadFileAndUpdateResource(updateResourceID, resource, name,
                description, serviceName, engineDswarmAPI);

        final JsonReader jsonReader;
        if (inputResourceJson == null) {
            LOG.error("something went wrong at resource update");
            return null;
        }

        jsonReader = Json.createReader(IOUtils.toInputStream(inputResourceJson, APIStatics.UTF_8));
        final String inputResourceID = jsonReader.readObject().getString(DswarmBackendStatics.UUID_IDENTIFIER);
        LOG.info(String.format("[%s] inputResourceID = %s", serviceName, inputResourceID));

        if (inputResourceID != null) {
            // update the datamodel (will use it's (update) resource)
            updateDataModelContent(dataModelID, serviceName, engineDswarmAPI);

            // we don't need to transform after each ingest of a slice of records,
            // so transform and export will be done separately
            LOG.info(String.format("[%s] (Note: Only ingest, but no transformation or export done.)", serviceName));
        }

        // no need to clean up resources or datamodels anymore

        LOG.info(String.format("[%s] Finished 'Ingest (Task)' no. '%d' ...", serviceName, cnt));

        return null;
    } catch (final Exception e) {
        final String message = String.format("[%s] Processing resource '%s' failed with a %s", serviceName,
                resource, e.getClass().getSimpleName());
        LOG.error(message, e);
        throw new RuntimeException(message, e);
    }
}
From source file:ch.cyberduck.core.editor.AbstractEditorTest.java
@Test
public void testOpen() throws Exception {
    final AtomicBoolean t = new AtomicBoolean();
    final NullSession session = new NullSession(new Host(new TestProtocol())) {
        @Override
        @SuppressWarnings("unchecked")
        public <T> T _getFeature(final Class<T> type) {
            if (type.equals(Read.class)) {
                return (T) new Read() {
                    @Override
                    public InputStream read(final Path file, final TransferStatus status,
                            final ConnectionCallback callback) throws BackgroundException {
                        t.set(true);
                        return IOUtils.toInputStream("content", Charset.defaultCharset());
                    }

                    @Override
                    public boolean offset(final Path file) {
                        assertEquals(new Path("/f", EnumSet.of(Path.Type.file)), file);
                        return false;
                    }
                };
            }
            return super._getFeature(type);
        }
    };
    final AtomicBoolean e = new AtomicBoolean();
    final Path file = new Path("/f", EnumSet.of(Path.Type.file));
    file.attributes().setSize("content".getBytes().length);
    final AbstractEditor editor = new AbstractEditor(new Application("com.editor"),
            new StatelessSessionPool(new TestLoginConnectionService(), session, PathCache.empty(),
                    new DisabledTranscriptListener(), new DefaultVaultRegistry(new DisabledPasswordCallback())),
            file, new DisabledProgressListener()) {
        @Override
        protected void edit(final ApplicationQuitCallback quit, final FileWatcherListener listener)
                throws IOException {
            e.set(true);
        }

        @Override
        protected void watch(final Local local, final FileWatcherListener listener) throws IOException {
            //
        }
    };
    editor.open(new DisabledApplicationQuitCallback(), new DisabledTransferErrorCallback(),
            new DisabledFileWatcherListener()).run(session);
    assertTrue(t.get());
    assertNotNull(editor.getLocal());
    assertTrue(e.get());
    assertTrue(editor.getLocal().exists());
}
From source file:com.telefonica.euro_iaas.sdc.installator.InstallatorPuppetTest.java
@Before
public void setup() throws ClientProtocolException, IOException, OpenStackException {

    Product product = new Product("testProduct", "description");
    Metadata metadata = new Metadata("installator", "puppet");
    List<Metadata> metadatas = new ArrayList<Metadata>();
    metadatas.add(metadata);
    product.setMetadatas(metadatas);

    host = new VM("fqn", "ip", "testName", "domain");
    os = new OS("os1", "1", "os1 description", "v1");
    host.setOsType(os.getOsType());

    productRelease = new ProductRelease("version", "releaseNotes", product, Arrays.asList(os), null);
    productInstance = new ProductInstance(productRelease, Status.INSTALLING, host, "vdc");

    attributeList = new ArrayList<Attribute>();
    attribute1 = new Attribute("user", "pepito");
    attributeList.add(attribute1);

    // client = (HttpClient) new HTTPClientMock();
    client = mock(HttpClient.class);
    response = mock(HttpResponse.class);
    entity = mock(HttpEntity.class);
    statusLine = mock(StatusLine.class);
    openStackRegion = mock(OpenStackRegion.class);

    when(client.execute((HttpUriRequest) Mockito.anyObject())).thenReturn(response);
    when(response.getEntity()).thenReturn(entity);

    String source = "";
    InputStream in = IOUtils.toInputStream(source, "UTF-8");
    when(entity.getContent()).thenReturn(in);
    when(response.getStatusLine()).thenReturn(statusLine);

    propertiesProvider = mock(SystemPropertiesProvider.class);
    when(propertiesProvider.getProperty("PUPPET_MASTER_URL"))
            .thenReturn("http://130.206.82.190:8080/puppetwrapper/");

    puppetInstallator = new InstallatorPuppetImpl();
    puppetInstallator.setClient(client);
    puppetInstallator.setOpenStackRegion(openStackRegion);

    when(openStackRegion.getPuppetWrapperEndPoint("token")).thenReturn("http://");
}
From source file:com.twinsoft.convertigo.eclipse.editors.jscript.JscriptStepEditor.java
public void init(IEditorSite site, IEditorInput input) throws PartInitException {
    // Get from the input the necessary objects, such as the temp IFile to create to hold the jscript code itself.
    file = ((FileEditorInput) input).getFile();
    step = (SimpleStep) ((JscriptStepEditorInput) input).getStep();

    try {
        file.setCharset("UTF-8", null);
    } catch (CoreException e1) {
        ConvertigoPlugin.logDebug("Failed to set UTF-8 charset for editor file: " + e1);
    }

    try {
        // Create a temp file to hold step jscript code
        InputStream sbisHandlersStream = IOUtils.toInputStream(step.getExpression(), "UTF-8");

        // Overrides temp file with step jscript code
        if (file.exists()) {
            try {
                file.setContents(sbisHandlersStream, true, false, null);
            } catch (CoreException e) {
                ConvertigoPlugin.logException(e, "Error while editing step jscript code");
            }
        }
        // Create a temp file to hold step jscript code
        else {
            try {
                file.create(sbisHandlersStream, true, null);
            } catch (CoreException e) {
                ConvertigoPlugin.logException(e, "Error while editing the step jscript code");
            }
        }

        setSite(site);
        setInput(input);
        eSite = site;
        eInput = input;

        String[] splits = file.getName().split(" ");
        setPartName(splits[splits.length - 1]);
    } catch (Exception e) {
        throw new PartInitException("Unable to create JS editor", e);
    }
}
From source file:com.elsevier.spark_xml_utils.xslt.XSLTProcessor.java
/**
 * Initialization to improve performance for repetitive invocations of transformations.
 *
 * @throws XSLTException
 */
private void init() throws XSLTException {

    try {

        // Get the processor
        proc = new Processor(false);

        // Set any specified configuration properties for the processor
        if (featureMappings != null) {
            for (Entry<String, Object> entry : featureMappings.entrySet()) {
                proc.setConfigurationProperty(entry.getKey(), entry.getValue());
            }
        }

        // Get the xslt compiler
        XsltCompiler xsltCompiler = proc.newXsltCompiler();

        // Get the document builder (used for params)
        builder = proc.newDocumentBuilder();

        // Set the uri resolver (for imported/included stylesheets)
        xsltCompiler.setURIResolver(new S3URIResolver());

        // Compile the stylesheet
        XsltExecutable exp = xsltCompiler
                .compile(new StreamSource(IOUtils.toInputStream(stylesheet, CharEncoding.UTF_8)));

        // Set up the output for the transformation
        baos = new ByteArrayOutputStream();
        serializer = proc.newSerializer(baos);
        serializer.setOutputStream(baos);
        // Appears ok to always set output property to xml
        serializer.setOutputProperty(Serializer.Property.METHOD, "xml");
        serializer.setOutputProperty(Serializer.Property.OMIT_XML_DECLARATION, "yes");
        serializer.setProcessor(proc);
        trans = exp.load();
        trans.setDestination(serializer);

    } catch (SaxonApiException e) {
        log.error("Problems creating an XSLTProcessor. " + e.getMessage(), e);
        throw new XSLTException(e.getMessage());
    } catch (IOException e) {
        log.error("Problems creating an XSLTProcessor. " + e.getMessage(), e);
        throw new XSLTException(e.getMessage());
    }
}
From source file:com.twinsoft.convertigo.eclipse.editors.jscript.JscriptTransactionEditor.java
public void init(IEditorSite site, IEditorInput input) throws PartInitException {
    // Get from the input the necessary objects, such as the temp IFile to create to hold the handlers data and the Transaction object itself.
    file = ((FileEditorInput) input).getFile();
    transaction = (Transaction) ((JscriptTransactionEditorInput) input).getTransaction();

    try {
        file.setCharset("UTF-8", null);
    } catch (CoreException e1) {
        ConvertigoPlugin.logDebug("Failed to set UTF-8 charset for editor file: " + e1);
    }

    try {
        // Create a temp file to hold transaction's data
        InputStream sbisHandlersStream = IOUtils.toInputStream(transaction.handlers, "UTF-8");

        // Overrides temp file with transaction's handler data
        if (file.exists()) {
            try {
                file.setContents(sbisHandlersStream, true, false, null);
            } catch (CoreException e) {
                ConvertigoPlugin.logException(e, "Error while editing the transaction handlers");
            }
        }
        // Create a temp file to hold transaction's handler data
        else {
            try {
                file.create(sbisHandlersStream, true, null);
            } catch (CoreException e) {
                ConvertigoPlugin.logException(e, "Error while editing the transaction handlers");
            }
        }

        setSite(site);
        setInput(input);
        eSite = site;
        eInput = input;

        setPartName(file.getName());
    } catch (Exception e) {
        throw new PartInitException("Unable to create JS editor", e);
    }
}