List of usage examples for java.io.IOException getCause()
Signature (inherited from java.lang.Throwable): public synchronized Throwable getCause()
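The examples below are complete methods lifted from real projects. As a quick orientation before reading them, here is a minimal, self-contained sketch of the two recurring ways getCause() is used in them: checking the type of the cause before deciding how to rethrow, and walking the cause chain for logging. The readConfig() helper and the SocketException cause are invented for illustration only.

import java.io.IOException;
import java.net.SocketException;

public class GetCauseExample {

    // Hypothetical helper: simulates a low-level failure and wraps it in an IOException.
    static void readConfig() throws IOException {
        try {
            throw new SocketException("connection reset");
        } catch (SocketException e) {
            throw new IOException("could not read remote config", e);
        }
    }

    public static void main(String[] args) {
        try {
            readConfig();
        } catch (IOException e) {
            // getCause() returns the nested exception passed to the constructor,
            // or null when the IOException was created without a cause.
            Throwable cause = e.getCause();
            if (cause instanceof SocketException) {
                System.err.println("network problem: " + cause.getMessage());
            }
            // Walking the whole cause chain, as the ExchangeCommand example below does.
            for (Throwable t = e.getCause(); t != null; t = t.getCause()) {
                System.err.println("caused by: " + t.getMessage());
            }
        }
    }
}

The SES-1016 blocks in the Sesame/RDF4J controllers below follow the same shape, rethrowing the cause directly when it is an HTTPException and wrapping it otherwise.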
From source file:org.openrdf.http.server.repository.RepositoryController.java
@Override
protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response)
        throws Exception {
    String reqMethod = request.getMethod();
    String queryStr = request.getParameter(QUERY_PARAM_NAME);

    if (METHOD_POST.equals(reqMethod)) {
        String mimeType = HttpServerUtil.getMIMEType(request.getContentType());
        if (!(Protocol.FORM_MIME_TYPE.equals(mimeType) || Protocol.SPARQL_QUERY_MIME_TYPE.equals(mimeType))) {
            throw new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported MIME type: " + mimeType);
        }
        if (Protocol.SPARQL_QUERY_MIME_TYPE.equals(mimeType)) {
            // The query should be the entire body
            try {
                queryStr = IOUtils.toString(request.getReader());
            } catch (IOException e) {
                throw new HTTPException(HttpStatus.SC_BAD_REQUEST, "Error reading request message body", e);
            }
            if (queryStr.isEmpty())
                queryStr = null;
        }
    } else if (METHOD_DELETE.equals(reqMethod)) {
        String repId = RepositoryInterceptor.getRepositoryID(request);
        logger.info("DELETE request invoked for repository '" + repId + "'");
        if (queryStr != null) {
            logger.warn("query supplied on repository delete request, aborting delete");
            throw new HTTPException(HttpStatus.SC_BAD_REQUEST,
                    "Repository delete error: query supplied with request");
        }
        if (SystemRepository.ID.equals(repId)) {
            logger.warn("attempted delete of SYSTEM repository, aborting");
            throw new HTTPException(HttpStatus.SC_FORBIDDEN, "SYSTEM Repository can not be deleted");
        }
        try {
            // we need to forcibly close the default repository connection
            // opened for this repository by the interceptor.
            RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request);
            synchronized (repositoryCon) {
                repositoryCon.close();
            }
            boolean success = repositoryManager.removeRepository(repId);
            if (success) {
                logger.info("DELETE request successfully completed");
                return new ModelAndView(EmptySuccessView.getInstance());
            } else {
                logger.error("error while attempting to delete repository '" + repId + "'");
                throw new HTTPException(HttpStatus.SC_BAD_REQUEST,
                        "could not locate repository configuration for repository '" + repId + "'.");
            }
        } catch (OpenRDFException e) {
            logger.error("error while attempting to delete repository '" + repId + "'", e);
            throw new ServerHTTPException("Repository delete error: " + e.getMessage(), e);
        }
    }

    Repository repository = RepositoryInterceptor.getRepository(request);

    int qryCode = 0;
    if (logger.isInfoEnabled() || logger.isDebugEnabled()) {
        qryCode = String.valueOf(queryStr).hashCode();
    }

    boolean headersOnly = false;
    if (METHOD_GET.equals(reqMethod)) {
        logger.info("GET query {}", qryCode);
    } else if (METHOD_HEAD.equals(reqMethod)) {
        logger.info("HEAD query {}", qryCode);
        headersOnly = true;
    } else if (METHOD_POST.equals(reqMethod)) {
        logger.info("POST query {}", qryCode);
    }
    logger.debug("query {} = {}", qryCode, queryStr);

    if (queryStr != null) {
        RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request);
        synchronized (repositoryCon) {
            Query query = getQuery(repository, repositoryCon, queryStr, request, response);

            View view;
            Object queryResult;
            FileFormatServiceRegistry<? extends FileFormat, ?> registry;

            try {
                if (query instanceof TupleQuery) {
                    TupleQuery tQuery = (TupleQuery) query;
                    queryResult = headersOnly ? null : tQuery.evaluate();
                    registry = TupleQueryResultWriterRegistry.getInstance();
                    view = TupleQueryResultView.getInstance();
                } else if (query instanceof GraphQuery) {
                    GraphQuery gQuery = (GraphQuery) query;
                    queryResult = headersOnly ? null : gQuery.evaluate();
                    registry = RDFWriterRegistry.getInstance();
                    view = GraphQueryResultView.getInstance();
                } else if (query instanceof BooleanQuery) {
                    BooleanQuery bQuery = (BooleanQuery) query;
                    queryResult = headersOnly ? null : bQuery.evaluate();
                    registry = BooleanQueryResultWriterRegistry.getInstance();
                    view = BooleanQueryResultView.getInstance();
                } else {
                    throw new ClientHTTPException(SC_BAD_REQUEST,
                            "Unsupported query type: " + query.getClass().getName());
                }
            } catch (QueryInterruptedException e) {
                logger.info("Query interrupted", e);
                throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "Query evaluation took too long");
            } catch (QueryEvaluationException e) {
                logger.info("Query evaluation error", e);
                if (e.getCause() != null && e.getCause() instanceof HTTPException) {
                    // custom signal from the backend, throw as HTTPException directly (see SES-1016).
                    throw (HTTPException) e.getCause();
                } else {
                    throw new ServerHTTPException("Query evaluation error: " + e.getMessage());
                }
            }

            Object factory = ProtocolUtil.getAcceptableService(request, response, registry);

            Map<String, Object> model = new HashMap<String, Object>();
            model.put(QueryResultView.FILENAME_HINT_KEY, "query-result");
            model.put(QueryResultView.QUERY_RESULT_KEY, queryResult);
            model.put(QueryResultView.FACTORY_KEY, factory);
            model.put(QueryResultView.HEADERS_ONLY, headersOnly);

            return new ModelAndView(view, model);
        }
    } else {
        throw new ClientHTTPException(SC_BAD_REQUEST, "Missing parameter: " + QUERY_PARAM_NAME);
    }
}
From source file:org.openrdf.http.server.repository.statements.StatementsController.java
private ModelAndView getSparqlUpdateResult(Repository repository, HttpServletRequest request,
        HttpServletResponse response)
        throws ServerHTTPException, ClientHTTPException, HTTPException {
    ProtocolUtil.logRequestParameters(request);

    String mimeType = HttpServerUtil.getMIMEType(request.getContentType());

    String sparqlUpdateString;
    if (Protocol.SPARQL_UPDATE_MIME_TYPE.equals(mimeType)) {
        // The query should be the entire body
        try {
            sparqlUpdateString = IOUtils.toString(request.getReader());
        } catch (IOException e) {
            throw new ClientHTTPException(SC_BAD_REQUEST, "Error reading request message body", e);
        }
        if (sparqlUpdateString.isEmpty())
            sparqlUpdateString = null;
    } else {
        sparqlUpdateString = request.getParameterValues(Protocol.UPDATE_PARAM_NAME)[0];
    }

    // default query language is SPARQL
    QueryLanguage queryLn = QueryLanguage.SPARQL;
    String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME);
    logger.debug("query language param = {}", queryLnStr);
    if (queryLnStr != null) {
        queryLn = QueryLanguage.valueOf(queryLnStr);
        if (queryLn == null) {
            throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr);
        }
    }

    String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME);

    // determine if inferred triples should be included in query evaluation
    boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true);

    // build a dataset, if specified
    String[] defaultRemoveGraphURIs = request.getParameterValues(REMOVE_GRAPH_PARAM_NAME);
    String[] defaultInsertGraphURIs = request.getParameterValues(INSERT_GRAPH_PARAM_NAME);
    String[] defaultGraphURIs = request.getParameterValues(USING_GRAPH_PARAM_NAME);
    String[] namedGraphURIs = request.getParameterValues(USING_NAMED_GRAPH_PARAM_NAME);

    SimpleDataset dataset = new SimpleDataset();

    if (defaultRemoveGraphURIs != null) {
        for (String graphURI : defaultRemoveGraphURIs) {
            try {
                IRI uri = createURIOrNull(repository, graphURI);
                dataset.addDefaultRemoveGraph(uri);
            } catch (IllegalArgumentException e) {
                throw new ClientHTTPException(SC_BAD_REQUEST,
                        "Illegal URI for default remove graph: " + graphURI);
            }
        }
    }

    if (defaultInsertGraphURIs != null && defaultInsertGraphURIs.length > 0) {
        String graphURI = defaultInsertGraphURIs[0];
        try {
            IRI uri = createURIOrNull(repository, graphURI);
            dataset.setDefaultInsertGraph(uri);
        } catch (IllegalArgumentException e) {
            throw new ClientHTTPException(SC_BAD_REQUEST,
                    "Illegal URI for default insert graph: " + graphURI);
        }
    }

    if (defaultGraphURIs != null) {
        for (String defaultGraphURI : defaultGraphURIs) {
            try {
                IRI uri = createURIOrNull(repository, defaultGraphURI);
                dataset.addDefaultGraph(uri);
            } catch (IllegalArgumentException e) {
                throw new ClientHTTPException(SC_BAD_REQUEST,
                        "Illegal URI for default graph: " + defaultGraphURI);
            }
        }
    }

    if (namedGraphURIs != null) {
        for (String namedGraphURI : namedGraphURIs) {
            try {
                IRI uri = createURIOrNull(repository, namedGraphURI);
                dataset.addNamedGraph(uri);
            } catch (IllegalArgumentException e) {
                throw new ClientHTTPException(SC_BAD_REQUEST,
                        "Illegal URI for named graph: " + namedGraphURI);
            }
        }
    }

    try {
        RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request);
        synchronized (repositoryCon) {
            Update update = repositoryCon.prepareUpdate(queryLn, sparqlUpdateString, baseURI);

            update.setIncludeInferred(includeInferred);

            if (dataset != null) {
                update.setDataset(dataset);
            }

            // determine if any variable bindings have been set on this update.
            @SuppressWarnings("unchecked")
            Enumeration<String> parameterNames = request.getParameterNames();

            while (parameterNames.hasMoreElements()) {
                String parameterName = parameterNames.nextElement();

                if (parameterName.startsWith(BINDING_PREFIX)
                        && parameterName.length() > BINDING_PREFIX.length()) {
                    String bindingName = parameterName.substring(BINDING_PREFIX.length());
                    Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName,
                            repository.getValueFactory());
                    update.setBinding(bindingName, bindingValue);
                }
            }

            update.execute();
        }

        return new ModelAndView(EmptySuccessView.getInstance());
    } catch (UpdateExecutionException e) {
        if (e.getCause() != null && e.getCause() instanceof HTTPException) {
            // custom signal from the backend, throw as HTTPException directly (see SES-1016).
            throw (HTTPException) e.getCause();
        } else {
            throw new ServerHTTPException("Repository update error: " + e.getMessage(), e);
        }
    } catch (RepositoryException e) {
        if (e.getCause() != null && e.getCause() instanceof HTTPException) {
            // custom signal from the backend, throw as HTTPException directly (see SES-1016).
            throw (HTTPException) e.getCause();
        } else {
            throw new ServerHTTPException("Repository update error: " + e.getMessage(), e);
        }
    } catch (MalformedQueryException e) {
        ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage());
        throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString());
    }
}
From source file:fr.certu.chouette.command.ExchangeCommand.java
/**
 * @param manager
 * @param parameters
 * @return
 */
@SuppressWarnings("incomplete-switch")
public List<NeptuneIdentifiedObject> executeImport(INeptuneManager<NeptuneIdentifiedObject> manager,
        Map<String, List<String>> parameters) {
    String reportFileName = getSimpleString(parameters, "reportfile", "");
    String reportFormat = getSimpleString(parameters, "reportformat", "txt");
    boolean append = getBoolean(parameters, "append");
    String format = getSimpleString(parameters, "format");

    PrintStream stream = System.out;
    String encoding = Charset.defaultCharset().toString();
    if (!reportFileName.isEmpty()) {
        try {
            if (reportFormat.equals("json")) {
                encoding = "UTF-8";
            }
            stream = new PrintStream(new FileOutputStream(new File(reportFileName), append), true, encoding);
        } catch (IOException e) {
            System.err.println("cannot open file :" + reportFileName + " " + e.getMessage());
            reportFileName = "";
        }
    }

    try {
        List<FormatDescription> formats = manager.getImportFormats(null);
        FormatDescription description = null;
        for (FormatDescription formatDescription : formats) {
            if (formatDescription.getName().equalsIgnoreCase(format)) {
                description = formatDescription;
                break;
            }
        }
        if (description == null) {
            throw new IllegalArgumentException(
                    "format " + format + " unavailable, check command getImportFormats for list ");
        }

        List<ParameterValue> values = new ArrayList<ParameterValue>();
        for (ParameterDescription desc : description.getParameterDescriptions()) {
            String name = desc.getName();
            String key = name.toLowerCase();
            List<String> vals = parameters.get(key);
            if (vals == null) {
                if (desc.isMandatory()) {
                    throw new IllegalArgumentException(
                            "parameter -" + name + " is required, check command getImportFormats for list ");
                }
            } else {
                if (desc.isCollection()) {
                    ListParameterValue val = new ListParameterValue(name);
                    switch (desc.getType()) {
                    case FILEPATH:
                        val.setFilepathList(vals);
                        break;
                    case STRING:
                        val.setStringList(vals);
                        break;
                    case FILENAME:
                        val.setFilenameList(vals);
                        break;
                    default:
                        throw new IllegalArgumentException(
                                "parameter -" + name + " invalid, check command getImportFormats for list ");
                    }
                    values.add(val);
                } else {
                    if (vals.size() != 1) {
                        throw new IllegalArgumentException(
                                "parameter -" + name + " must be unique, check command getImportFormats for list ");
                    }
                    String simpleval = vals.get(0);
                    SimpleParameterValue val = new SimpleParameterValue(name);
                    switch (desc.getType()) {
                    case FILEPATH:
                        val.setFilepathValue(simpleval);
                        break;
                    case STRING:
                        val.setStringValue(simpleval);
                        break;
                    case FILENAME:
                        val.setFilenameValue(simpleval);
                        break;
                    case BOOLEAN:
                        val.setBooleanValue(Boolean.parseBoolean(simpleval));
                        break;
                    case INTEGER:
                        val.setIntegerValue(Long.parseLong(simpleval));
                        break;
                    case DATE:
                        val.setDateValue(toCalendar(simpleval));
                        break;
                    }
                    values.add(val);
                }
            }
        }

        ReportHolder ireport = new ReportHolder();
        ReportHolder vreport = new ReportHolder();
        List<NeptuneIdentifiedObject> beans = manager.doImport(null, format, values, ireport, vreport);
        if (ireport.getReport() != null) {
            Report r = ireport.getReport();
            if (reportFormat.equals("json")) {
                stream.println(r.toJSON());
            } else {
                stream.println(r.getLocalizedMessage());
                printItems(stream, "", r.getItems());
            }
        }
        if (vreport.getReport() != null) {
            Report r = vreport.getReport();
            if (reportFormat.equals("json")) {
                stream.println(r.toJSON());
            } else {
                stream.println(r.getLocalizedMessage());
                printItems(stream, "", r.getItems());
            }
        }
        if (beans == null || beans.isEmpty()) {
            System.out.println("import failed");
        } else {
            System.out.println("beans count = " + beans.size());
        }
        return beans;
    } catch (ChouetteException e) {
        log.error(e.getMessage());
        Throwable caused = e.getCause();
        while (caused != null) {
            log.error("caused by " + caused.getMessage());
            caused = caused.getCause();
        }
        throw new RuntimeException("import failed , see log for details");
    } finally {
        if (!reportFileName.isEmpty()) {
            stream.close();
        }
    }
}
From source file:org.eclipse.rdf4j.http.server.repository.statements.StatementsController.java
private ModelAndView getSparqlUpdateResult(Repository repository, HttpServletRequest request, HttpServletResponse response) throws ServerHTTPException, ClientHTTPException, HTTPException { ProtocolUtil.logRequestParameters(request); String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); String sparqlUpdateString;/*from w w w . j av a 2s . c o m*/ if (Protocol.SPARQL_UPDATE_MIME_TYPE.equals(mimeType)) { // The query should be the entire body try { sparqlUpdateString = IOUtils.toString(request.getReader()); } catch (IOException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Error reading request message body", e); } if (sparqlUpdateString.isEmpty()) sparqlUpdateString = null; } else { sparqlUpdateString = request.getParameterValues(Protocol.UPDATE_PARAM_NAME)[0]; } // default query language is SPARQL QueryLanguage queryLn = QueryLanguage.SPARQL; String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME); logger.debug("query language param = {}", queryLnStr); if (queryLnStr != null) { queryLn = QueryLanguage.valueOf(queryLnStr); if (queryLn == null) { throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr); } } String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); // determine if inferred triples should be included in query evaluation boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); // build a dataset, if specified String[] defaultRemoveGraphURIs = request.getParameterValues(REMOVE_GRAPH_PARAM_NAME); String[] defaultInsertGraphURIs = request.getParameterValues(INSERT_GRAPH_PARAM_NAME); String[] defaultGraphURIs = request.getParameterValues(USING_GRAPH_PARAM_NAME); String[] namedGraphURIs = request.getParameterValues(USING_NAMED_GRAPH_PARAM_NAME); SimpleDataset dataset = null; if (defaultRemoveGraphURIs != null || defaultInsertGraphURIs != null || defaultGraphURIs != null || namedGraphURIs != null) { dataset = new SimpleDataset(); } if (defaultRemoveGraphURIs != null) { for (String graphURI : defaultRemoveGraphURIs) { try { IRI uri = createURIOrNull(repository, graphURI); dataset.addDefaultRemoveGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default remove graph: " + graphURI); } } } if (defaultInsertGraphURIs != null && defaultInsertGraphURIs.length > 0) { String graphURI = defaultInsertGraphURIs[0]; try { IRI uri = createURIOrNull(repository, graphURI); dataset.setDefaultInsertGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default insert graph: " + graphURI); } } if (defaultGraphURIs != null) { for (String defaultGraphURI : defaultGraphURIs) { try { IRI uri = createURIOrNull(repository, defaultGraphURI); dataset.addDefaultGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default graph: " + defaultGraphURI); } } } if (namedGraphURIs != null) { for (String namedGraphURI : namedGraphURIs) { try { IRI uri = createURIOrNull(repository, namedGraphURI); dataset.addNamedGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for named graph: " + namedGraphURI); } } } final int maxQueryTime = ProtocolUtil.parseTimeoutParam(request); try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { Update update = repositoryCon.prepareUpdate(queryLn, sparqlUpdateString, baseURI); 
update.setIncludeInferred(includeInferred); update.setMaxExecutionTime(maxQueryTime); if (dataset != null) { update.setDataset(dataset); } // determine if any variable bindings have been set on this // update. @SuppressWarnings("unchecked") Enumeration<String> parameterNames = request.getParameterNames(); while (parameterNames.hasMoreElements()) { String parameterName = parameterNames.nextElement(); if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { String bindingName = parameterName.substring(BINDING_PREFIX.length()); Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, repository.getValueFactory()); update.setBinding(bindingName, bindingValue); } } update.execute(); return new ModelAndView(EmptySuccessView.getInstance()); } catch (QueryInterruptedException e) { throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "update execution took too long"); } catch (UpdateExecutionException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException // directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (RepositoryException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException // directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (MalformedQueryException e) { ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); } }
From source file:org.apache.geode.internal.cache.GemFireCacheImpl.java
/**
 * Initializes the contents of this <code>Cache</code> according to the declarative caching XML
 * file specified by the given <code>DistributedSystem</code>. Note that this operation cannot be
 * performed in the constructor because creating regions in the cache, etc. uses the cache itself
 * (which isn't initialized until the constructor returns).
 *
 * @throws CacheXmlException If something goes wrong while parsing the declarative caching XML
 *         file.
 * @throws TimeoutException If a {@link org.apache.geode.cache.Region#put(Object, Object)} times
 *         out while initializing the cache.
 * @throws CacheWriterException If a <code>CacheWriterException</code> is thrown while
 *         initializing the cache.
 * @throws RegionExistsException If the declarative caching XML file describes a region that
 *         already exists (including the root region).
 * @throws GatewayException If a <code>GatewayException</code> is thrown while initializing the
 *         cache.
 *
 * @see #loadCacheXml
 */
private void initializeDeclarativeCache()
        throws TimeoutException, CacheWriterException, GatewayException, RegionExistsException {
    URL url = getCacheXmlURL();
    String cacheXmlDescription = this.cacheConfig.getCacheXMLDescription();
    if (url == null && cacheXmlDescription == null) {
        if (isClient()) {
            determineDefaultPool();
            initializeClientRegionShortcuts(this);
        } else {
            initializeRegionShortcuts(this);
        }
        initializePdxRegistry();
        readyDynamicRegionFactory();
        return; // nothing needs to be done
    }

    try {
        logCacheXML(url, cacheXmlDescription);
        InputStream stream = null;
        if (cacheXmlDescription != null) {
            if (logger.isTraceEnabled()) {
                logger.trace("initializing cache with generated XML: {}", cacheXmlDescription);
            }
            stream = new StringBufferInputStream(cacheXmlDescription);
        } else {
            stream = url.openStream();
        }
        loadCacheXml(stream);
        try {
            stream.close();
        } catch (IOException ignore) {
        }
    } catch (IOException ex) {
        throw new CacheXmlException(
                LocalizedStrings.GemFireCache_WHILE_OPENING_CACHE_XML_0_THE_FOLLOWING_ERROR_OCCURRED_1
                        .toLocalizedString(new Object[] { url.toString(), ex }));
    } catch (CacheXmlException ex) {
        CacheXmlException newEx = new CacheXmlException(
                LocalizedStrings.GemFireCache_WHILE_READING_CACHE_XML_0_1
                        .toLocalizedString(new Object[] { url, ex.getMessage() }));
        newEx.setStackTrace(ex.getStackTrace());
        newEx.initCause(ex.getCause());
        throw newEx;
    }
}
From source file:org.eclipse.rdf4j.http.server.repository.transaction.TransactionController.java
private ModelAndView getSparqlUpdateResult(Transaction transaction, HttpServletRequest request, HttpServletResponse response) throws ServerHTTPException, ClientHTTPException, HTTPException { String sparqlUpdateString = null; final String contentType = request.getContentType(); if (contentType != null && contentType.contains(Protocol.SPARQL_UPDATE_MIME_TYPE)) { try {/*w ww . jav a 2s . com*/ final String encoding = request.getCharacterEncoding() != null ? request.getCharacterEncoding() : "UTF-8"; sparqlUpdateString = IOUtils.toString(request.getInputStream(), encoding); } catch (IOException e) { logger.warn("error reading sparql update string from request body", e); throw new ClientHTTPException(SC_BAD_REQUEST, "could not read SPARQL update string from body: " + e.getMessage()); } } else { sparqlUpdateString = request.getParameter(Protocol.UPDATE_PARAM_NAME); } logger.debug("SPARQL update string: {}", sparqlUpdateString); // default query language is SPARQL QueryLanguage queryLn = QueryLanguage.SPARQL; String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME); logger.debug("query language param = {}", queryLnStr); if (queryLnStr != null) { queryLn = QueryLanguage.valueOf(queryLnStr); if (queryLn == null) { throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr); } } String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); // determine if inferred triples should be included in query evaluation boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); // build a dataset, if specified String[] defaultRemoveGraphURIs = request.getParameterValues(REMOVE_GRAPH_PARAM_NAME); String[] defaultInsertGraphURIs = request.getParameterValues(INSERT_GRAPH_PARAM_NAME); String[] defaultGraphURIs = request.getParameterValues(USING_GRAPH_PARAM_NAME); String[] namedGraphURIs = request.getParameterValues(USING_NAMED_GRAPH_PARAM_NAME); SimpleDataset dataset = new SimpleDataset(); if (defaultRemoveGraphURIs != null) { for (String graphURI : defaultRemoveGraphURIs) { try { IRI uri = null; if (!"null".equals(graphURI)) { uri = SimpleValueFactory.getInstance().createIRI(graphURI); } dataset.addDefaultRemoveGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default remove graph: " + graphURI); } } } if (defaultInsertGraphURIs != null && defaultInsertGraphURIs.length > 0) { String graphURI = defaultInsertGraphURIs[0]; try { IRI uri = null; if (!"null".equals(graphURI)) { uri = SimpleValueFactory.getInstance().createIRI(graphURI); } dataset.setDefaultInsertGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default insert graph: " + graphURI); } } if (defaultGraphURIs != null) { for (String defaultGraphURI : defaultGraphURIs) { try { IRI uri = null; if (!"null".equals(defaultGraphURI)) { uri = SimpleValueFactory.getInstance().createIRI(defaultGraphURI); } dataset.addDefaultGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default graph: " + defaultGraphURI); } } } if (namedGraphURIs != null) { for (String namedGraphURI : namedGraphURIs) { try { IRI uri = null; if (!"null".equals(namedGraphURI)) { uri = SimpleValueFactory.getInstance().createIRI(namedGraphURI); } dataset.addNamedGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for named graph: " + namedGraphURI); } } } try { // 
determine if any variable bindings have been set on this update. @SuppressWarnings("unchecked") Enumeration<String> parameterNames = request.getParameterNames(); Map<String, Value> bindings = new HashMap<>(); while (parameterNames.hasMoreElements()) { String parameterName = parameterNames.nextElement(); if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { String bindingName = parameterName.substring(BINDING_PREFIX.length()); Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, SimpleValueFactory.getInstance()); bindings.put(bindingName, bindingValue); } } transaction.executeUpdate(queryLn, sparqlUpdateString, baseURI, includeInferred, dataset, bindings); return new ModelAndView(EmptySuccessView.getInstance()); } catch (UpdateExecutionException | InterruptedException | ExecutionException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (RepositoryException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (MalformedQueryException e) { ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); } }
From source file:org.codelibs.robot.extractor.impl.TikaExtractor.java
@Override public ExtractData getText(final InputStream inputStream, final Map<String, String> params) { if (inputStream == null) { throw new RobotSystemException("The inputstream is null."); }//from www. ja v a 2s . c o m File tempFile = null; try { tempFile = File.createTempFile("tikaExtractor-", ".out"); } catch (final IOException e) { throw new ExtractException("Could not create a temp file.", e); } try { try (OutputStream out = new FileOutputStream(tempFile)) { CopyUtil.copy(inputStream, out); } InputStream in = new FileInputStream(tempFile); final PrintStream originalOutStream = System.out; final ByteArrayOutputStream outStream = new ByteArrayOutputStream(); System.setOut(new PrintStream(outStream, true)); final PrintStream originalErrStream = System.err; final ByteArrayOutputStream errStream = new ByteArrayOutputStream(); System.setErr(new PrintStream(errStream, true)); try { final String resourceName = params == null ? null : params.get(TikaMetadataKeys.RESOURCE_NAME_KEY); final String contentType = params == null ? null : params.get(HttpHeaders.CONTENT_TYPE); String contentEncoding = params == null ? null : params.get(HttpHeaders.CONTENT_ENCODING); // password for pdf String pdfPassword = params == null ? null : params.get(ExtractData.PDF_PASSWORD); if (pdfPassword == null && params != null) { pdfPassword = getPdfPassword(params.get(ExtractData.URL), resourceName); } final Metadata metadata = createMetadata(resourceName, contentType, contentEncoding, pdfPassword); final Parser parser = new DetectParser(); final ParseContext parseContext = new ParseContext(); parseContext.set(Parser.class, parser); final StringWriter writer = new StringWriter(initialBufferSize); parser.parse(in, new BodyContentHandler(writer), metadata, parseContext); String content = normalizeContent(writer); if (StringUtil.isBlank(content)) { if (resourceName != null) { IOUtils.closeQuietly(in); if (logger.isDebugEnabled()) { logger.debug("retry without a resource name: {}", resourceName); } in = new FileInputStream(tempFile); final Metadata metadata2 = createMetadata(null, contentType, contentEncoding, pdfPassword); final StringWriter writer2 = new StringWriter(initialBufferSize); parser.parse(in, new BodyContentHandler(writer2), metadata2, parseContext); content = normalizeContent(writer2); } if (StringUtil.isBlank(content) && contentType != null) { IOUtils.closeQuietly(in); if (logger.isDebugEnabled()) { logger.debug("retry without a content type: {}", contentType); } in = new FileInputStream(tempFile); final Metadata metadata3 = createMetadata(null, null, contentEncoding, pdfPassword); final StringWriter writer3 = new StringWriter(initialBufferSize); parser.parse(in, new BodyContentHandler(writer3), metadata3, parseContext); content = normalizeContent(writer3); } if (readAsTextIfFailed && StringUtil.isBlank(content)) { IOUtils.closeQuietly(in); if (logger.isDebugEnabled()) { logger.debug("read the content as a text."); } if (contentEncoding == null) { contentEncoding = Constants.UTF_8; } BufferedReader br = null; try { br = new BufferedReader( new InputStreamReader(new FileInputStream(tempFile), contentEncoding)); final StringWriter writer4 = new StringWriter(initialBufferSize); String line; while ((line = br.readLine()) != null) { writer4.write(line.replaceAll("\\p{Cntrl}", " ").replaceAll("\\s+", " ").trim()); writer4.write(' '); } content = writer4.toString().trim(); } catch (final Exception e) { logger.warn("Could not read " + tempFile.getAbsolutePath(), e); } finally { IOUtils.closeQuietly(br); } } } 
final ExtractData extractData = new ExtractData(content); final String[] names = metadata.names(); Arrays.sort(names); for (final String name : names) { extractData.putValues(name, metadata.getValues(name)); } if (logger.isDebugEnabled()) { logger.debug("Result: metadata: {}", metadata); } return extractData; } catch (final TikaException e) { if (e.getMessage().indexOf("bomb") >= 0) { throw e; } final Throwable cause = e.getCause(); if (cause instanceof SAXException) { final Extractor xmlExtractor = robotContainer.getComponent("xmlExtractor"); if (xmlExtractor != null) { IOUtils.closeQuietly(in); in = new FileInputStream(tempFile); return xmlExtractor.getText(in, params); } } throw e; } finally { IOUtils.closeQuietly(in); if (originalOutStream != null) { System.setOut(originalOutStream); } if (originalErrStream != null) { System.setErr(originalErrStream); } try { if (logger.isInfoEnabled()) { final byte[] bs = outStream.toByteArray(); if (bs.length != 0) { logger.info(new String(bs, outputEncoding)); } } if (logger.isWarnEnabled()) { final byte[] bs = errStream.toByteArray(); if (bs.length != 0) { logger.warn(new String(bs, outputEncoding)); } } } catch (final Exception e) { // NOP } } } catch (final Exception e) { throw new ExtractException("Could not extract a content.", e); } finally { if (tempFile != null && !tempFile.delete()) { logger.warn("Failed to delete " + tempFile.getAbsolutePath()); } } }
From source file:org.tomitribe.tribestream.registryng.resources.ClientResource.java
@GET @Path("invoke/stream") @Produces("text/event-stream") // will be part of JAX-RS 2.1, for now just making it working public void invokeScenario(@Suspended final AsyncResponse asyncResponse, @Context final Providers providers, @Context final HttpServletRequest httpServletRequest, // base64 encoded json with the request and identify since EventSource doesnt handle it very well // TODO: use a ciphering with a POST endpoint to avoid to have it readable (or other) @QueryParam("request") final String requestBytes) { final SseRequest in = loadPayload(SseRequest.class, providers, requestBytes); final String auth = in.getIdentity(); security.check(auth, httpServletRequest, () -> { }, () -> {//from www . j a v a2 s . com throw new WebApplicationException(Response.Status.FORBIDDEN); }); final GenericClientService.Request req = toRequest(in.getHttp()); final Scenario scenario = in.getHttp().getScenario(); final MultivaluedHashMap<String, Object> fakeHttpHeaders = new MultivaluedHashMap<>(); final ConcurrentMap<Future<?>, Boolean> computations = new ConcurrentHashMap<>(); final MessageBodyWriter<LightHttpResponse> writerResponse = providers.getMessageBodyWriter( LightHttpResponse.class, LightHttpResponse.class, annotations, APPLICATION_JSON_TYPE); final MessageBodyWriter<ScenarioEnd> writerEnd = providers.getMessageBodyWriter(ScenarioEnd.class, ScenarioEnd.class, annotations, APPLICATION_JSON_TYPE); // not jaxrs one cause cxf wraps this one and prevents the flush() to works final HttpServletResponse httpServletResponse = HttpServletResponse.class .cast(httpServletRequest.getAttribute("tribe.registry.response")); httpServletResponse.setHeader("Content-Type", "text/event-stream"); try { httpServletResponse.flushBuffer(); } catch (final IOException e) { throw new IllegalStateException(e); } final ServletOutputStream out; try { out = httpServletResponse.getOutputStream(); } catch (final IOException e) { throw new IllegalStateException(e); } mes.submit(() -> { final AtomicReference<Invoker.Handle> handleRef = new AtomicReference<>(); try { // we compute some easy stats asynchronously final Map<Integer, AtomicInteger> sumPerResponse = new HashMap<>(); final AtomicInteger total = new AtomicInteger(); final AtomicLong min = new AtomicLong(); final AtomicLong max = new AtomicLong(); final AtomicLong sum = new AtomicLong(); final AtomicInteger writeErrors = new AtomicInteger(0); final long start = System.currentTimeMillis(); handleRef.set(invoker.invoke(scenario.getThreads(), scenario.getInvocations(), scenario.getDuration(), timeout, () -> { if (handleRef.get().isCancelled()) { return; } LightHttpResponse resp; try { final GenericClientService.Response invoke = service.invoke(req); resp = new LightHttpResponse(invoke.getStatus(), null, invoke.getClientExecutionDurationMs()); } catch (final RuntimeException e) { resp = new LightHttpResponse(-1, e.getMessage(), -1); } // let's process it in an environment where synchronisation is fine final LightHttpResponse respRef = resp; computations.put(mes.submit(() -> { synchronized (out) { try { out.write(dataStart); writerResponse.writeTo(respRef, LightHttpResponse.class, LightHttpResponse.class, annotations, APPLICATION_JSON_TYPE, fakeHttpHeaders, out); out.write(dataEnd); out.flush(); } catch (final IOException e) { if (writeErrors.incrementAndGet() > toleratedWriteErrors) { handleRef.get().cancel(); } throw new IllegalStateException(e); } } if (handleRef.get().isCancelled()) { return; } final long clientExecutionDurationMs = respRef.getClientExecutionDurationMs(); 
total.incrementAndGet(); sumPerResponse.computeIfAbsent(respRef.getStatus(), k -> new AtomicInteger()) .incrementAndGet(); sum.addAndGet(clientExecutionDurationMs); { long m = min.get(); do { m = min.get(); if (min.compareAndSet(m, clientExecutionDurationMs)) { break; } } while (m > clientExecutionDurationMs); } { long m = max.get(); do { m = max.get(); if (max.compareAndSet(m, clientExecutionDurationMs)) { break; } } while (m < clientExecutionDurationMs); } }), true); })); handleRef.get().await(); final long end = System.currentTimeMillis(); do { // wait all threads finished to compute the stats final Iterator<Future<?>> iterator = computations.keySet().iterator(); while (iterator.hasNext()) { try { iterator.next().get(timeout, TimeUnit.MILLISECONDS); } catch (final InterruptedException e) { Thread.interrupted(); } catch (final ExecutionException | TimeoutException e) { throw new IllegalStateException(e.getCause()); } finally { iterator.remove(); } } } while (!computations.isEmpty()); if (handleRef.get().isCancelled()) { return; } try { out.write(dataStart); writerEnd.writeTo( new ScenarioEnd( sumPerResponse.entrySet().stream() .collect(toMap(Map.Entry::getKey, t -> t.getValue().get())), end - start, total.get(), min.get(), max.get(), sum.get() * 1. / total.get()), ScenarioEnd.class, ScenarioEnd.class, annotations, APPLICATION_JSON_TYPE, new MultivaluedHashMap<>(), out); out.write(dataEnd); out.flush(); } catch (final IOException e) { throw new IllegalStateException(e); } } finally { try { // cxf will skip it since we already write ourself asyncResponse.resume(""); } catch (final RuntimeException re) { // no-op: not that important } } }); }
From source file:org.openrdf.http.server.repository.transaction.TransactionController.java
private ModelAndView getSparqlUpdateResult(RepositoryConnection conn, HttpServletRequest request, HttpServletResponse response) throws ServerHTTPException, ClientHTTPException, HTTPException { String sparqlUpdateString = null; final String contentType = request.getContentType(); if (contentType != null && contentType.contains(Protocol.SPARQL_UPDATE_MIME_TYPE)) { try {/*from www . j a v a 2s . c om*/ final String encoding = request.getCharacterEncoding() != null ? request.getCharacterEncoding() : "UTF-8"; sparqlUpdateString = IOUtils.toString(request.getInputStream(), encoding); } catch (IOException e) { logger.warn("error reading sparql update string from request body", e); throw new ClientHTTPException(SC_BAD_REQUEST, "could not read SPARQL update string from body: " + e.getMessage()); } } else { sparqlUpdateString = request.getParameter(Protocol.UPDATE_PARAM_NAME); } logger.debug("SPARQL update string: {}", sparqlUpdateString); // default query language is SPARQL QueryLanguage queryLn = QueryLanguage.SPARQL; String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME); logger.debug("query language param = {}", queryLnStr); if (queryLnStr != null) { queryLn = QueryLanguage.valueOf(queryLnStr); if (queryLn == null) { throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr); } } String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); // determine if inferred triples should be included in query evaluation boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); // build a dataset, if specified String[] defaultRemoveGraphURIs = request.getParameterValues(REMOVE_GRAPH_PARAM_NAME); String[] defaultInsertGraphURIs = request.getParameterValues(INSERT_GRAPH_PARAM_NAME); String[] defaultGraphURIs = request.getParameterValues(USING_GRAPH_PARAM_NAME); String[] namedGraphURIs = request.getParameterValues(USING_NAMED_GRAPH_PARAM_NAME); SimpleDataset dataset = new SimpleDataset(); if (defaultRemoveGraphURIs != null) { for (String graphURI : defaultRemoveGraphURIs) { try { IRI uri = null; if (!"null".equals(graphURI)) { uri = conn.getValueFactory().createIRI(graphURI); } dataset.addDefaultRemoveGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default remove graph: " + graphURI); } } } if (defaultInsertGraphURIs != null && defaultInsertGraphURIs.length > 0) { String graphURI = defaultInsertGraphURIs[0]; try { IRI uri = null; if (!"null".equals(graphURI)) { uri = conn.getValueFactory().createIRI(graphURI); } dataset.setDefaultInsertGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default insert graph: " + graphURI); } } if (defaultGraphURIs != null) { for (String defaultGraphURI : defaultGraphURIs) { try { IRI uri = null; if (!"null".equals(defaultGraphURI)) { uri = conn.getValueFactory().createIRI(defaultGraphURI); } dataset.addDefaultGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default graph: " + defaultGraphURI); } } } if (namedGraphURIs != null) { for (String namedGraphURI : namedGraphURIs) { try { IRI uri = null; if (!"null".equals(namedGraphURI)) { uri = conn.getValueFactory().createIRI(namedGraphURI); } dataset.addNamedGraph(uri); } catch (IllegalArgumentException e) { throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for named graph: " + namedGraphURI); } } } try { Update update = 
conn.prepareUpdate(queryLn, sparqlUpdateString, baseURI); update.setIncludeInferred(includeInferred); if (dataset != null) { update.setDataset(dataset); } // determine if any variable bindings have been set on this update. @SuppressWarnings("unchecked") Enumeration<String> parameterNames = request.getParameterNames(); while (parameterNames.hasMoreElements()) { String parameterName = parameterNames.nextElement(); if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { String bindingName = parameterName.substring(BINDING_PREFIX.length()); Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, conn.getValueFactory()); update.setBinding(bindingName, bindingValue); } } update.execute(); return new ModelAndView(EmptySuccessView.getInstance()); } catch (UpdateExecutionException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (RepositoryException e) { if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException directly // (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); } } catch (MalformedQueryException e) { ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); } }