List of usage examples for java.io.IOException getCause()
public synchronized Throwable getCause()
Returns the cause of this exception, or null if the cause is nonexistent or unknown. The method is inherited from java.lang.Throwable; the examples below show typical ways callers inspect the cause to decide how to handle a failure.
From source file: org.seasar.robot.extractor.impl.TikaExtractor.java
@Override public ExtractData getText(final InputStream inputStream, final Map<String, String> params) { if (inputStream == null) { throw new RobotSystemException("The inputstream is null."); }//from w w w . java 2 s . c om File tempFile = null; try { tempFile = File.createTempFile("tikaExtractor-", ".out"); } catch (final IOException e) { throw new ExtractException("Could not create a temp file.", e); } try { OutputStream out = null; try { out = new FileOutputStream(tempFile); StreamUtil.drain(inputStream, out); } finally { IOUtils.closeQuietly(out); } InputStream in = new FileInputStream(tempFile); final PrintStream originalOutStream = System.out; final ByteArrayOutputStream outStream = new ByteArrayOutputStream(); System.setOut(new PrintStream(outStream, true)); final PrintStream originalErrStream = System.err; final ByteArrayOutputStream errStream = new ByteArrayOutputStream(); System.setErr(new PrintStream(errStream, true)); try { final String resourceName = params == null ? null : params.get(TikaMetadataKeys.RESOURCE_NAME_KEY); final String contentType = params == null ? null : params.get(HttpHeaders.CONTENT_TYPE); String contentEncoding = params == null ? null : params.get(HttpHeaders.CONTENT_ENCODING); // password for pdf String pdfPassword = params == null ? null : params.get(ExtractData.PDF_PASSWORD); if (pdfPassword == null && params != null) { pdfPassword = getPdfPassword(params.get(ExtractData.URL), resourceName); } final Metadata metadata = createMetadata(resourceName, contentType, contentEncoding, pdfPassword); final Parser parser = new DetectParser(); final ParseContext parseContext = new ParseContext(); parseContext.set(Parser.class, parser); final StringWriter writer = new StringWriter(initialBufferSize); parser.parse(in, new BodyContentHandler(writer), metadata, parseContext); String content = normalizeContent(writer); if (StringUtil.isBlank(content)) { if (resourceName != null) { IOUtils.closeQuietly(in); if (logger.isDebugEnabled()) { logger.debug("retry without a resource name: {}", resourceName); } in = new FileInputStream(tempFile); final Metadata metadata2 = createMetadata(null, contentType, contentEncoding, pdfPassword); final StringWriter writer2 = new StringWriter(initialBufferSize); parser.parse(in, new BodyContentHandler(writer2), metadata2, parseContext); content = normalizeContent(writer2); } if (StringUtil.isBlank(content) && contentType != null) { IOUtils.closeQuietly(in); if (logger.isDebugEnabled()) { logger.debug("retry without a content type: {}", contentType); } in = new FileInputStream(tempFile); final Metadata metadata3 = createMetadata(null, null, contentEncoding, pdfPassword); final StringWriter writer3 = new StringWriter(initialBufferSize); parser.parse(in, new BodyContentHandler(writer3), metadata3, parseContext); content = normalizeContent(writer3); } if (readAsTextIfFailed && StringUtil.isBlank(content)) { IOUtils.closeQuietly(in); if (logger.isDebugEnabled()) { logger.debug("read the content as a text."); } if (contentEncoding == null) { contentEncoding = Constants.UTF_8; } BufferedReader br = null; try { br = new BufferedReader( new InputStreamReader(new FileInputStream(tempFile), contentEncoding)); final StringWriter writer4 = new StringWriter(initialBufferSize); String line; while ((line = br.readLine()) != null) { writer4.write(line.replaceAll("\\p{Cntrl}", " ").replaceAll("\\s+", " ").trim()); writer4.write(' '); } content = writer4.toString().trim(); } catch (final Exception e) { logger.warn("Could not read " + 
tempFile.getAbsolutePath(), e); } finally { IOUtils.closeQuietly(br); } } } final ExtractData extractData = new ExtractData(content); final String[] names = metadata.names(); Arrays.sort(names); for (final String name : names) { extractData.putValues(name, metadata.getValues(name)); } if (logger.isDebugEnabled()) { logger.debug("Result: metadata: {}", metadata); } return extractData; } catch (final TikaException e) { if (e.getMessage().indexOf("bomb") >= 0) { throw e; } final Throwable cause = e.getCause(); if (cause instanceof SAXException) { final Extractor xmlExtractor = SingletonS2Container.getComponent("xmlExtractor"); if (xmlExtractor != null) { IOUtils.closeQuietly(in); in = new FileInputStream(tempFile); return xmlExtractor.getText(in, params); } } throw e; } finally { IOUtils.closeQuietly(in); if (originalOutStream != null) { System.setOut(originalOutStream); } if (originalErrStream != null) { System.setErr(originalErrStream); } try { if (logger.isInfoEnabled()) { final byte[] bs = outStream.toByteArray(); if (bs.length != 0) { logger.info(new String(bs, outputEncoding)); } } if (logger.isWarnEnabled()) { final byte[] bs = errStream.toByteArray(); if (bs.length != 0) { logger.warn(new String(bs, outputEncoding)); } } } catch (final Exception e) { // NOP } } } catch (final Exception e) { throw new ExtractException("Could not extract a content.", e); } finally { if (tempFile != null && !tempFile.delete()) { logger.warn("Failed to delete " + tempFile.getAbsolutePath()); } } }
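In the catch block above, getCause() drives a fallback: when Tika fails and the wrapped cause turns out to be a SAXException, the content is re-parsed with the dedicated "xmlExtractor" component instead. Below is a minimal, self-contained sketch of that decision; FallbackExtractor, primaryParse and xmlParse are illustrative names, not part of the original code.

import org.apache.tika.exception.TikaException;
import org.xml.sax.SAXException;

public class FallbackExtractor {

    // Hypothetical primary parse standing in for Tika's DetectParser call.
    String primaryParse(byte[] data) throws TikaException {
        throw new TikaException("parse failure", new SAXException("broken markup"));
    }

    // Hypothetical fallback standing in for the xmlExtractor component.
    String xmlParse(byte[] data) {
        return "<recovered/>";
    }

    String extract(byte[] data) throws TikaException {
        try {
            return primaryParse(data);
        } catch (TikaException e) {
            // Only a SAX problem in the cause justifies the XML fallback.
            Throwable cause = e.getCause();
            if (cause instanceof SAXException) {
                return xmlParse(data);
            }
            throw e; // any other failure is rethrown unchanged
        }
    }
}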
From source file: org.apache.hadoop.hbase.regionserver.HRegion.java
private long replayRecoveredEdits(final Path edits, Map<byte[], Long> maxSeqIdInStores, final CancelableProgressable reporter) throws IOException { String msg = "Replaying edits from " + edits; LOG.info(msg);// w ww .j ava 2s. co m MonitoredTask status = TaskMonitor.get().createStatus(msg); FileSystem fs = this.fs.getFileSystem(); status.setStatus("Opening logs"); HLog.Reader reader = null; try { reader = HLogFactory.createReader(fs, edits, conf); long currentEditSeqId = -1; long firstSeqIdInLog = -1; long skippedEdits = 0; long editsCount = 0; long intervalEdits = 0; HLog.Entry entry; Store store = null; boolean reported_once = false; ServerNonceManager ng = this.rsServices == null ? null : this.rsServices.getNonceManager(); try { // How many edits seen before we check elapsed time int interval = this.conf.getInt("hbase.hstore.report.interval.edits", 2000); // How often to send a progress report (default 1/2 master timeout) int period = this.conf.getInt("hbase.hstore.report.period", 300000); long lastReport = EnvironmentEdgeManager.currentTimeMillis(); while ((entry = reader.next()) != null) { HLogKey key = entry.getKey(); WALEdit val = entry.getEdit(); if (ng != null) { // some test, or nonces disabled ng.reportOperationFromWal(key.getNonceGroup(), key.getNonce(), key.getWriteTime()); } if (reporter != null) { intervalEdits += val.size(); if (intervalEdits >= interval) { // Number of edits interval reached intervalEdits = 0; long cur = EnvironmentEdgeManager.currentTimeMillis(); if (lastReport + period <= cur) { status.setStatus( "Replaying edits..." + " skipped=" + skippedEdits + " edits=" + editsCount); // Timeout reached if (!reporter.progress()) { msg = "Progressable reporter failed, stopping replay"; LOG.warn(msg); status.abort(msg); throw new IOException(msg); } reported_once = true; lastReport = cur; } } } // Start coprocessor replay here. The coprocessor is for each WALEdit // instead of a KeyValue. if (coprocessorHost != null) { status.setStatus("Running pre-WAL-restore hook in coprocessors"); if (coprocessorHost.preWALRestore(this.getRegionInfo(), key, val)) { // if bypass this log entry, ignore it ... continue; } } if (firstSeqIdInLog == -1) { firstSeqIdInLog = key.getLogSeqNum(); } currentEditSeqId = key.getLogSeqNum(); boolean flush = false; for (KeyValue kv : val.getKeyValues()) { // Check this edit is for me. Also, guard against writing the special // METACOLUMN info such as HBASE::CACHEFLUSH entries if (CellUtil.matchingFamily(kv, WALEdit.METAFAMILY) || !Bytes .equals(key.getEncodedRegionName(), this.getRegionInfo().getEncodedNameAsBytes())) { //this is a special edit, we should handle it CompactionDescriptor compaction = WALEdit.getCompaction(kv); if (compaction != null) { //replay the compaction completeCompactionMarker(compaction); } skippedEdits++; continue; } // Figure which store the edit is meant for. if (store == null || !CellUtil.matchingFamily(kv, store.getFamily().getName())) { store = getStore(kv); } if (store == null) { // This should never happen. Perhaps schema was changed between // crash and redeploy? LOG.warn("No family for " + kv); skippedEdits++; continue; } // Now, figure if we should skip this edit. if (key.getLogSeqNum() <= maxSeqIdInStores.get(store.getFamily().getName())) { skippedEdits++; continue; } // Once we are over the limit, restoreEdit will keep returning true to // flush -- but don't flush until we've played all the kvs that make up // the WALEdit. 
flush = restoreEdit(store, kv); editsCount++; } if (flush) internalFlushcache(null, currentEditSeqId, status); if (coprocessorHost != null) { coprocessorHost.postWALRestore(this.getRegionInfo(), key, val); } } } catch (EOFException eof) { Path p = HLogUtil.moveAsideBadEditsFile(fs, edits); msg = "Encountered EOF. Most likely due to Master failure during " + "log splitting, so we have this data in another edit. " + "Continuing, but renaming " + edits + " as " + p; LOG.warn(msg, eof); status.abort(msg); } catch (IOException ioe) { // If the IOE resulted from bad file format, // then this problem is idempotent and retrying won't help if (ioe.getCause() instanceof ParseException) { Path p = HLogUtil.moveAsideBadEditsFile(fs, edits); msg = "File corruption encountered! " + "Continuing, but renaming " + edits + " as " + p; LOG.warn(msg, ioe); status.setStatus(msg); } else { status.abort(StringUtils.stringifyException(ioe)); // other IO errors may be transient (bad network connection, // checksum exception on one datanode, etc). throw & retry throw ioe; } } if (reporter != null && !reported_once) { reporter.progress(); } msg = "Applied " + editsCount + ", skipped " + skippedEdits + ", firstSequenceIdInLog=" + firstSeqIdInLog + ", maxSequenceIdInLog=" + currentEditSeqId + ", path=" + edits; status.markComplete(msg); LOG.debug(msg); return currentEditSeqId; } finally { status.cleanup(); if (reader != null) { reader.close(); } } }
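The catch (IOException ioe) block above uses getCause() to separate permanent from transient failures: a ParseException cause means the edits file itself is corrupt, so it is moved aside and replay continues, while any other IOException is rethrown so the caller can retry. A stripped-down sketch of that classification follows; moveAsideBadFile and parseEdits are hypothetical stand-ins for HLogUtil.moveAsideBadEditsFile and the WAL reader.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.ParseException;

public class EditReplayer {

    // Hypothetical stand-in for HLogUtil.moveAsideBadEditsFile().
    static Path moveAsideBadFile(Path edits) throws IOException {
        Path aside = edits.resolveSibling(edits.getFileName() + "." + System.currentTimeMillis() + ".corrupt");
        return Files.move(edits, aside);
    }

    // Hypothetical reader that wraps format errors in an IOException.
    static void parseEdits(Path edits) throws IOException {
        throw new IOException("bad record", new ParseException("unparseable entry", 0));
    }

    static void replay(Path edits) throws IOException {
        try {
            parseEdits(edits);
        } catch (IOException ioe) {
            if (ioe.getCause() instanceof ParseException) {
                // Bad file format is idempotent: retrying will not help, so rename and move on.
                Path aside = moveAsideBadFile(edits);
                System.err.println("Corrupt edits file, renamed to " + aside);
            } else {
                // Other I/O errors (network, checksum, ...) may be transient: rethrow and retry.
                throw ioe;
            }
        }
    }
}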
From source file: io.fabric8.kubernetes.client.dsl.internal.WatchConnectionManager.java
private final void runWatch() throws MalformedURLException, ExecutionException, InterruptedException { URL requestUrl = baseOperation.getNamespacedUrl(); HttpUrl.Builder httpUrlBuilder = HttpUrl.get(requestUrl).newBuilder(); String labelQueryParam = baseOperation.getLabelQueryParam(); if (isNotNullOrEmpty(labelQueryParam)) { httpUrlBuilder.addQueryParameter("labelSelector", labelQueryParam); }// w ww. j a v a 2s . c o m String fieldQueryString = baseOperation.getFieldQueryParam(); String name = baseOperation.getName(); if (name != null && name.length() > 0) { if (fieldQueryString.length() > 0) { fieldQueryString += ","; } fieldQueryString += "metadata.name=" + name; } if (isNotNullOrEmpty(fieldQueryString)) { httpUrlBuilder.addQueryParameter("fieldSelector", fieldQueryString); } httpUrlBuilder.addQueryParameter("resourceVersion", this.resourceVersion.get()).addQueryParameter("watch", "true"); Request request = new Request.Builder().get().url(httpUrlBuilder.build()) .addHeader("Origin", requestUrl.getProtocol() + "://" + requestUrl.getHost() + ":" + requestUrl.getPort()) .build(); webSocketCall = WebSocketCall.create(clonedClient, request); webSocketCall.enqueue(new WebSocketListener() { private final Logger logger = LoggerFactory.getLogger(this.getClass()); @Override public void onOpen(final WebSocket webSocket, Response response) { logger.debug("WebSocket successfully opened"); webSocketRef.set(webSocket); currentReconnectAttempt.set(0); started.set(true); queue.clear(); queue.add(true); pingFuture = executor.scheduleAtFixedRate(new Runnable() { @Override public void run() { try { webSocket.sendPing(new Buffer().writeUtf8("Alive?")); } catch (IOException e) { logger.error("Failed to send ping", e); onClose(4000, "Connection unexpectedly closed"); } } }, 0, 1, TimeUnit.SECONDS); } @Override public void onFailure(IOException e, Response response) { if (response != null) { //We only need to queue startup failures. Status status = OperationSupport.createStatus(response); logger.error("Exec Failure: HTTP:" + status.getCode() + ". 
Message:" + status.getMessage(), e); if (!started.get()) { queue.add(new KubernetesClientException(status)); } } else { logger.error("Exec Failure", e); if (!started.get()) { queue.add(KubernetesClientException.launderThrowable(e)); } } if (response != null && response.body() != null) { response.body().close(); } if (forceClosed.get()) { watcher.onClose(null); return; } if (reconnectLimit >= 0 && currentReconnectAttempt.getAndIncrement() >= reconnectLimit) { watcher.onClose(new KubernetesClientException("Connection unexpectedly closed", e)); return; } try { TimeUnit.MILLISECONDS.sleep(reconnectInterval); } catch (InterruptedException e1) { watcher.onClose(new KubernetesClientException("Connection unexpectedly closed", e1)); return; } onClose(4000, "Connection unexpectedly closed"); } @Override public void onMessage(ResponseBody message) throws IOException { String messageSource = null; try { messageSource = message.string(); WatchEvent event = mapper.readValue(messageSource, WatchEvent.class); if (event.getObject() instanceof HasMetadata) { T obj = (T) event.getObject(); //Dirty cast - should always be valid though String currentResourceVersion = resourceVersion.get(); String newResourceVersion = ((HasMetadata) obj).getMetadata().getResourceVersion(); if (currentResourceVersion.compareTo(newResourceVersion) < 0) { resourceVersion.compareAndSet(currentResourceVersion, newResourceVersion); } Watcher.Action action = Watcher.Action.valueOf(event.getType()); watcher.eventReceived(action, obj); } else if (event.getObject() instanceof Status) { Status status = (Status) event.getObject(); // The resource version no longer exists - this has to be handled by the caller. if (status.getCode() == HTTP_GONE) { close(); watcher.onClose(new KubernetesClientException(status)); return; } logger.error("Error received: {}", status.toString()); } } catch (IOException e) { logger.error("Could not deserialize watch event: {}", messageSource, e); } catch (ClassCastException e) { logger.error("Received wrong type of object for watch", e); } catch (IllegalArgumentException e) { logger.error("Invalid event type", e); } finally { message.close(); } } @Override public void onPong(Buffer buffer) { try { if (!buffer.equals(new Buffer().writeUtf8("Alive?"))) { logger.error("Failed to verify pong"); onClose(4000, "Connection unexpectedly closed"); } } finally { buffer.close(); } } @Override public void onClose(final int code, final String reason) { if (!pingFuture.isCancelled() && !pingFuture.isDone()) { pingFuture.cancel(true); } if (forceClosed.get()) { watcher.onClose(null); return; } // Let's close the websocket before trying to reopen a new one. try { WebSocket ws = webSocketRef.get(); if (ws != null) { ws.close(1000, "Closing..."); webSocketRef.set(null); } } catch (IOException e) { e.printStackTrace(); } catch (IllegalStateException e) { // Ignore... 
} executor.submit(new Runnable() { @Override public void run() { try { queue.clear(); runWatch(); } catch (ExecutionException e) { if (e.getCause() != null && e.getCause().getCause() != null && e.getCause().getCause() instanceof ConnectException) { if (reconnectLimit >= 0 && currentReconnectAttempt.getAndIncrement() >= reconnectLimit) { watcher.onClose( new KubernetesClientException("Connection unexpectedly closed", e)); return; } try { TimeUnit.MILLISECONDS.sleep(reconnectInterval); } catch (InterruptedException e1) { watcher.onClose( new KubernetesClientException("Connection unexpectedly closed", e1)); return; } onClose(code, reason); } } catch (MalformedURLException | InterruptedException e) { throw KubernetesClientException.launderThrowable(e); } } }); } }); }
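In the reconnect handler above the real failure arrives wrapped twice: an ExecutionException whose cause (a client exception) in turn wraps the underlying ConnectException, hence the e.getCause().getCause() check. A small sketch of probing a two-level cause chain before deciding to reconnect; isConnectFailure is an illustrative name.

import java.net.ConnectException;
import java.util.concurrent.ExecutionException;

public class ReconnectDecision {

    // True when the failure is ultimately a failed TCP connect, two levels down the cause chain.
    static boolean isConnectFailure(ExecutionException e) {
        Throwable first = e.getCause();
        Throwable second = (first == null) ? null : first.getCause();
        return second instanceof ConnectException;
    }

    public static void main(String[] args) {
        ExecutionException e = new ExecutionException(
                new RuntimeException("client error", new ConnectException("connection refused")));
        if (isConnectFailure(e)) {
            System.out.println("transient connect failure -> schedule a reconnect attempt");
        } else {
            System.out.println("other failure -> surface it to the watcher");
        }
    }
}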
From source file: org.eclipse.rdf4j.http.server.repository.RepositoryController.java
@Override protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) throws Exception { String reqMethod = request.getMethod(); String queryStr = request.getParameter(QUERY_PARAM_NAME); if (METHOD_POST.equals(reqMethod)) { String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); if (!(Protocol.FORM_MIME_TYPE.equals(mimeType) || Protocol.SPARQL_QUERY_MIME_TYPE.equals(mimeType))) { throw new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported MIME type: " + mimeType); }// w w w . jav a2s.co m if (Protocol.SPARQL_QUERY_MIME_TYPE.equals(mimeType)) { // The query should be the entire body try { queryStr = IOUtils.toString(request.getReader()); } catch (IOException e) { throw new HTTPException(HttpStatus.SC_BAD_REQUEST, "Error reading request message body", e); } if (queryStr.isEmpty()) queryStr = null; } } else if (METHOD_DELETE.equals(reqMethod)) { String repId = RepositoryInterceptor.getRepositoryID(request); logger.info("DELETE request invoked for repository '" + repId + "'"); if (queryStr != null) { logger.warn("query supplied on repository delete request, aborting delete"); throw new HTTPException(HttpStatus.SC_BAD_REQUEST, "Repository delete error: query supplied with request"); } if (SystemRepository.ID.equals(repId)) { logger.warn("attempted delete of SYSTEM repository, aborting"); throw new HTTPException(HttpStatus.SC_FORBIDDEN, "SYSTEM Repository can not be deleted"); } try { boolean success = repositoryManager.removeRepository(repId); if (success) { logger.info("DELETE request successfully completed"); return new ModelAndView(EmptySuccessView.getInstance()); } else { logger.error("error while attempting to delete repository '" + repId + "'"); throw new HTTPException(HttpStatus.SC_BAD_REQUEST, "could not locate repository configuration for repository '" + repId + "'."); } } catch (RDF4JException e) { logger.error("error while attempting to delete repository '" + repId + "'", e); throw new ServerHTTPException("Repository delete error: " + e.getMessage(), e); } } Repository repository = RepositoryInterceptor.getRepository(request); int qryCode = 0; if (logger.isInfoEnabled() || logger.isDebugEnabled()) { qryCode = String.valueOf(queryStr).hashCode(); } boolean headersOnly = false; if (METHOD_GET.equals(reqMethod)) { logger.info("GET query {}", qryCode); } else if (METHOD_HEAD.equals(reqMethod)) { logger.info("HEAD query {}", qryCode); headersOnly = true; } else if (METHOD_POST.equals(reqMethod)) { logger.info("POST query {}", qryCode); } logger.debug("query {} = {}", qryCode, queryStr); if (queryStr != null) { RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request); try { Query query = getQuery(repository, repositoryCon, queryStr, request, response); View view; Object queryResult = null; FileFormatServiceRegistry<? extends FileFormat, ?> registry; try { if (query instanceof TupleQuery) { if (!headersOnly) { TupleQuery tQuery = (TupleQuery) query; long limit = ProtocolUtil.parseLongParam(request, Protocol.LIMIT_PARAM_NAME, 0); long offset = ProtocolUtil.parseLongParam(request, Protocol.OFFSET_PARAM_NAME, 0); boolean distinct = ProtocolUtil.parseBooleanParam(request, Protocol.DISTINCT_PARAM_NAME, false); final TupleQueryResult tqr = distinct ? 
QueryResults.distinctResults(tQuery.evaluate()) : tQuery.evaluate(); queryResult = QueryResults.limitResults(tqr, limit, offset); } registry = TupleQueryResultWriterRegistry.getInstance(); view = TupleQueryResultView.getInstance(); } else if (query instanceof GraphQuery) { if (!headersOnly) { GraphQuery gQuery = (GraphQuery) query; long limit = ProtocolUtil.parseLongParam(request, Protocol.LIMIT_PARAM_NAME, 0); long offset = ProtocolUtil.parseLongParam(request, Protocol.OFFSET_PARAM_NAME, 0); boolean distinct = ProtocolUtil.parseBooleanParam(request, Protocol.DISTINCT_PARAM_NAME, false); final GraphQueryResult qqr = distinct ? QueryResults.distinctResults(gQuery.evaluate()) : gQuery.evaluate(); queryResult = QueryResults.limitResults(qqr, limit, offset); } registry = RDFWriterRegistry.getInstance(); view = GraphQueryResultView.getInstance(); } else if (query instanceof BooleanQuery) { BooleanQuery bQuery = (BooleanQuery) query; queryResult = headersOnly ? null : bQuery.evaluate(); registry = BooleanQueryResultWriterRegistry.getInstance(); view = BooleanQueryResultView.getInstance(); } else { throw new ClientHTTPException(SC_BAD_REQUEST, "Unsupported query type: " + query.getClass().getName()); } } catch (QueryInterruptedException e) { logger.info("Query interrupted", e); throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "Query evaluation took too long"); } catch (QueryEvaluationException e) { logger.info("Query evaluation error", e); if (e.getCause() != null && e.getCause() instanceof HTTPException) { // custom signal from the backend, throw as HTTPException // directly (see SES-1016). throw (HTTPException) e.getCause(); } else { throw new ServerHTTPException("Query evaluation error: " + e.getMessage()); } } Object factory = ProtocolUtil.getAcceptableService(request, response, registry); Map<String, Object> model = new HashMap<String, Object>(); model.put(QueryResultView.FILENAME_HINT_KEY, "query-result"); model.put(QueryResultView.QUERY_RESULT_KEY, queryResult); model.put(QueryResultView.FACTORY_KEY, factory); model.put(QueryResultView.HEADERS_ONLY, headersOnly); model.put(QueryResultView.CONNECTION_KEY, repositoryCon); return new ModelAndView(view, model); } catch (Exception e) { // only close the connection when an exception occurs. Otherwise, the QueryResultView will take care of closing it. repositoryCon.close(); throw e; } } else { throw new ClientHTTPException(SC_BAD_REQUEST, "Missing parameter: " + QUERY_PARAM_NAME); } }
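Here getCause() is used to unwrap and rethrow: when query evaluation fails because the backend store signalled an HTTP-level error, the controller propagates that original exception instead of wrapping it in a generic server error. The condensed sketch below uses illustrative exception classes (BackendHttpException, EvaluationException) in place of the RDF4J types.

public class CauseUnwrapper {

    // Illustrative stand-ins for HTTPException and QueryEvaluationException.
    static class BackendHttpException extends Exception {
        BackendHttpException(String msg) { super(msg); }
    }

    static class EvaluationException extends Exception {
        EvaluationException(String msg, Throwable cause) { super(msg, cause); }
    }

    static void evaluate() throws EvaluationException {
        throw new EvaluationException("evaluation failed",
                new BackendHttpException("503 from backend store"));
    }

    static void handle() throws Exception {
        try {
            evaluate();
        } catch (EvaluationException e) {
            // If the backend signalled an HTTP-level error through the cause,
            // rethrow that original exception rather than a generic wrapper.
            if (e.getCause() instanceof BackendHttpException) {
                throw (BackendHttpException) e.getCause();
            }
            throw new Exception("Query evaluation error: " + e.getMessage());
        }
    }
}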
From source file: org.vpac.ndg.cli.Client.java
public void run() { try {//from ww w . j a v a2s. c o m workingDirectory = workingDirectory.toRealPath(); } catch (IOException e) { log.error("Could not determine working directory from {}.", workingDirectory); exit(1); return; } String category; String action; try { category = cmd.getArgs()[0]; } catch (IndexOutOfBoundsException e) { log.error("Missing category (e.g. \"dataset\")."); System.err.print(USAGE); exit(1); return; } try { action = cmd.getArgs()[1]; } catch (IndexOutOfBoundsException e) { log.error("Missing action (e.g. \"list\")."); System.err.print(USAGE); exit(1); return; } category = category.toLowerCase(); action = action.toLowerCase(); @SuppressWarnings("unchecked") List<String> args = cmd.getArgList(); List<String> remainingArgs = args.subList(2, args.size()); try { // Datasets if (category.equals("dataset")) { if (action.equals("list")) { listDatasets(); } else if (action.equals("search")) { searchDataset(remainingArgs); } else if (action.equals("show")) { showDataset(remainingArgs); } else if (action.equals("create")) { createDataset(remainingArgs); } else if (action.equals("update")) { updateDataset(remainingArgs); } else if (action.equals("delete")) { deleteDataset(remainingArgs); } else { throw new IllegalArgumentException(String.format("%s is not a recognised action.", action)); } // Time slices } else if (category.equals("timeslice")) { if (action.equals("list")) { listTimeSlices(remainingArgs); } else if (action.equals("show")) { showTimeSlice(remainingArgs); } else if (action.equals("create")) { createTimeSlice(remainingArgs); } else if (action.equals("delete")) { deleteTimeSlice(remainingArgs); } else if (action.equals("update")) { updateTimeSlice(remainingArgs); } else { throw new IllegalArgumentException(String.format("%s is not a recognised action.", action)); } // Bands } else if (category.equals("band")) { if (action.equals("list")) { listBands(remainingArgs); } else if (action.equals("create")) { createBand(remainingArgs); } else if (action.equals("delete")) { deleteBand(remainingArgs); } else if (action.equals("update")) { updateBand(remainingArgs); } else { throw new IllegalArgumentException(String.format("%s is not a recognised action.", action)); } } else if (category.equals("data")) { // case "upload": // System.out.println("Uploading data."); // break; if (action.equals("import")) { importData(remainingArgs); } else if (action.equals("export")) { exportData(remainingArgs); } else if (action.equals("download")) { downloadData(remainingArgs); } else if (action.equals("query")) { queryData(remainingArgs); } else if (action.equals("task")) { listTasks(remainingArgs); } else if (action.equals("cleanup")) { cleanup(remainingArgs); } else { throw new IllegalArgumentException(String.format("%s is not a recognised action.", action)); } // } else if (category.equals("help")) { // if (action.equals("export")) { // helpExport(remainingArgs); // } } else { throw new IllegalArgumentException(String.format("%s is not a recognised category.", category)); } } catch (IllegalArgumentException e) { log.error(e.getMessage()); if (e.getCause() != null) log.error("Cause: {}", e.getCause().getMessage()); exit(1); return; } catch (UnsupportedOperationException e) { log.error("Chosen operation not implemented: {}", e.getMessage()); if (e.getCause() != null) log.error("Cause: {}", e.getCause().getMessage()); exit(1); return; } catch (InfrastructureException e) { log.error("Failed to communicate with database: {}", e.getMessage()); if (e.getCause() != null) log.error("Cause: 
{}", e.getCause().getMessage()); exit(2); return; } catch (TaskInitialisationException | TaskException | IOException | QueryRuntimeException e) { log.error("Execution failed: {}", e.getMessage()); if (e.getCause() != null) log.error("Cause: {}", e.getCause().getMessage()); exit(2); return; } }
From source file: fr.certu.chouette.command.Command.java
/** * @param manager/*w w w.j av a 2 s . co m*/ * @param parameters * @return */ private List<NeptuneIdentifiedObject> executeImport(INeptuneManager<NeptuneIdentifiedObject> manager, Map<String, List<String>> parameters) { String reportFileName = getSimpleString(parameters, "reportfile", ""); String reportFormat = getSimpleString(parameters, "reportformat", "txt"); boolean append = getBoolean(parameters, "append"); String format = getSimpleString(parameters, "format"); PrintStream stream = System.out; String encoding = Charset.defaultCharset().toString(); if (!reportFileName.isEmpty()) { try { if (reportFormat.equals("json")) { encoding = "UTF-8"; } stream = new PrintStream(new FileOutputStream(new File(reportFileName), append), true, encoding); } catch (IOException e) { System.err.println("cannot open file :" + reportFileName + " " + e.getMessage()); reportFileName = ""; } } try { List<FormatDescription> formats = manager.getImportFormats(null); FormatDescription description = null; for (FormatDescription formatDescription : formats) { if (formatDescription.getName().equalsIgnoreCase(format)) { description = formatDescription; break; } } if (description == null) { throw new IllegalArgumentException( "format " + format + " unavailable, check command getImportFormats for list "); } List<ParameterValue> values = new ArrayList<ParameterValue>(); for (ParameterDescription desc : description.getParameterDescriptions()) { String name = desc.getName(); String key = name.toLowerCase(); List<String> vals = parameters.get(key); if (vals == null) { if (desc.isMandatory()) { throw new IllegalArgumentException( "parameter -" + name + " is required, check command getImportFormats for list "); } } else { if (desc.isCollection()) { ListParameterValue val = new ListParameterValue(name); switch (desc.getType()) { case FILEPATH: val.setFilepathList(vals); break; case STRING: val.setStringList(vals); break; case FILENAME: val.setFilenameList(vals); break; } values.add(val); } else { if (vals.size() != 1) { throw new IllegalArgumentException("parameter -" + name + " must be unique, check command getImportFormats for list "); } String simpleval = vals.get(0); SimpleParameterValue val = new SimpleParameterValue(name); switch (desc.getType()) { case FILEPATH: val.setFilepathValue(simpleval); break; case STRING: val.setStringValue(simpleval); break; case FILENAME: val.setFilenameValue(simpleval); break; case BOOLEAN: val.setBooleanValue(Boolean.parseBoolean(simpleval)); break; case INTEGER: val.setIntegerValue(Long.parseLong(simpleval)); break; case DATE: val.setDateValue(toCalendar(simpleval)); break; } values.add(val); } } } ReportHolder holder = new ReportHolder(); List<NeptuneIdentifiedObject> beans = manager.doImport(null, format, values, holder); if (holder.getReport() != null) { Report r = holder.getReport(); if (reportFormat.equals("json")) { stream.println(r.toJSON()); } else { stream.println(r.getLocalizedMessage()); printItems(stream, "", r.getItems()); } } if (beans == null || beans.isEmpty()) { System.out.println("import failed"); } else { System.out.println("beans count = " + beans.size()); } return beans; } catch (ChouetteException e) { logger.error(e.getMessage()); Throwable caused = e.getCause(); while (caused != null) { logger.error("caused by " + caused.getMessage()); caused = caused.getCause(); } throw new RuntimeException("import failed , see log for details"); } finally { if (!reportFileName.isEmpty()) { stream.close(); } } }
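The final catch block above walks the entire cause chain in a loop, logging one "caused by" line per nested throwable before converting the failure into a RuntimeException. A minimal version of that loop:

public class CauseChainLogger {

    // Log every nested cause, outermost first, the way the import command does.
    static void logCauseChain(Throwable top) {
        System.err.println(top.getMessage());
        Throwable caused = top.getCause();
        while (caused != null) {
            System.err.println("caused by " + caused.getMessage());
            caused = caused.getCause();
        }
    }

    public static void main(String[] args) {
        Exception nested = new Exception("import failed",
                new IllegalArgumentException("bad parameter",
                        new java.io.FileNotFoundException("input.xml")));
        logCauseChain(nested);
    }
}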
From source file: org.codelibs.fess.crawler.extractor.impl.TikaExtractor.java
@Override public ExtractData getText(final InputStream inputStream, final Map<String, String> params) { if (inputStream == null) { throw new CrawlerSystemException("The inputstream is null."); }//from ww w .jav a2s . c o m final File tempFile; final boolean isByteStream = inputStream instanceof ByteArrayInputStream; if (isByteStream) { inputStream.mark(0); tempFile = null; } else { try { tempFile = File.createTempFile("tikaExtractor-", ".out"); } catch (final IOException e) { throw new ExtractException("Could not create a temp file.", e); } } try { final PrintStream originalOutStream = System.out; final ByteArrayOutputStream outStream = new ByteArrayOutputStream(); System.setOut(new PrintStream(outStream, true)); final PrintStream originalErrStream = System.err; final ByteArrayOutputStream errStream = new ByteArrayOutputStream(); System.setErr(new PrintStream(errStream, true)); try { final String resourceName = params == null ? null : params.get(TikaMetadataKeys.RESOURCE_NAME_KEY); final String contentType = params == null ? null : params.get(HttpHeaders.CONTENT_TYPE); String contentEncoding = params == null ? null : params.get(HttpHeaders.CONTENT_ENCODING); // password for pdf String pdfPassword = params == null ? null : params.get(ExtractData.PDF_PASSWORD); if (pdfPassword == null && params != null) { pdfPassword = getPdfPassword(params.get(ExtractData.URL), resourceName); } final Metadata metadata = createMetadata(resourceName, contentType, contentEncoding, pdfPassword); final Parser parser = new DetectParser(); final ParseContext parseContext = new ParseContext(); parseContext.set(Parser.class, parser); String content = getContent(writer -> { InputStream in = null; try { if (!isByteStream) { try (OutputStream out = new FileOutputStream(tempFile)) { CopyUtil.copy(inputStream, out); } in = new FileInputStream(tempFile); } else { in = inputStream; } parser.parse(in, new BodyContentHandler(writer), metadata, parseContext); } finally { IOUtils.closeQuietly(in); } }, contentEncoding); if (StringUtil.isBlank(content)) { if (resourceName != null) { if (logger.isDebugEnabled()) { logger.debug("retry without a resource name: {}", resourceName); } final Metadata metadata2 = createMetadata(null, contentType, contentEncoding, pdfPassword); content = getContent(writer -> { InputStream in = null; try { if (isByteStream) { inputStream.reset(); in = inputStream; } else { in = new FileInputStream(tempFile); } parser.parse(in, new BodyContentHandler(writer), metadata2, parseContext); } finally { IOUtils.closeQuietly(in); } }, contentEncoding); } if (StringUtil.isBlank(content) && contentType != null) { if (logger.isDebugEnabled()) { logger.debug("retry without a content type: {}", contentType); } final Metadata metadata3 = createMetadata(null, null, contentEncoding, pdfPassword); content = getContent(writer -> { InputStream in = null; try { if (isByteStream) { inputStream.reset(); in = inputStream; } else { in = new FileInputStream(tempFile); } parser.parse(in, new BodyContentHandler(writer), metadata3, parseContext); } finally { IOUtils.closeQuietly(in); } }, contentEncoding); } if (readAsTextIfFailed && StringUtil.isBlank(content)) { if (logger.isDebugEnabled()) { logger.debug("read the content as a text."); } if (contentEncoding == null) { contentEncoding = Constants.UTF_8; } final String enc = contentEncoding; content = getContent(writer -> { BufferedReader br = null; try { if (isByteStream) { inputStream.reset(); br = new BufferedReader(new InputStreamReader(inputStream, enc)); } else { br = new 
BufferedReader( new InputStreamReader(new FileInputStream(tempFile), enc)); } String line; while ((line = br.readLine()) != null) { writer.write(line); } } catch (final Exception e) { logger.warn( "Could not read " + (tempFile != null ? tempFile.getAbsolutePath() : "a byte stream"), e); } finally { IOUtils.closeQuietly(br); } }, contentEncoding); } } final ExtractData extractData = new ExtractData(content); final String[] names = metadata.names(); Arrays.sort(names); for (final String name : names) { extractData.putValues(name, metadata.getValues(name)); } if (logger.isDebugEnabled()) { logger.debug("Result: metadata: {}", metadata); } return extractData; } catch (final TikaException e) { if (e.getMessage().indexOf("bomb") >= 0) { throw e; } final Throwable cause = e.getCause(); if (cause instanceof SAXException) { final Extractor xmlExtractor = crawlerContainer.getComponent("xmlExtractor"); if (xmlExtractor != null) { InputStream in = null; try { if (isByteStream) { inputStream.reset(); in = inputStream; } else { in = new FileInputStream(tempFile); } return xmlExtractor.getText(in, params); } finally { IOUtils.closeQuietly(in); } } } throw e; } finally { if (originalOutStream != null) { System.setOut(originalOutStream); } if (originalErrStream != null) { System.setErr(originalErrStream); } try { if (logger.isInfoEnabled()) { final byte[] bs = outStream.toByteArray(); if (bs.length != 0) { logger.info(new String(bs, outputEncoding)); } } if (logger.isWarnEnabled()) { final byte[] bs = errStream.toByteArray(); if (bs.length != 0) { logger.warn(new String(bs, outputEncoding)); } } } catch (final Exception e) { // NOP } } } catch (final Exception e) { throw new ExtractException("Could not extract a content.", e); } finally { if (tempFile != null && !tempFile.delete()) { logger.warn("Failed to delete " + tempFile.getAbsolutePath()); } } }
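This later (CodeLibs) version of the extractor performs the same getCause() instanceof SAXException test before retrying with the XML extractor. When the same cause test recurs in several places, it can be factored into a small search over the cause chain; the helper below is an illustrative generalization, not code from the project.

import java.util.Optional;

public class Causes {

    // Return the first throwable in the chain (including the top) assignable to the given type.
    static <T extends Throwable> Optional<T> findCause(Throwable top, Class<T> type) {
        for (Throwable t = top; t != null; t = t.getCause()) {
            if (type.isInstance(t)) {
                return Optional.of(type.cast(t));
            }
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        Exception e = new Exception("parse failed",
                new org.xml.sax.SAXException("not well-formed"));
        // Equivalent to e.getCause() instanceof SAXException, but tolerant of deeper nesting.
        System.out.println(findCause(e, org.xml.sax.SAXException.class).isPresent());
    }
}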
From source file: com.couchbase.lite.router.Router.java
public void start() { // Refer to: http://wiki.apache.org/couchdb/Complete_HTTP_API_Reference String method = connection.getRequestMethod(); // We're going to map the request into a method call using reflection based on the method and path. // Accumulate the method name into the string 'message': if ("HEAD".equals(method)) { method = "GET"; }/*from w ww . j a v a2 s .c o m*/ String message = String.format(Locale.ENGLISH, "do_%s", method); // First interpret the components of the request: List<String> path = splitPath(connection.getURL()); if (path == null) { connection.setResponseCode(Status.BAD_REQUEST); try { connection.getResponseOutputStream().close(); } catch (IOException e) { Log.e(TAG, "Error closing empty output stream"); } sendResponse(); return; } int pathLen = path.size(); if (pathLen > 0) { String dbName = path.get(0); if (dbName.startsWith("_")) { message += dbName; // special root path, like /_all_dbs } else { message += "_Database"; if (!Manager.isValidDatabaseName(dbName)) { Header resHeader = connection.getResHeader(); if (resHeader != null) { resHeader.add("Content-Type", CONTENT_TYPE_JSON); } Map<String, Object> result = new HashMap<String, Object>(); result.put("error", "Invalid database"); result.put("status", Status.BAD_REQUEST); connection.setResponseBody(new Body(result)); ByteArrayInputStream bais = new ByteArrayInputStream(connection.getResponseBody().getJson()); connection.setResponseInputStream(bais); connection.setResponseCode(Status.BAD_REQUEST); try { connection.getResponseOutputStream().close(); } catch (IOException e) { Log.e(TAG, "Error closing empty output stream"); } sendResponse(); return; } else { boolean mustExist = false; db = manager.getDatabase(dbName, mustExist); // NOTE: synchronized if (db == null) { connection.setResponseCode(Status.BAD_REQUEST); try { connection.getResponseOutputStream().close(); } catch (IOException e) { Log.e(TAG, "Error closing empty output stream"); } sendResponse(); return; } } } } else { message += "Root"; } String docID = null; if (db != null && pathLen > 1) { message = message.replaceFirst("_Database", "_Document"); // Make sure database exists, then interpret doc name: Status status = openDB(); if (!status.isSuccessful()) { connection.setResponseCode(status.getCode()); try { connection.getResponseOutputStream().close(); } catch (IOException e) { Log.e(TAG, "Error closing empty output stream"); } sendResponse(); return; } String name = path.get(1); if (!name.startsWith("_")) { // Regular document if (!Document.isValidDocumentId(name)) { connection.setResponseCode(Status.BAD_REQUEST); try { connection.getResponseOutputStream().close(); } catch (IOException e) { Log.e(TAG, "Error closing empty output stream"); } sendResponse(); return; } docID = name; } else if ("_design".equals(name) || "_local".equals(name)) { // "_design/____" and "_local/____" are document names if (pathLen <= 2) { connection.setResponseCode(Status.NOT_FOUND); try { connection.getResponseOutputStream().close(); } catch (IOException e) { Log.e(TAG, "Error closing empty output stream"); } sendResponse(); return; } docID = name + '/' + path.get(2); path.set(1, docID); path.remove(2); pathLen--; } else if (name.startsWith("_design") || name.startsWith("_local")) { // This is also a document, just with a URL-encoded "/" docID = name; } else if ("_session".equals(name)) { // There are two possible uri to get a session, /<db>/_session or /_session. // This is for /<db>/_session. 
message = message.replaceFirst("_Document", name); } else { // Special document name like "_all_docs": message += name; if (pathLen > 2) { List<String> subList = path.subList(2, pathLen - 1); StringBuilder sb = new StringBuilder(); Iterator<String> iter = subList.iterator(); while (iter.hasNext()) { sb.append(iter.next()); if (iter.hasNext()) { sb.append('/'); } } docID = sb.toString(); } } } String attachmentName = null; if (docID != null && pathLen > 2) { message = message.replaceFirst("_Document", "_Attachment"); // Interpret attachment name: attachmentName = path.get(2); if (attachmentName.startsWith("_") && docID.startsWith("_design")) { // Design-doc attribute like _info or _view message = message.replaceFirst("_Attachment", "_DesignDocument"); docID = docID.substring(8); // strip the "_design/" prefix attachmentName = pathLen > 3 ? path.get(3) : null; } else { if (pathLen > 3) { List<String> subList = path.subList(2, pathLen); StringBuilder sb = new StringBuilder(); Iterator<String> iter = subList.iterator(); while (iter.hasNext()) { sb.append(iter.next()); if (iter.hasNext()) { //sb.append("%2F"); sb.append('/'); } } attachmentName = sb.toString(); } } } // Send myself a message based on the components: Status status = null; try { Method m = Router.class.getMethod(message, Database.class, String.class, String.class); status = (Status) m.invoke(this, db, docID, attachmentName); } catch (NoSuchMethodException msme) { try { String errorMessage = String.format(Locale.ENGLISH, "Router unable to route request to %s", message); Log.w(TAG, errorMessage); // Check if there is an alternative method: boolean hasAltMethod = false; String curDoMethod = String.format(Locale.ENGLISH, "do_%s", method); String[] methods = { "GET", "POST", "PUT", "DELETE" }; for (String aMethod : methods) { if (!aMethod.equals(method)) { String altDoMethod = String.format(Locale.ENGLISH, "do_%s", aMethod); String altMessage = message.replaceAll(curDoMethod, altDoMethod); try { Method altMethod = Router.class.getMethod(altMessage, Database.class, String.class, String.class); hasAltMethod = true; break; } catch (Exception ex) { // go next } } } Method m = Router.class.getMethod(hasAltMethod ? 
"do_METHOD_NOT_ALLOWED" : "do_UNKNOWN", Database.class, String.class, String.class); status = (Status) m.invoke(this, db, docID, attachmentName); } catch (Exception e) { //default status is internal server error Log.e(TAG, "Router attempted do_UNKNWON fallback, but that threw an exception", e); status = new Status(Status.NOT_FOUND); Map<String, Object> result = new HashMap<String, Object>(); result.put("status", status.getHTTPCode()); result.put("error", status.getHTTPMessage()); result.put("reason", "Router unable to route request"); connection.setResponseBody(new Body(result)); } } catch (Exception e) { String errorMessage = "Router unable to route request to " + message; Log.w(TAG, errorMessage, e); Map<String, Object> result = new HashMap<String, Object>(); if (e.getCause() != null && e.getCause() instanceof CouchbaseLiteException) { status = ((CouchbaseLiteException) e.getCause()).getCBLStatus(); result.put("status", status.getHTTPCode()); result.put("error", status.getHTTPMessage()); result.put("reason", errorMessage + e.getCause().toString()); } else { status = new Status(Status.NOT_FOUND); result.put("status", status.getHTTPCode()); result.put("error", status.getHTTPMessage()); result.put("reason", errorMessage + e.toString()); } connection.setResponseBody(new Body(result)); } // If response is ready (nonzero status), tell my client about it: if (status.getCode() != 0) { // NOTE: processRequestRanges() is not implemented for CBL Java Core // Configure response headers: status = sendResponseHeaders(status); connection.setResponseCode(status.getCode()); if (status.isSuccessful() && connection.getResponseBody() == null && connection.getHeaderField("Content-Type") == null && dontOverwriteBody == false) { connection.setResponseBody(new Body("{\"ok\":true}".getBytes())); } if (!status.isSuccessful() && connection.getResponseBody() == null) { Map<String, Object> result = new HashMap<String, Object>(); result.put("status", status.getCode()); result.put("error", status.getHTTPMessage()); connection.setResponseBody(new Body(result)); connection.getResHeader().add("Content-Type", CONTENT_TYPE_JSON); } setResponse(); sendResponse(); } else { // NOTE code == 0 waiting = true; } if (waiting && db != null) { Log.v(TAG, "waiting=true & db!=null: call Database.addDatabaseListener()"); db.addDatabaseListener(this); } }
From source file: org.commoncrawl.service.crawler.CrawlLog.java
/** perform the actual checkpoint work here ... **/ private void doCheckpoint() { // at this point, we should be in the async thread, and all flusher // activities are blocked ... LOG.info("CrawlLog Checkpoint - Starting "); // collect all necessary information from thread-unsafe data structure now // (in async thread context) final Set<Long> activeSegments = new HashSet<Long>(); try {//ww w.jav a 2 s.co m // add all active segment ids to our key set ... activeSegments.addAll(_loggers.keySet()); LOG.info("CrawlLog Checkpoint - Preparing CrawlLog Files"); // checkpoint crawl log ... checkpointLocalCrawlLog(); LOG.info("CrawlLog Checkpoint - Preparing Segment Log Files"); // next checkpoint all active segment logs ... for (CrawlSegmentLog segmentLog : _loggers.values()) { segmentLog.checkpointLocalLog(); } LOG.info("CrawlLog Checkpoint - Ready for HDFS Transfer"); } catch (IOException e) { LOG.error("Checkpoint failed with Exception:" + CCStringUtils.stringifyException(e)); } // spawn a thread to do most of the blocking io ... _threadPool.submit(new ConcurrentTask<Boolean>(_eventLoop, new Callable<Boolean>() { public Boolean call() throws Exception { // we need to track these in case of failure ... Vector<Path> segmentLogStagingPaths = new Vector<Path>(); Vector<Path> segmentLogFinalPaths = new Vector<Path>(); // get the file system final FileSystem hdfs = CrawlEnvironment.getDefaultFileSystem(); try { LOG.info("CrawlLog Checkpoint - Transferring CrawlLog to HDFS"); // construct a target path (where we are going to store the // checkpointed crawl log ) Path stagingDirectory = new Path(CrawlEnvironment.getCheckpointStagingDirectory()); SequenceFileCrawlURLWriter hdfsWriter = new SequenceFileCrawlURLWriter( CrawlEnvironment.getHadoopConfig(), hdfs, stagingDirectory, getNodeName(), _checkpointId); try { // write out crawl log to hdfs ... transferLocalCheckpointLog(getCheckpointPath(_rootDirectory), hdfsWriter, _checkpointId); } catch (Exception e) { LOG.error("HDFS Write of CrawlLog failed. Deleting tempFiles:" + hdfsWriter.getFilenames() + " Exception:" + CCStringUtils.stringifyException(e)); // close writer hdfsWriter.close(); // delete any hdfs output ... for (Path path : hdfsWriter.getFilenames()) { LOG.info("Deleting temp (HDFS) checkpoint file:" + path); hdfs.delete(path, false); } throw e; } finally { hdfsWriter.close(); } LOG.info("CrawlLog Checkpoint - Transferring CrawlSegment Logs"); // and next for every segment for (long packedLogId : activeSegments) { File segmentLogPath = CrawlSegmentLog.buildCheckpointPath(_rootDirectory, getListIdFromLogId(packedLogId), getSegmentIdFromLogId(packedLogId)); // LOG.info("CrawlLog Checkpoint - Transferring CrawlSegment Log for Segment:" // + segmentId); // copy the segment log ... Path remoteLogFilePath = transferLocalSegmentLog(hdfs, segmentLogPath, _checkpointId, getListIdFromLogId(packedLogId), getSegmentIdFromLogId(packedLogId)); // if path is not null (data was copied) ... if (remoteLogFilePath != null) { // add it to vector ... segmentLogStagingPaths.add(remoteLogFilePath); // and add final path to vector while we are at it ... segmentLogFinalPaths.add(getFinalSegmentLogPath(hdfs, _checkpointId, getListIdFromLogId(packedLogId), getSegmentIdFromLogId(packedLogId))); } } LOG.info("CrawlLog Checkpoint - Finished Transferring CrawlSegment Logs"); // now if we got here ... all hdfs transfers succeeded ... // go ahead and move checkpoint log from staging to final data // directory ... 
Path checkpointDirectory = new Path(CrawlEnvironment.getCheckpointDataDirectory()); // if no checkpoint data directory ... create one ... if (!hdfs.exists(checkpointDirectory)) hdfs.mkdirs(checkpointDirectory); for (Path checkpointTempFilePath : hdfsWriter.getFilenames()) { Path checkpointFinalPath = new Path(checkpointDirectory, checkpointTempFilePath.getName()); LOG.info("Promoting Checking File From:" + checkpointTempFilePath + " to:" + checkpointFinalPath); // and essentially move the crawl log file from staging to data // directory .. boolean success = hdfs.rename(checkpointTempFilePath, checkpointFinalPath); if (!success) { throw new IOException("Failed to Rename Checkpoint Temp:" + checkpointTempFilePath + " to:" + checkpointFinalPath); } } // and now do the same thing for each segment log files for (int i = 0; i < segmentLogStagingPaths.size(); ++i) { hdfs.rename(segmentLogStagingPaths.get(i), segmentLogFinalPaths.get(i)); } // if we got here checkpoint was successfull... return true; } catch (Exception e) { LOG.error("Checkpoint:" + _checkpointId + " FAILED with exception:" + CCStringUtils.stringifyException(e)); for (Path segmentPath : segmentLogStagingPaths) { hdfs.delete(segmentPath, false); } for (Path segmentPath : segmentLogFinalPaths) { hdfs.delete(segmentPath, false); } throw e; } } }, new CompletionCallback<Boolean>() { public void taskComplete(Boolean updateResult) { Vector<Long> completedSegmentList = new Vector<Long>(); LOG.info("CrawlLog Checkpoint - Finalizing CrawlLog Checkpoint"); // delete the local checkpoint log ... finalizeCheckpoint(); LOG.info("CrawlLog Checkpoint - Finalizing CrawlSegmentLogs"); for (CrawlSegmentLog segmentLog : _loggers.values()) { // LOG.info("CrawlLog Checkpoint - Finalizing CrawlSegmentLog for Segment:" // + segmentLog.getSegmentId()); // finalize the checkpoint on the segment log ... segmentLog.finalizeCheckpoint(); // and check to see if the segment has been completed ... if (segmentLog.isSegmentComplete()) { // if so, add it our completed segments list ... completedSegmentList .add(makeSegmentLogId(segmentLog.getListId(), segmentLog.getSegmentId())); } } // now for all completed segments ... purge hdfs logs ... for (long packedSegmentId : completedSegmentList) { try { LOG.info( "CrawlLog Checkpoint - Purging HDFS CrawlSegmentLogs from Completed Segment. List:" + getListIdFromLogId(packedSegmentId) + " Segment:" + getSegmentIdFromLogId(packedSegmentId)); // purge hdfs files (and create a completion log file) purgeHDFSSegmentLogs(CrawlEnvironment.getDefaultFileSystem(), getListIdFromLogId(packedSegmentId), getSegmentIdFromLogId(packedSegmentId)); LOG.info( "CrawlLog Checkpoint - Purging Local CrawlSegmentLogs from Completed Segment. List:" + getListIdFromLogId(packedSegmentId) + " Segment:" + getSegmentIdFromLogId(packedSegmentId)); // and purge local files as well ... _loggers.get(packedSegmentId).purgeLocalFiles(); } catch (IOException e) { LOG.error("Purge SegmentLog for Segment List:" + getListIdFromLogId(packedSegmentId) + " Segment:" + getSegmentIdFromLogId(packedSegmentId) + " threw IOException:" + CCStringUtils.stringifyException(e)); } LOG.info("CrawlLog Checkpoint - DeRegistering Segment List:" + getListIdFromLogId(packedSegmentId) + " Segment:" + getSegmentIdFromLogId(packedSegmentId) + " From CrawlLog"); // no matter what ... unload the segment ... 
_loggers.remove(packedSegmentId); } CheckpointCompletionCallback callback = _checkpointCompletionCallback; long checkpointId = _checkpointId; // otherwise transition to a checkpoint in progress state _checkpointCompletionCallback = null; _checkpointId = -1; LOG.info("CrawlLog Checkpoint - Checkpoint Complete - Initiating Callback"); // and complete transaction ... callback.checkpointComplete(checkpointId, completedSegmentList); } public void taskFailed(Exception e) { // all failures are critical in this particular task ... LOG.error("Crawl Log FLUSH Threw Exception:" + CCStringUtils.stringifyException(e)); // revert checkpoint logs ... abortCheckpoint(); for (CrawlSegmentLog segmentLog : _loggers.values()) { segmentLog.abortCheckpoint(); } CheckpointCompletionCallback callback = _checkpointCompletionCallback; long checkpointId = _checkpointId; // otherwise transition to a checkpoint in progress state _checkpointCompletionCallback = null; _checkpointId = -1; // now check to see if this was corrupt crawl log exception if (e.getCause() instanceof CorruptCrawlLogException) { // ACK!!! LOG.fatal("Corrupt CrawlLog detected with Exception:" + CCStringUtils.stringifyException(e)); try { // this is a serious error ... time to purge the crawl log directory // altogether ... purgeActiveLog(); // and all active segment logs as well... for (CrawlSegmentLog segmentLog : _loggers.values()) { segmentLog.purgeActiveLog(); } } catch (IOException e2) { LOG.error("IOException during Segment Log PURGE:" + CCStringUtils.stringifyException(e2)); } // time to die hard ... throw new RuntimeException(e); } // and complete transaction ... callback.checkpointFailed(checkpointId, e); } })); }
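In the taskFailed callback above, getCause() decides how severe the failure is: a CorruptCrawlLogException cause means the local logs are purged and the process fails hard, while anything else is reported through the normal checkpoint-failed callback. A trimmed sketch of that severity check; CorruptLogException and purgeLocalLogs are illustrative stand-ins.

public class CheckpointFailureHandler {

    // Illustrative stand-in for the crawler's CorruptCrawlLogException.
    static class CorruptLogException extends java.io.IOException {
        CorruptLogException(String msg) { super(msg); }
    }

    static void purgeLocalLogs() {
        System.err.println("purging local crawl logs");
    }

    static void onTaskFailed(Exception e) {
        if (e.getCause() instanceof CorruptLogException) {
            // A corrupt log cannot be retried: drop the local state and fail loudly.
            purgeLocalLogs();
            throw new RuntimeException(e);
        }
        // Any other failure is reported; the checkpoint can be attempted again later.
        System.err.println("checkpoint failed, will retry: " + e.getMessage());
    }

    public static void main(String[] args) {
        onTaskFailed(new Exception("flush failed", new java.io.IOException("disk full")));
    }
}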
From source file: com.virtusa.akura.student.controller.StudentDetailController.java
/**
 * @param student - Student obj.
 * @param result - BindingResult.
 * @param session - HttpSession
 * @param request - represents an instance of HttpServletRequest
 * @param model - a hashMap that contains student's data
 * @return name of the view which is redirected to.
 * @throws AkuraAppException - AkuraAppException.
 */
@RequestMapping(REQ_MAP_SAVE_STUDENT_DETAIL)
public String onSubmit(@ModelAttribute(MODEL_ATT_STUDENT) Student student, BindingResult result,
        HttpSession session, HttpServletRequest request, ModelMap model) throws AkuraAppException {

    String returnResult = VIEW_GET_STUDENT_DETAIL_PAGE;
    studentDetailValidator.validate(student, result);

    String selectedCountryCodeRes = request.getParameter(SELECTED_COUNTRYCODE_RES);
    String selectedCountryCodeMob = request.getParameter(SELECTED_COUNTRYCODE_MOB);
    String selectedCountryCodeEmgRes = request.getParameter(SELECTED_COUNTRYCODE_EMG_RES);
    String selectedCountryCodeEmgMob = request.getParameter(SELECTED_COUNTRYCODE_EMG_MOB);
    String selectedCountryCodeEmgOff = request.getParameter(SELECTED_COUNTRYCODE_EMG_OFFICE);

    try {
        if (result.hasErrors()) {
            handleValidationError(student, model);
            resetCountryFlags(selectedCountryCodeRes, selectedCountryCodeMob, selectedCountryCodeEmgRes,
                    selectedCountryCodeEmgMob, selectedCountryCodeEmgOff, model);
            return VIEW_GET_STUDENT_DETAIL_PAGE;
        }

        trimProperties(student);

        UserInfo userInfo = (UserInfo) session.getAttribute(USER);
        if (userInfo instanceof StudentDetails
                && !userInfo.getUserLevelIdentifier().equals(student.getAdmissionNo())) {
            handleValidationError(student, model);
            result.rejectValue(STUDENT_ID, ERR_TUDENT_ADMISSIONNO_VIOLATE);
            resetCountryFlags(selectedCountryCodeRes, selectedCountryCodeMob, selectedCountryCodeEmgRes,
                    selectedCountryCodeEmgMob, selectedCountryCodeEmgOff, model);
            return VIEW_GET_STUDENT_DETAIL_PAGE;
        }

        // if check for initial save
        if (student != null && student.getStudentId() != 0) {
            Student stuObDB = studentService.findStudent(student.getStudentId());
            if (stuObDB == null) {
                student.setStudentId(0);
            } else {
                String admissionNoDB = stuObDB.getAdmissionNo();
                if (!admissionNoDB.equals(student.getAdmissionNo())) {
                    if (studentService.isAdmissionNoExist(student.getAdmissionNo())) {
                        handleValidationError(student, model);
                        result.rejectValue(STUDENT_ID, ERR_STUDENT_ADMISSIONNO_DUPLCATE);
                        resetCountryFlags(selectedCountryCodeRes, selectedCountryCodeMob, selectedCountryCodeEmgRes,
                                selectedCountryCodeEmgMob, selectedCountryCodeEmgOff, model);
                        return VIEW_GET_STUDENT_DETAIL_PAGE;
                    } else {
                        if (!student.getSiblingAdmitionNo().trim().isEmpty()) {
                            if (!studentService.isAdmissionNoExist(student.getSiblingAdmitionNo())
                                    || student.getAdmissionNo().equals(student.getSiblingAdmitionNo())) {
                                result.rejectValue(SIBLING_ADMISSIONNO, ERR_SIBLING_ADMISSIONNO_VIOLATE);
                                resetCountryFlags(selectedCountryCodeRes, selectedCountryCodeMob,
                                        selectedCountryCodeEmgRes, selectedCountryCodeEmgMob,
                                        selectedCountryCodeEmgOff, model);
                                return VIEW_GET_STUDENT_DETAIL_PAGE;
                            }
                        }
                        if (!student.getResidenceNo().isEmpty() && !selectedCountryCodeRes.isEmpty()) {
                            if (student.getResidenceNo() != null
                                    && !selectedCountryCodeRes.equals(AkuraConstant.STRING_ZERO)
                                    && PhoneNumberValidateUtil.isValidPhoneNumber(student.getResidenceNo(),
                                            selectedCountryCodeRes)) {
                                displayResidencePhoneNumberDetails(student, selectedCountryCodeRes);
                            } else {
                                displayCountryFlagsWhenError(student, model, selectedCountryCodeRes,
                                        selectedCountryCodeMob, selectedCountryCodeEmgRes,
                                        selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                                return VIEW_GET_STUDENT_DETAIL_PAGE;
                            }
                        }
                        if (!student.getMobileNo().isEmpty() && !selectedCountryCodeMob.isEmpty()) {
                            if (student.getMobileNo() != null
                                    && !selectedCountryCodeMob.equals(AkuraConstant.STRING_ZERO)
                                    && PhoneNumberValidateUtil.isValidPhoneNumber(student.getMobileNo(),
                                            selectedCountryCodeMob)) {
                                displayMobilePhoneNumberDetails(student, selectedCountryCodeMob);
                            } else {
                                displayCountryFlagsWhenError(student, model, selectedCountryCodeRes,
                                        selectedCountryCodeMob, selectedCountryCodeEmgRes,
                                        selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                                return VIEW_GET_STUDENT_DETAIL_PAGE;
                            }
                        }
                        if (!student.getEmergencyContactResidenceNo().isEmpty() && !selectedCountryCodeEmgRes.isEmpty()) {
                            if (student.getEmergencyContactResidenceNo() != null
                                    && !selectedCountryCodeEmgRes.equals(AkuraConstant.STRING_ZERO)
                                    && PhoneNumberValidateUtil.isValidPhoneNumber(
                                            student.getEmergencyContactResidenceNo(), selectedCountryCodeEmgRes)) {
                                displayEmgResidencePhoneNumberDetails(student, selectedCountryCodeEmgRes);
                            } else {
                                displayCountryFlagsWhenError(student, model, selectedCountryCodeRes,
                                        selectedCountryCodeMob, selectedCountryCodeEmgRes,
                                        selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                                return VIEW_GET_STUDENT_DETAIL_PAGE;
                            }
                        }
                        if (!student.getEmergencyContactMobileNo().isEmpty() && !selectedCountryCodeEmgMob.isEmpty()) {
                            if (student.getEmergencyContactMobileNo() != null
                                    && !selectedCountryCodeEmgMob.equals(AkuraConstant.STRING_ZERO)
                                    && PhoneNumberValidateUtil.isValidPhoneNumber(
                                            student.getEmergencyContactMobileNo(), selectedCountryCodeEmgMob)) {
                                displayEmgMobilePhoneNumberDetails(student, selectedCountryCodeEmgMob);
                            } else {
                                displayCountryFlagsWhenError(student, model, selectedCountryCodeRes,
                                        selectedCountryCodeMob, selectedCountryCodeEmgRes,
                                        selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                                return VIEW_GET_STUDENT_DETAIL_PAGE;
                            }
                        }
                        if (!student.getEmergencyContactOfficeNo().isEmpty() && !selectedCountryCodeEmgOff.isEmpty()) {
                            if (student.getEmergencyContactOfficeNo() != null
                                    && !selectedCountryCodeEmgOff.equals(AkuraConstant.STRING_ZERO)
                                    && PhoneNumberValidateUtil.isValidPhoneNumber(
                                            student.getEmergencyContactOfficeNo(), selectedCountryCodeEmgOff)) {
                                displayEmgOfficePhoneNumberDetails(student, selectedCountryCodeEmgOff);
                            } else {
                                displayCountryFlagsWhenError(student, model, selectedCountryCodeRes,
                                        selectedCountryCodeMob, selectedCountryCodeEmgRes,
                                        selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                                return VIEW_GET_STUDENT_DETAIL_PAGE;
                            }
                        }

                        updateStudent(student);

                        // Update if user login exist for this student
                        UserLogin userLogin = userService.getUserLoginByIdentificationNo(stuObDB.getAdmissionNo());
                        if (userLogin != null) {
                            userLogin.setUserIdentificationNo(student.getAdmissionNo());
                            userService.updateUser(userLogin);
                        }

                        // updated message pass through query string
                        String successUpdate = new ErrorMsgLoader().getErrorMessage(COMMON_MESSAGE_SUCCESSFULLY_UPDATED);
                        return VIEW_STUDENT_DETAIL + QUERY_STRING_UPDATE + successUpdate;
                    }
                } else {
                    try {
                        if (student.getMPhoto() != null) {
                            MultipartFile multipartFile = student.getMPhoto();
                            if (multipartFile.getSize() > 0) {
                                student.setPhoto(multipartFile.getBytes());
                            }
                        }
                    } catch (IOException e) {
                        LOG.error(ERROR_WHILE_RETRIEVING_FILE + e.toString());
                        throw new AkuraAppException(AkuraConstant.FILE_NOT_FOUND, e);
                    }
                    if (!student.getSiblingAdmitionNo().trim().isEmpty()) {
                        if (!studentService.isAdmissionNoExist(student.getSiblingAdmitionNo())
                                || student.getAdmissionNo().equals(student.getSiblingAdmitionNo())) {
                            result.rejectValue(SIBLING_ADMISSIONNO, ERR_SIBLING_ADMISSIONNO_VIOLATE);
                            returnResult = VIEW_GET_STUDENT_DETAIL_PAGE;
                            resetCountryFlags(selectedCountryCodeRes, selectedCountryCodeMob, selectedCountryCodeEmgRes,
                                    selectedCountryCodeEmgMob, selectedCountryCodeEmgOff, model);
                            return VIEW_GET_STUDENT_DETAIL_PAGE;
                        }
                    }
                    if (!student.getResidenceNo().isEmpty() && !selectedCountryCodeRes.isEmpty()) {
                        if (student.getResidenceNo() != null
                                && !selectedCountryCodeRes.equals(AkuraConstant.STRING_ZERO)
                                && PhoneNumberValidateUtil.isValidPhoneNumber(student.getResidenceNo(),
                                        selectedCountryCodeRes)) {
                            displayResidencePhoneNumberDetails(student, selectedCountryCodeRes);
                        } else {
                            displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                    selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                            return VIEW_GET_STUDENT_DETAIL_PAGE;
                        }
                    }
                    if (!student.getMobileNo().isEmpty() && !selectedCountryCodeMob.isEmpty()) {
                        if (student.getMobileNo() != null
                                && !selectedCountryCodeMob.equals(AkuraConstant.STRING_ZERO)
                                && PhoneNumberValidateUtil.isValidPhoneNumber(student.getMobileNo(),
                                        selectedCountryCodeMob)) {
                            displayMobilePhoneNumberDetails(student, selectedCountryCodeMob);
                        } else {
                            displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                    selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                            return VIEW_GET_STUDENT_DETAIL_PAGE;
                        }
                    }
                    if (!student.getEmergencyContactResidenceNo().isEmpty() && !selectedCountryCodeEmgRes.isEmpty()) {
                        if (student.getEmergencyContactResidenceNo() != null
                                && !selectedCountryCodeEmgRes.equals(AkuraConstant.STRING_ZERO)
                                && PhoneNumberValidateUtil.isValidPhoneNumber(
                                        student.getEmergencyContactResidenceNo(), selectedCountryCodeEmgRes)) {
                            displayEmgResidencePhoneNumberDetails(student, selectedCountryCodeEmgRes);
                        } else {
                            displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                    selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                            return VIEW_GET_STUDENT_DETAIL_PAGE;
                        }
                    }
                    if (!student.getEmergencyContactMobileNo().isEmpty() && !selectedCountryCodeEmgMob.isEmpty()) {
                        if (student.getEmergencyContactMobileNo() != null
                                && !selectedCountryCodeEmgMob.equals(AkuraConstant.STRING_ZERO)
                                && PhoneNumberValidateUtil.isValidPhoneNumber(
                                        student.getEmergencyContactMobileNo(), selectedCountryCodeEmgMob)) {
                            displayEmgMobilePhoneNumberDetails(student, selectedCountryCodeEmgMob);
                        } else {
                            displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                    selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                            return VIEW_GET_STUDENT_DETAIL_PAGE;
                        }
                    }
                    if (!student.getEmergencyContactOfficeNo().isEmpty() && !selectedCountryCodeEmgOff.isEmpty()) {
                        if (student.getEmergencyContactOfficeNo() != null
                                && !selectedCountryCodeEmgOff.equals(AkuraConstant.STRING_ZERO)
                                && PhoneNumberValidateUtil.isValidPhoneNumber(
                                        student.getEmergencyContactOfficeNo(), selectedCountryCodeEmgOff)) {
                            displayEmgOfficePhoneNumberDetails(student, selectedCountryCodeEmgOff);
                        } else {
                            displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                    selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                            return VIEW_GET_STUDENT_DETAIL_PAGE;
                        }
                    }

                    updateStudent(student);

                    // updated message pass through query string
                    String successUpdate = new ErrorMsgLoader().getErrorMessage(COMMON_MESSAGE_SUCCESSFULLY_UPDATED);
                    return VIEW_STUDENT_DETAIL + QUERY_STRING_UPDATE + successUpdate;
                }
            }
        }

        if (student != null && student.getStudentId() == 0) {
            if (studentService.isAdmissionNoExist(student.getAdmissionNo())) {
                model.addAttribute(MODEL_ATT_IMAGE_PATH, RESOURCES_NO_PROFILE_IMAGE);
                result.rejectValue(STUDENT_ID, ERR_STUDENT_ADMISSIONNO_DUPLCATE);
                returnResult = VIEW_GET_STUDENT_DETAIL_PAGE;
                resetCountryFlags(selectedCountryCodeRes, selectedCountryCodeMob, selectedCountryCodeEmgRes,
                        selectedCountryCodeEmgMob, selectedCountryCodeEmgOff, model);
                return VIEW_GET_STUDENT_DETAIL_PAGE;
            } else {
                if (student.getMPhoto() != null) {
                    try {
                        MultipartFile multipartFile = student.getMPhoto();
                        if (multipartFile.getSize() > 0) {
                            student.setPhoto(multipartFile.getBytes());
                        }
                    } catch (IOException e) {
                        LOG.error(ERROR_WHILE_RETRIEVING_FILE + e.toString());
                        throw new AkuraAppException(AkuraConstant.FILE_NOT_FOUND, e);
                    }
                }
                if (!student.getSiblingAdmitionNo().trim().isEmpty()) {
                    if (!studentService.isAdmissionNoExist(student.getSiblingAdmitionNo())
                            || student.getAdmissionNo().equals(student.getSiblingAdmitionNo())) {
                        result.rejectValue(SIBLING_ADMISSIONNO, ERR_SIBLING_ADMISSIONNO_VIOLATE);
                        returnResult = VIEW_GET_STUDENT_DETAIL_PAGE;
                        resetCountryFlags(selectedCountryCodeRes, selectedCountryCodeMob, selectedCountryCodeEmgRes,
                                selectedCountryCodeEmgMob, selectedCountryCodeEmgOff, model);
                        return VIEW_GET_STUDENT_DETAIL_PAGE;
                    }
                }

                student.setStatusId(1);

                if (!student.getResidenceNo().isEmpty() && !selectedCountryCodeRes.isEmpty()) {
                    if (student.getResidenceNo() != null
                            && !selectedCountryCodeRes.equals(AkuraConstant.STRING_ZERO)
                            && PhoneNumberValidateUtil.isValidPhoneNumber(student.getResidenceNo(),
                                    selectedCountryCodeRes)) {
                        displayResidencePhoneNumberDetails(student, selectedCountryCodeRes);
                    } else {
                        displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                        return VIEW_GET_STUDENT_DETAIL_PAGE;
                    }
                }
                if (!student.getMobileNo().isEmpty() && !selectedCountryCodeMob.isEmpty()) {
                    if (student.getMobileNo() != null
                            && !selectedCountryCodeMob.equals(AkuraConstant.STRING_ZERO)
                            && PhoneNumberValidateUtil.isValidPhoneNumber(student.getMobileNo(),
                                    selectedCountryCodeMob)) {
                        displayMobilePhoneNumberDetails(student, selectedCountryCodeMob);
                    } else {
                        displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                        return VIEW_GET_STUDENT_DETAIL_PAGE;
                    }
                }
                if (!student.getEmergencyContactResidenceNo().isEmpty() && !selectedCountryCodeEmgRes.isEmpty()) {
                    if (student.getEmergencyContactResidenceNo() != null
                            && !selectedCountryCodeEmgRes.equals(AkuraConstant.STRING_ZERO)
                            && PhoneNumberValidateUtil.isValidPhoneNumber(
                                    student.getEmergencyContactResidenceNo(), selectedCountryCodeEmgRes)) {
                        displayEmgResidencePhoneNumberDetails(student, selectedCountryCodeEmgRes);
                    } else {
                        displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                        return VIEW_GET_STUDENT_DETAIL_PAGE;
                    }
                }
                if (!student.getEmergencyContactMobileNo().isEmpty() && !selectedCountryCodeEmgMob.isEmpty()) {
                    if (student.getEmergencyContactMobileNo() != null
                            && !selectedCountryCodeEmgMob.equals(AkuraConstant.STRING_ZERO)
                            && PhoneNumberValidateUtil.isValidPhoneNumber(student.getEmergencyContactMobileNo(),
                                    selectedCountryCodeEmgMob)) {
                        displayEmgMobilePhoneNumberDetails(student, selectedCountryCodeEmgMob);
                    } else {
                        displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                        return VIEW_GET_STUDENT_DETAIL_PAGE;
                    }
                }
                if (!student.getEmergencyContactOfficeNo().isEmpty() && !selectedCountryCodeEmgOff.isEmpty()) {
                    if (student.getEmergencyContactOfficeNo() != null
                            && !selectedCountryCodeEmgOff.equals(AkuraConstant.STRING_ZERO)
                            && PhoneNumberValidateUtil.isValidPhoneNumber(student.getEmergencyContactOfficeNo(),
                                    selectedCountryCodeEmgOff)) {
                        displayEmgOfficePhoneNumberDetails(student, selectedCountryCodeEmgOff);
                    } else {
                        displayCountryFlagsWhenError(student, model, selectedCountryCodeRes, selectedCountryCodeMob,
                                selectedCountryCodeEmgRes, selectedCountryCodeEmgMob, selectedCountryCodeEmgOff);
                        return VIEW_GET_STUDENT_DETAIL_PAGE;
                    }
                }

                studentService.saveStudent(student);

                if (checkStudentDisabilityFilled(student.getStudentDisability())) {
                    student.getStudentDisability().setStudentId(student.getStudentId());
                    trimStudentDisabilityObj(student.getStudentDisability());
                    studentService.saveStudentDisability(student.getStudentDisability());
                }

                returnResult = VIEW_NEW_STUDENT_DETAIL;
            }
        }
    } catch (AkuraAppException e) {
        if (e.getCause() instanceof TransientDataAccessResourceException) {
            String message = new ErrorMsgLoader().getErrorMessage(IMAGE_DATABASE_SIZE);
            model.addAttribute(ERROR_MESSAGE, message);
            resetCountryFlags(selectedCountryCodeRes, selectedCountryCodeMob, selectedCountryCodeEmgRes,
                    selectedCountryCodeEmgMob, selectedCountryCodeEmgOff, model);
            return returnResult;
        }
    }
    return VIEW_NEW_STUDENT_DETAIL;
}
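The example above wraps a low-level IOException in AkuraAppException and later branches on the wrapped cause (e.getCause() instanceof TransientDataAccessResourceException) to pick the view to return. The following is a minimal, self-contained sketch of that same cause-inspection pattern; AppException and saveProfilePhoto are hypothetical stand-ins introduced for illustration and are not part of the Akura sources.

import java.io.IOException;

public class CauseInspectionSketch {

    // Hypothetical application exception standing in for a wrapper such as AkuraAppException.
    static class AppException extends Exception {
        AppException(String message, Throwable cause) {
            super(message, cause);
        }
    }

    // Simulates a lower layer that fails with an IOException and rethrows it wrapped.
    static void saveProfilePhoto() throws AppException {
        try {
            throw new IOException("photo stream unavailable");
        } catch (IOException e) {
            throw new AppException("Could not store the uploaded photo.", e);
        }
    }

    public static void main(String[] args) {
        try {
            saveProfilePhoto();
        } catch (AppException e) {
            // Branch on the root cause, mirroring the
            // "e.getCause() instanceof TransientDataAccessResourceException" check above.
            if (e.getCause() instanceof IOException) {
                System.out.println("I/O problem: " + e.getCause().getMessage());
            } else {
                System.out.println("Unexpected failure: " + e.getMessage());
            }
        }
    }
}

Because the wrapper is built with the two-argument super(message, cause) constructor, the original IOException stays reachable through getCause(), which is what makes this kind of targeted handling possible.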