List of usage examples for java.io BufferedInputStream mark
public synchronized void mark(int readlimit)
Marks the current position in this input stream. See the general contract of the mark method of InputStream.
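All of the examples on this page use the same peek-and-rewind idiom: call mark(readlimit) to remember the current position, read a few bytes, then call reset() to hand an untouched stream to the next consumer. A minimal, self-contained sketch of that contract (the class name and input data are made up for illustration):

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;

// Minimal sketch of the mark/reset contract: mark(readlimit) remembers the
// current position and reset() rewinds to it, as long as no more than
// readlimit bytes were read in between.
public class MarkResetSketch {
    public static void main(String[] args) throws IOException {
        BufferedInputStream in = new BufferedInputStream(
                new ByteArrayInputStream("hello".getBytes("UTF-8")));
        in.mark(2);              // allow up to 2 bytes to be read before reset()
        int first = in.read();   // peek at the first byte: 'h'
        in.reset();              // rewind; the next read() returns 'h' again
        System.out.println((char) first + " == " + (char) in.read());
        in.close();
    }
}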
From source file:org.craftercms.studio.impl.deployment.PreviewDeployer.java
protected void writeFile(final String path, final InputStream content) throws IOException {
    BufferedInputStream contentStream = new BufferedInputStream(content);
    StringBuilder sbSavePath = new StringBuilder(previewStoreRootPath);
    sbSavePath.append(File.separator);
    sbSavePath.append(path);
    String savePath = sbSavePath.toString();
    savePath = savePath.replaceAll(File.separator + "+", File.separator);
    File file = new File(savePath);
    OutputStream outputStream = null;
    try {
        contentStream.mark(0);
        contentStream.reset();
        // create new file if doesn't exist
        if (!file.exists()) {
            file.getParentFile().mkdirs();
            file.createNewFile();
        }
        outputStream = new FileOutputStream(file);
        IOUtils.copy(contentStream, outputStream);
        outputStream.flush();
    } catch (FileNotFoundException e) {
        if (log.isErrorEnabled()) {
            log.error("Error: not able to open output stream for file " + path);
        }
        throw e;
    } catch (IOException e) {
        if (log.isErrorEnabled()) {
            log.error("Error: not able to write file " + path);
        }
        throw e;
    } finally {
        IOUtils.closeQuietly(outputStream);
    }
}
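In PreviewDeployer the mark(0) call is immediately followed by reset(), so no data is read in between and the pair effectively just re-pins the stream at its current position before IOUtils.copy consumes it. If the goal were to inspect the content before writing, the mark would normally cover the bytes being peeked, along these lines (isEmpty is a hypothetical helper, not part of the project):

// Hypothetical helper: report whether the stream has any data without
// consuming anything the caller still needs. Assumes mark is supported.
private static boolean isEmpty(BufferedInputStream in) throws IOException {
    in.mark(1);               // readlimit covers the single byte we peek at
    int first = in.read();    // -1 means end of stream
    in.reset();               // rewind so the caller still sees the full content
    return first == -1;
}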
From source file:org.infoscoop.request.filter.CalendarFilter.java
public byte[] process(String aContentType, String startDateStr, String endDateStr, InputStream responseStream) throws IOException { String charset = null;//w w w. ja v a 2 s. c o m String contentType = null; if (aContentType != null) { String[] str = aContentType.split("="); if (str != null) contentType = str[0]; if (str.length > 1) { charset = str[1]; } } BufferedInputStream bis = new BufferedInputStream(responseStream); //Processing of skipping to the first character int temp = 0; boolean noContent = false; bis.mark(1); while (true) { try { temp = bis.read(); if (temp == -1 || (temp >= 0x20 && temp <= 0x7e)) { if (temp == -1) { noContent = true; } break; } else { bis.mark(1); } } catch (IOException e) { log.error("", e); break; } } // if 200 and empty if (noContent) { bis.close(); return process("[]", 0); } try { bis.reset(); } catch (IOException e2) { } //Processing of skipping to the first character up to here Reader reader = null; boolean isXML = false; try { if (contentType != null && (contentType.startsWith("text/xml") || contentType.startsWith("application/xml") || contentType.startsWith("application/rss+xml") || contentType.startsWith("application/rdf+xml"))) { isXML = true; } else { char firstChar = (char) bis.read(); if (firstChar == '<') { isXML = true; } bis.reset(); } } catch (IOException e) { log.error("", e); } if (isXML) { if (isCalDAV(bis)) { StringBuffer buf = new StringBuffer(); BufferedReader br = new BufferedReader(new InputStreamReader(bis, charset)); String s = null; boolean append = false; boolean inVALARM = false; buf.append("BEGIN:VCALENDAR").append("\r\n"); String davHref = null; while ((s = br.readLine()) != null) { String _davHref = getDAVHref(s, br); if (_davHref != null) davHref = _davHref; if (s.indexOf("BEGIN:VEVENT") >= 0) { append = true; } if (s.indexOf("BEGIN:VALARM") >= 0) { inVALARM = true; } if (append && !inVALARM) { if (s.indexOf("END:VEVENT") >= 0 && davHref != null) { buf.append(davHref).append("\r\n"); davHref = null; } buf.append(s).append("\r\n"); } if (s.indexOf("END:VEVENT") >= 0) { append = false; } if (s.indexOf("END:VALARM") >= 0) { inVALARM = false; } } buf.append("END:VCALENDAR"); if (log.isDebugEnabled()) log.debug(buf.toString()); reader = new StringReader(buf.toString()); } else { try { reader = ICalendarUtil.convertRdf2Ics(bis); } catch (SAXException e) { log.error("", e); if (log.isInfoEnabled()) log.info("Unanalyzable RSS information is recieved.[" + e.getLocalizedMessage() + "]"); return process("Unanalyzable RSS information is recieved. : " + e.getLocalizedMessage(), 1); } catch (IOException e) { log.error("", e); if (log.isInfoEnabled()) log.info("Unanalyzable RSS information is recieved.[" + e.getLocalizedMessage() + "]"); return process("Unanalyzable RSS information is recieved.: " + e.getLocalizedMessage(), 1); } } } else { try { if (charset != null) reader = new InputStreamReader(bis, charset); else reader = new InputStreamReader(bis, "UTF-8"); } catch (UnsupportedEncodingException e) { try { reader = new InputStreamReader(bis, "UTF-8"); } catch (UnsupportedEncodingException e1) { log.error("", e1); } } } String result = null; try { //PrereqRDF and removal of line break ICS and Reader#reset done. result = parseICalendar(reader, startDateStr, endDateStr); } catch (IOException e) { log.error("", e); if (log.isInfoEnabled()) log.info("Unanalyzable ics information is recieved.[" + e.getLocalizedMessage() + "]"); return process("Unanalyzable ics information is recieved. 
: " + e.getLocalizedMessage(), 1); } catch (ParserException e) { log.error("", e); if (log.isInfoEnabled()) log.info("Unanalyzable ics information is recieved.[" + e.getLocalizedMessage() + "]"); return process("Unanalyzable ics information is recieved. : " + e.getLocalizedMessage(), 1); } return process("[" + result + "]", 0); }
From source file:org.codelibs.fess.web.admin.DataAction.java
@Execute(validator = true, input = "index") public String upload() { final String fileName = dataForm.uploadedFile.getFileName(); if (fileName.endsWith(".xml")) { try {/*from w w w. ja v a2 s .com*/ databaseService.importData(dataForm.uploadedFile.getInputStream(), dataForm.overwrite != null && "on".equalsIgnoreCase(dataForm.overwrite)); SAStrutsUtil.addSessionMessage("success.importing_data"); return "index?redirect=true"; } catch (final Exception e) { logger.error("Failed to import data.", e); throw new SSCActionMessagesException(e, "errors.failed_to_import_data"); } } else if (fileName.endsWith(".csv")) { BufferedInputStream is = null; File tempFile = null; FileOutputStream fos = null; final byte[] b = new byte[20]; try { tempFile = File.createTempFile("fess-import-", ".csv"); is = new BufferedInputStream(dataForm.uploadedFile.getInputStream()); is.mark(20); if (is.read(b, 0, 20) <= 0) { throw new FessSystemException("no import data."); } is.reset(); fos = new FileOutputStream(tempFile); StreamUtil.drain(is, fos); } catch (final Exception e) { if (tempFile != null && !tempFile.delete()) { logger.warn("Could not delete " + tempFile.getAbsolutePath()); } logger.error("Failed to import data.", e); throw new SSCActionMessagesException(e, "errors.failed_to_import_data"); } finally { IOUtils.closeQuietly(is); IOUtils.closeQuietly(fos); } final File oFile = tempFile; try { final String head = new String(b, Constants.UTF_8); if (!head.startsWith("SessionId,") && !head.startsWith("SearchWord,") && !head.startsWith("SearchId,")) { logger.error("Unknown file: " + dataForm.uploadedFile); throw new SSCActionMessagesException("errors.unknown_import_file"); } final String enc = crawlerProperties.getProperty(Constants.CSV_FILE_ENCODING_PROPERTY, Constants.UTF_8); new Thread(new Runnable() { @Override public void run() { Reader reader = null; try { reader = new BufferedReader(new InputStreamReader(new FileInputStream(oFile), enc)); if (head.startsWith("SessionId,")) { // Crawling Session crawlingSessionService.importCsv(reader); } else if (head.startsWith("SearchWord,")) { // Search Log searchLogService.importCsv(reader); } else if (head.startsWith("SearchId,")) { // Click Log clickLogService.importCsv(reader); } } catch (final Exception e) { logger.error("Failed to import data.", e); throw new FessSystemException("Failed to import data.", e); } finally { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } IOUtils.closeQuietly(reader); } } }).start(); } catch (final ActionMessagesException e) { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } throw e; } catch (final Exception e) { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } logger.error("Failed to import data.", e); throw new SSCActionMessagesException(e, "errors.failed_to_import_data"); } } SAStrutsUtil.addSessionMessage("success.importing_data"); return "index?redirect=true"; }
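Here mark(20) lets the action read a 20-byte head to decide which CSV importer to dispatch to, and reset() ensures the subsequent copy to the temp file starts from byte zero; SuggestBadWordAction and SuggestElevateWordAction further down reuse the exact same idiom. The sniff step reduced to a helper (peekHead is a hypothetical name):

// Hypothetical helper: read up to n bytes for type detection, then rewind so
// a later full copy of the stream still includes its head.
static byte[] peekHead(BufferedInputStream in, int n) throws IOException {
    byte[] head = new byte[n];
    in.mark(n);                       // readlimit must cover every byte we peek
    int read = in.read(head, 0, n);
    in.reset();                       // rewind before handing the stream on
    return read <= 0 ? new byte[0] : java.util.Arrays.copyOf(head, read);
}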
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReaderSpi.java
@Override public boolean canDecodeInput(Object source) throws IOException { if (!(source instanceof BufferedInputStream)) { return false; }// w ww . ja v a 2s. co m if (source == null) { throw new IllegalArgumentException("source == null!"); } BufferedInputStream stream = (BufferedInputStream) source; dbgLog.fine("applying the por test\n"); byte[] b = new byte[POR_HEADER_SIZE]; if (stream.markSupported()) { stream.mark(0); } int nbytes = stream.read(b, 0, POR_HEADER_SIZE); //printHexDump(b, "hex dump of the byte-array"); if (nbytes == 0) { throw new IOException(); } else if (nbytes < 491) { // size test dbgLog.fine("this file is NOT spss-por type"); return false; } if (stream.markSupported()) { stream.reset(); } boolean DEBUG = false; //windows [0D0A]=> [1310] = [CR/LF] //unix [0A] => [10] //mac [0D] => [13] // 3char [0D0D0A]=> [131310] spss for windows rel 15 // expected results // unix case: [0A] : [80], [161], [242], [323], [404], [485] // windows case: [0D0A] : [81], [163], [245], [327], [409], [491] // : [0D0D0A] : [82], [165], [248], [331], [414], [495] // convert b into a ByteBuffer ByteBuffer buff = ByteBuffer.wrap(b); byte[] nlch = new byte[36]; int pos1; int pos2; int pos3; int ucase = 0; int wcase = 0; int mcase = 0; int three = 0; int nolines = 6; int nocols = 80; for (int i = 0; i < nolines; ++i) { int baseBias = nocols * (i + 1); // 1-char case pos1 = baseBias + i; buff.position(pos1); dbgLog.finer("\tposition(1)=" + buff.position()); int j = 6 * i; nlch[j] = buff.get(); if (nlch[j] == 10) { ucase++; } else if (nlch[j] == 13) { mcase++; } // 2-char case pos2 = baseBias + 2 * i; buff.position(pos2); dbgLog.finer("\tposition(2)=" + buff.position()); nlch[j + 1] = buff.get(); nlch[j + 2] = buff.get(); // 3-char case pos3 = baseBias + 3 * i; buff.position(pos3); dbgLog.finer("\tposition(3)=" + buff.position()); nlch[j + 3] = buff.get(); nlch[j + 4] = buff.get(); nlch[j + 5] = buff.get(); dbgLog.finer(i + "-th iteration position =" + nlch[j] + "\t" + nlch[j + 1] + "\t" + nlch[j + 2]); dbgLog.finer(i + "-th iteration position =" + nlch[j + 3] + "\t" + nlch[j + 4] + "\t" + nlch[j + 5]); if ((nlch[j + 3] == 13) && (nlch[j + 4] == 13) && (nlch[j + 5] == 10)) { three++; } else if ((nlch[j + 1] == 13) && (nlch[j + 2] == 10)) { wcase++; } buff.rewind(); } if (three == nolines) { dbgLog.fine("0D0D0A case"); windowsNewLine = false; } else if ((ucase == nolines) && (wcase < nolines)) { dbgLog.fine("0A case"); windowsNewLine = false; } else if ((ucase < nolines) && (wcase == nolines)) { dbgLog.fine("0D0A case"); } else if ((mcase == nolines) && (wcase < nolines)) { dbgLog.fine("0D case"); windowsNewLine = false; } buff.rewind(); int PORmarkPosition = POR_MARK_POSITION_DEFAULT; if (windowsNewLine) { PORmarkPosition = PORmarkPosition + 5; } else if (three == nolines) { PORmarkPosition = PORmarkPosition + 10; } byte[] pormark = new byte[8]; buff.position(PORmarkPosition); buff.get(pormark, 0, 8); String pormarks = new String(pormark); dbgLog.fine( "pormark[hex: 53 50 53 53 50 4F 52 54 == SPSSPORT] =>" + new String(Hex.encodeHex(pormark)) + "<-"); if (pormarks.equals(POR_MARK)) { dbgLog.fine("this file is spss-por type"); return true; } else { dbgLog.fine("this file is NOT spss-por type"); } return false; }
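Note that canDecodeInput calls stream.mark(0) and then reads POR_HEADER_SIZE bytes; reset() still works in practice because BufferedInputStream keeps those bytes in its internal buffer, but the documented contract only guarantees a reset within readlimit bytes of the mark. The second canDecodeInput overload at the end of this page uses the identical guard. A contract-safe sketch of the header read (readHeader is a hypothetical name, POR_HEADER_SIZE the constant assumed above):

// Sketch: read a fixed-size header for format detection and rewind, with a
// readlimit that actually covers the read, so reset() is guaranteed by the
// mark/reset contract rather than by the current buffer capacity.
static byte[] readHeader(BufferedInputStream stream, int headerSize) throws IOException {
    byte[] b = new byte[headerSize];
    if (stream.markSupported()) {
        stream.mark(headerSize);       // instead of mark(0)
    }
    int nbytes = stream.read(b, 0, headerSize);
    if (stream.markSupported()) {
        stream.reset();                // rewind for the real reader
    }
    return nbytes <= 0 ? new byte[0] : b;
}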
From source file:org.codelibs.fess.transformer.FessXpathTransformer.java
@Override protected void storeData(final ResponseData responseData, final ResultData resultData) { final File tempFile = ResponseDataUtil.createResponseBodyFile(responseData); try {/*from w ww .j av a 2 s . co m*/ final DOMParser parser = getDomParser(); BufferedInputStream bis = null; try { bis = new BufferedInputStream(new FileInputStream(tempFile)); final byte[] bomBytes = new byte[UTF8_BOM_SIZE]; bis.mark(UTF8_BOM_SIZE); bis.read(bomBytes); // NOSONAR if (!isUtf8BomBytes(bomBytes)) { bis.reset(); } final InputSource is = new InputSource(bis); if (responseData.getCharSet() != null) { is.setEncoding(responseData.getCharSet()); } parser.parse(is); } catch (final Exception e) { throw new RobotCrawlAccessException("Could not parse " + responseData.getUrl(), e); } finally { IOUtils.closeQuietly(bis); } final Document document = parser.getDocument(); final Map<String, Object> dataMap = new HashMap<String, Object>(); for (final Map.Entry<String, String> entry : fieldRuleMap.entrySet()) { final String path = entry.getValue(); try { final XObject xObj = getXPathAPI().eval(document, path); final int type = xObj.getType(); switch (type) { case XObject.CLASS_BOOLEAN: final boolean b = xObj.bool(); putResultDataBody(dataMap, entry.getKey(), Boolean.toString(b)); break; case XObject.CLASS_NUMBER: final double d = xObj.num(); putResultDataBody(dataMap, entry.getKey(), Double.toString(d)); break; case XObject.CLASS_STRING: final String str = xObj.str(); putResultDataBody(dataMap, entry.getKey(), str); break; case XObject.CLASS_NULL: case XObject.CLASS_UNKNOWN: case XObject.CLASS_NODESET: case XObject.CLASS_RTREEFRAG: case XObject.CLASS_UNRESOLVEDVARIABLE: default: final Node value = getXPathAPI().selectSingleNode(document, entry.getValue()); putResultDataBody(dataMap, entry.getKey(), value != null ? value.getTextContent() : null); break; } } catch (final TransformerException e) { logger.warn("Could not parse a value of " + entry.getKey() + ":" + entry.getValue()); } } FileInputStream fis = null; try { fis = new FileInputStream(tempFile); responseData.setResponseBody(fis); putAdditionalData(dataMap, responseData, document); } catch (final FileNotFoundException e) { logger.warn(tempFile + " does not exist.", e); putAdditionalData(dataMap, responseData, document); } finally { IOUtils.closeQuietly(fis); } try { resultData.setData(SerializeUtil.fromObjectToBinary(dataMap)); } catch (final Exception e) { throw new RobotCrawlAccessException("Could not serialize object: " + responseData.getUrl(), e); } resultData.setEncoding(charsetName); } finally { if (!tempFile.delete()) { logger.warn("Could not delete a temp file: " + tempFile); } } }
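FessXpathTransformer marks the stream, reads UTF8_BOM_SIZE bytes, and only resets when those bytes are not the UTF-8 byte order mark, so a leading BOM is silently skipped before the DOM parser sees the data. The same trick as a standalone helper (skipUtf8Bom is a hypothetical name; the UTF-8 BOM is the fixed sequence EF BB BF):

// Hypothetical helper: consume a leading UTF-8 BOM if present, otherwise
// rewind so the parser receives every byte. Assumes the stream supports mark.
static void skipUtf8Bom(BufferedInputStream in) throws IOException {
    byte[] bom = new byte[3];
    in.mark(3);                            // the UTF-8 BOM is exactly 3 bytes
    int n = in.read(bom, 0, 3);
    boolean isBom = n == 3
            && bom[0] == (byte) 0xEF
            && bom[1] == (byte) 0xBB
            && bom[2] == (byte) 0xBF;
    if (!isBom) {
        in.reset();                        // not a BOM: keep the bytes for parsing
    }
}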
From source file:com.google.acre.script.AcreFetch.java
@SuppressWarnings("boxing") public void fetch(boolean system, String response_encoding, boolean log_to_user, boolean no_redirect) { if (request_url.length() > 2047) { throw new AcreURLFetchException("fetching URL failed - url is too long"); }/* w w w . ja v a 2 s . co m*/ DefaultHttpClient client = new DefaultHttpClient(_connectionManager, null); HttpParams params = client.getParams(); // pass the deadline down to the invoked service. // this will be ignored unless we are fetching from another // acre server. // note that we may send a deadline that is already passed: // it's not our job to throw here since we don't know how // the target service will interpret the quota header. // NOTE: this is done *after* the user sets the headers to overwrite // whatever settings they might have tried to change for this value // (which could be a security hazard) long sub_deadline = (HostEnv.LIMIT_EXECUTION_TIME) ? _deadline - HostEnv.SUBREQUEST_DEADLINE_ADVANCE : System.currentTimeMillis() + HostEnv.ACRE_URLFETCH_TIMEOUT; int reentries = _reentries + 1; request_headers.put(HostEnv.ACRE_QUOTAS_HEADER, "td=" + sub_deadline + ",r=" + reentries); // if this is not an internal call, we need to invoke the call thru a proxy if (!_internal) { // XXX No sense wasting the resources to gzip inside the network. // XXX seems that twitter gets upset when we do this /* if (!request_headers.containsKey("accept-encoding")) { request_headers.put("accept-encoding", "gzip"); } */ String proxy_host = Configuration.Values.HTTP_PROXY_HOST.getValue(); int proxy_port = -1; if (!(proxy_host.length() == 0)) { proxy_port = Configuration.Values.HTTP_PROXY_PORT.getInteger(); HttpHost proxy = new HttpHost(proxy_host, proxy_port, "http"); params.setParameter(AllClientPNames.DEFAULT_PROXY, proxy); } } params.setParameter(AllClientPNames.COOKIE_POLICY, CookiePolicy.BROWSER_COMPATIBILITY); // in msec long timeout = _deadline - System.currentTimeMillis(); if (timeout < 0) timeout = 0; params.setParameter(AllClientPNames.CONNECTION_TIMEOUT, (int) timeout); params.setParameter(AllClientPNames.SO_TIMEOUT, (int) timeout); // we're not streaming the request so this should be a win. params.setParameter(AllClientPNames.TCP_NODELAY, true); // reuse an existing socket if it is in TIME_WAIT state. 
params.setParameter(AllClientPNames.SO_REUSEADDR, true); // set the encoding of our POST payloads to UTF-8 params.setParameter(AllClientPNames.HTTP_CONTENT_CHARSET, "UTF-8"); BasicCookieStore cstore = new BasicCookieStore(); for (AcreCookie cookie : request_cookies.values()) { cstore.addCookie(cookie.toClientCookie()); } client.setCookieStore(cstore); HttpRequestBase method; HashMap<String, String> logmsg = new HashMap<String, String>(); logmsg.put("Method", request_method); logmsg.put("URL", request_url); params.setParameter(AllClientPNames.HANDLE_REDIRECTS, !no_redirect); logmsg.put("Redirect", Boolean.toString(!no_redirect)); try { if (request_method.equals("GET")) { method = new HttpGet(request_url); } else if (request_method.equals("POST")) { method = new HttpPost(request_url); } else if (request_method.equals("HEAD")) { method = new HttpHead(request_url); } else if (request_method.equals("PUT")) { method = new HttpPut(request_url); } else if (request_method.equals("DELETE")) { method = new HttpDelete(request_url); } else if (request_method.equals("PROPFIND")) { method = new HttpPropFind(request_url); } else { throw new AcreURLFetchException("Failed: unsupported (so far) method " + request_method); } method.getParams().setBooleanParameter(AllClientPNames.USE_EXPECT_CONTINUE, false); } catch (java.lang.IllegalArgumentException e) { throw new AcreURLFetchException("Unable to fetch URL; this is most likely an issue with URL encoding."); } catch (java.lang.IllegalStateException e) { throw new AcreURLFetchException("Unable to fetch URL; possibly an illegal protocol?"); } StringBuilder request_header_log = new StringBuilder(); for (Map.Entry<String, String> header : request_headers.entrySet()) { String key = header.getKey(); String value = header.getValue(); // XXX should suppress cookie headers? // content-type and length? if ("content-type".equalsIgnoreCase(key)) { Matcher m = contentTypeCharsetPattern.matcher(value); if (m.find()) { content_type = m.group(1); content_type_charset = m.group(2); } else { content_type_charset = "utf-8"; } method.addHeader(key, value); } else if ("content-length".equalsIgnoreCase(key)) { // ignore user-supplied content-length, which is // probably wrong due to chars vs bytes and is // redundant anyway ArrayList<String> msg = new ArrayList<String>(); msg.add("User-supplied content-length header is ignored"); _acre_response.log("warn", msg); } else if ("user-agent".equalsIgnoreCase(key)) { params.setParameter(AllClientPNames.USER_AGENT, value); } else { method.addHeader(key, value); } if (!("x-acre-auth".equalsIgnoreCase(key))) { request_header_log.append(key + ": " + value + "\r\n"); } } logmsg.put("Headers", request_header_log.toString()); // XXX need more detailed error checking if (method instanceof HttpEntityEnclosingRequestBase && request_body != null) { HttpEntityEnclosingRequestBase em = (HttpEntityEnclosingRequestBase) method; try { if (request_body instanceof String) { StringEntity ent = new StringEntity((String) request_body, content_type_charset); em.setEntity(ent); } else if (request_body instanceof JSBinary) { ByteArrayEntity ent = new ByteArrayEntity(((JSBinary) request_body).get_data()); em.setEntity(ent); } } catch (UnsupportedEncodingException e) { throw new AcreURLFetchException( "Failed to fetch URL. 
" + " - Unsupported charset: " + content_type_charset); } } if (!system && log_to_user) { ArrayList<Object> msg = new ArrayList<Object>(); msg.add("urlfetch request"); msg.add(logmsg); _acre_response.log("debug", msg); } _logger.info("urlfetch.request", logmsg); long startTime = System.currentTimeMillis(); try { // this sends the http request and waits HttpResponse hres = client.execute(method); status = hres.getStatusLine().getStatusCode(); HashMap<String, String> res_logmsg = new HashMap<String, String>(); res_logmsg.put("URL", request_url); res_logmsg.put("Status", ((Integer) status).toString()); Header content_type_header = null; // translate response headers StringBuilder response_header_log = new StringBuilder(); Header[] rawheaders = hres.getAllHeaders(); for (Header rawheader : rawheaders) { String headername = rawheader.getName().toLowerCase(); if (headername.equalsIgnoreCase("content-type")) { content_type_header = rawheader; // XXX should strip everything after ; content_type = rawheader.getValue(); // XXX don't set content_type_parameters, deprecated? } else if (headername.equalsIgnoreCase("x-metaweb-cost")) { _costCollector.merge(rawheader.getValue()); } else if (headername.equalsIgnoreCase("x-metaweb-tid")) { res_logmsg.put("ITID", rawheader.getValue()); } headers.put(headername, rawheader.getValue()); response_header_log.append(headername + ": " + rawheader.getValue() + "\r\n"); } res_logmsg.put("Headers", response_header_log.toString()); if (!system && log_to_user) { ArrayList<Object> msg = new ArrayList<Object>(); msg.add("urlfetch response"); msg.add(res_logmsg); _acre_response.log("debug", msg); } _logger.info("urlfetch.response", res_logmsg); // read cookies for (Cookie c : cstore.getCookies()) { cookies.put(c.getName(), new AcreCookie(c)); } // get body encoding String charset = null; if (content_type_header != null) { HeaderElement values[] = content_type_header.getElements(); if (values.length == 1) { NameValuePair param = values[0].getParameterByName("charset"); if (param != null) { charset = param.getValue(); } } } if (charset == null) charset = response_encoding; // read body HttpEntity ent = hres.getEntity(); if (ent != null) { InputStream res_stream = ent.getContent(); Header cenc = ent.getContentEncoding(); if (cenc != null && res_stream != null) { HeaderElement[] codecs = cenc.getElements(); for (HeaderElement codec : codecs) { if (codec.getName().equalsIgnoreCase("gzip")) { res_stream = new GZIPInputStream(res_stream); } } } long firstByteTime = 0; long endTime = 0; if (content_type != null && (content_type.startsWith("image/") || content_type.startsWith("application/octet-stream") || content_type.startsWith("multipart/form-data"))) { // HttpClient's InputStream doesn't support mark/reset, so // wrap it with one that does. 
BufferedInputStream bufis = new BufferedInputStream(res_stream); bufis.mark(2); bufis.read(); firstByteTime = System.currentTimeMillis(); bufis.reset(); byte[] data = IOUtils.toByteArray(bufis); endTime = System.currentTimeMillis(); body = new JSBinary(); ((JSBinary) body).set_data(data); try { if (res_stream != null) { res_stream.close(); } } catch (IOException e) { // ignore } } else if (res_stream == null || charset == null) { firstByteTime = endTime = System.currentTimeMillis(); body = ""; } else { StringWriter writer = new StringWriter(); Reader reader = new InputStreamReader(res_stream, charset); int i = reader.read(); firstByteTime = System.currentTimeMillis(); writer.write(i); IOUtils.copy(reader, writer); endTime = System.currentTimeMillis(); body = writer.toString(); try { reader.close(); writer.close(); } catch (IOException e) { // ignore } } long waitingTime = firstByteTime - startTime; long readingTime = endTime - firstByteTime; _logger.debug("urlfetch.timings", "waiting time: " + waitingTime + "ms"); _logger.debug("urlfetch.timings", "reading time: " + readingTime + "ms"); Statistics.instance().collectUrlfetchTime(startTime, firstByteTime, endTime); _costCollector.collect((system) ? "asuc" : "auuc").collect((system) ? "asuw" : "auuw", waitingTime) .collect((system) ? "asub" : "auub", waitingTime); } } catch (IllegalArgumentException e) { Throwable cause = e.getCause(); if (cause == null) cause = e; throw new AcreURLFetchException("failed to fetch URL. " + " - Request Error: " + cause.getMessage()); } catch (IOException e) { Throwable cause = e.getCause(); if (cause == null) cause = e; throw new AcreURLFetchException("Failed to fetch URL. " + " - Network Error: " + cause.getMessage()); } catch (RuntimeException e) { Throwable cause = e.getCause(); if (cause == null) cause = e; throw new AcreURLFetchException("Failed to fetch URL. " + " - Network Error: " + cause.getMessage()); } finally { method.abort(); } }
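As the comment above notes, HttpClient's response stream does not support mark/reset, so AcreFetch wraps it in a BufferedInputStream just long enough to read one byte for the time-to-first-byte measurement and put it back. A pared-down sketch of that measurement (the method name and the long[] out-parameter are illustrative, not part of AcreFetch):

// Illustrative sketch: wrap a non-mark-supporting stream so one byte can be
// peeked for timing, then read the full payload from position zero.
static byte[] readWithFirstByteTime(java.io.InputStream raw, long[] firstByteTimeOut)
        throws IOException {
    BufferedInputStream bufis = new BufferedInputStream(raw);  // adds mark/reset support
    bufis.mark(2);                     // small readlimit: only one byte is peeked
    bufis.read();                      // blocks until the first byte arrives
    firstByteTimeOut[0] = System.currentTimeMillis();
    bufis.reset();                     // put the peeked byte back
    return org.apache.commons.io.IOUtils.toByteArray(bufis);   // full body, head included
}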
From source file:org.paxle.tools.ieporter.cm.impl.ConfigurationIEPorter.java
public Map<String, Dictionary<String, Object>> importConfigurations(File file) throws Exception {
    BufferedInputStream input = null;
    Map<String, Dictionary<String, Object>> configs = new HashMap<String, Dictionary<String, Object>>();
    try {
        input = new BufferedInputStream(new FileInputStream(file), 5);

        // pre-read data to detect file type
        byte[] test = new byte[5];
        input.mark(5);
        input.read(test);
        input.reset();

        if (new String(test, "UTF-8").equals("<?xml")) {
            // XML Document found
            Document doc = this.readXMLDocument(file);
            Map<String, Dictionary<String, Object>> config = this.importConfigurations(doc);
            configs.putAll(config);
        } else if (new String(test, 0, 2).equals("PK")) {
            // open zip file
            final ZipInputStream zis = new ZipInputStream(input);

            // loop through entries
            ZipEntry ze;
            while ((ze = zis.getNextEntry()) != null) {
                // skip directories
                if (ze.isDirectory())
                    continue;

                // read data into memory
                long size = ze.getSize();
                ByteArrayOutputStream bout = (size < 0) ? new ByteArrayOutputStream()
                        : new ByteArrayOutputStream((int) size);
                IOUtils.copy(zis, bout);
                bout.close();

                // read XML
                ByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());
                Document doc = this.readXMLStream(bin);
                bin.close();

                // parser configuration
                Map<String, Dictionary<String, Object>> config = this.importConfigurations(doc);
                configs.putAll(config);
            }
            zis.close();
        } else {
            // Unknown file
            throw new IllegalArgumentException("Unknown file type");
        }
    } finally {
        if (input != null)
            try {
                input.close();
            } catch (Exception e) {
                /* ignore this */
            }
    }
    return configs;
}
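A buffer of only five bytes is enough in ConfigurationIEPorter because the detector just needs to distinguish the "<?xml" prologue from the "PK" ZIP magic number before deciding how to parse. The detection step on its own (detectFormat and the Format enum are hypothetical):

// Hypothetical detector mirroring the peek above: look at the first bytes,
// rewind, and report the container type without consuming anything.
enum Format { XML, ZIP, UNKNOWN }

static Format detectFormat(BufferedInputStream in) throws IOException {
    byte[] head = new byte[5];
    in.mark(5);                              // matches the 5-byte peek above
    int n = in.read(head, 0, 5);
    in.reset();                              // stream is untouched for the parser
    String s = new String(head, 0, Math.max(n, 0), "UTF-8");
    if (s.startsWith("<?xml")) return Format.XML;
    if (s.startsWith("PK")) return Format.ZIP;
    return Format.UNKNOWN;
}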
From source file:org.codelibs.fess.web.admin.SuggestBadWordAction.java
@Token(save = false, validate = true) @Execute(validator = true, input = "uploadpage") public String upload() { BufferedInputStream is = null; File tempFile = null;/*from ww w. j a v a2 s. c o m*/ FileOutputStream fos = null; final byte[] b = new byte[20]; try { tempFile = File.createTempFile("suggestbadword-import-", ".csv"); is = new BufferedInputStream(suggestBadWordForm.suggestBadWordFile.getInputStream()); is.mark(20); if (is.read(b, 0, 20) <= 0) { throw new FessSystemException("no import data."); } is.reset(); fos = new FileOutputStream(tempFile); StreamUtil.drain(is, fos); } catch (final Exception e) { if (tempFile != null && !tempFile.delete()) { logger.warn("Could not delete " + tempFile.getAbsolutePath()); } logger.error("Failed to import data.", e); throw new SSCActionMessagesException(e, "errors.failed_to_import_data"); } finally { IOUtils.closeQuietly(is); IOUtils.closeQuietly(fos); } final File oFile = tempFile; try { final String head = new String(b, Constants.UTF_8); if (!(head.startsWith("\"BadWord\"") || head.startsWith("BadWord"))) { logger.error("Unknown file: " + suggestBadWordForm.suggestBadWordFile); throw new SSCActionMessagesException("errors.unknown_import_file"); } final String enc = crawlerProperties.getProperty(Constants.CSV_FILE_ENCODING_PROPERTY, Constants.UTF_8); new Thread(new Runnable() { public void run() { Reader reader = null; try { reader = new BufferedReader(new InputStreamReader(new FileInputStream(oFile), enc)); suggestBadWordService.importCsv(reader); } catch (final Exception e) { logger.error("Failed to import data.", e); throw new FessSystemException("Failed to import data.", e); } finally { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } IOUtils.closeQuietly(reader); suggestHelper.deleteAllBadWord(); suggestHelper.updateSolrBadwordFile(); } } }).start(); } catch (final ActionMessagesException e) { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } throw e; } catch (final Exception e) { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } logger.error("Failed to import data.", e); throw new SSCActionMessagesException(e, "errors.failed_to_import_data"); } SAStrutsUtil.addSessionMessage("success.upload_suggest_bad_word"); return "uploadpage?redirect=true"; }
From source file:org.codelibs.fess.web.admin.SuggestElevateWordAction.java
@Token(save = false, validate = true) @Execute(validator = true, input = "uploadpage") public String upload() { BufferedInputStream is = null; File tempFile = null;/* w w w .j a v a 2 s. c o m*/ FileOutputStream fos = null; final byte[] b = new byte[20]; try { tempFile = File.createTempFile("suggestelevateword-import-", ".csv"); is = new BufferedInputStream(suggestElevateWordForm.suggestElevateWordFile.getInputStream()); is.mark(20); if (is.read(b, 0, 20) <= 0) { throw new FessSystemException("no import data."); } is.reset(); fos = new FileOutputStream(tempFile); StreamUtil.drain(is, fos); } catch (final Exception e) { if (tempFile != null && !tempFile.delete()) { logger.warn("Could not delete " + tempFile.getAbsolutePath()); } logger.error("Failed to import data.", e); throw new SSCActionMessagesException(e, "errors.failed_to_import_data"); } finally { IOUtils.closeQuietly(is); IOUtils.closeQuietly(fos); } final File oFile = tempFile; try { final String head = new String(b, Constants.UTF_8); if (!(head.startsWith("\"SuggestWord\"") || head.startsWith("SuggestWord"))) { logger.error("Unknown file: " + suggestElevateWordForm.suggestElevateWordFile); throw new SSCActionMessagesException("errors.unknown_import_file"); } final String enc = crawlerProperties.getProperty(Constants.CSV_FILE_ENCODING_PROPERTY, Constants.UTF_8); new Thread(new Runnable() { public void run() { Reader reader = null; try { reader = new BufferedReader(new InputStreamReader(new FileInputStream(oFile), enc)); suggestElevateWordService.importCsv(reader); } catch (final Exception e) { logger.error("Failed to import data.", e); throw new FessSystemException("Failed to import data.", e); } finally { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } IOUtils.closeQuietly(reader); suggestHelper.storeAllElevateWords(); } } }).start(); } catch (final ActionMessagesException e) { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } throw e; } catch (final Exception e) { if (!oFile.delete()) { logger.warn("Could not delete " + oFile.getAbsolutePath()); } logger.error("Failed to import data.", e); throw new SSCActionMessagesException(e, "errors.failed_to_import_data"); } SAStrutsUtil.addSessionMessage("success.upload_suggest_elevate_word"); return "uploadpage?redirect=true"; }
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.por.PORFileReaderSpi.java
@Override public boolean canDecodeInput(BufferedInputStream stream) throws IOException { if (stream == null) { throw new IllegalArgumentException("file == null!"); }/*from w ww . jav a 2 s . c o m*/ dbgLog.fine("applying the por test\n"); byte[] b = new byte[POR_HEADER_SIZE]; if (stream.markSupported()) { stream.mark(0); } int nbytes = stream.read(b, 0, POR_HEADER_SIZE); //printHexDump(b, "hex dump of the byte-array"); if (nbytes == 0) { throw new IOException(); } else if (nbytes < 491) { // size test dbgLog.fine("this file is NOT spss-por type"); return false; } if (stream.markSupported()) { stream.reset(); } boolean DEBUG = false; //windows [0D0A]=> [1310] = [CR/LF] //unix [0A] => [10] //mac [0D] => [13] // 3char [0D0D0A]=> [131310] spss for windows rel 15 // expected results // unix case: [0A] : [80], [161], [242], [323], [404], [485] // windows case: [0D0A] : [81], [163], [245], [327], [409], [491] // : [0D0D0A] : [82], [165], [248], [331], [414], [495] // convert b into a ByteBuffer ByteBuffer buff = ByteBuffer.wrap(b); byte[] nlch = new byte[36]; int pos1; int pos2; int pos3; int ucase = 0; int wcase = 0; int mcase = 0; int three = 0; int nolines = 6; int nocols = 80; for (int i = 0; i < nolines; ++i) { int baseBias = nocols * (i + 1); // 1-char case pos1 = baseBias + i; buff.position(pos1); dbgLog.finer("\tposition(1)=" + buff.position()); int j = 6 * i; nlch[j] = buff.get(); if (nlch[j] == 10) { ucase++; } else if (nlch[j] == 13) { mcase++; } // 2-char case pos2 = baseBias + 2 * i; buff.position(pos2); dbgLog.finer("\tposition(2)=" + buff.position()); nlch[j + 1] = buff.get(); nlch[j + 2] = buff.get(); // 3-char case pos3 = baseBias + 3 * i; buff.position(pos3); dbgLog.finer("\tposition(3)=" + buff.position()); nlch[j + 3] = buff.get(); nlch[j + 4] = buff.get(); nlch[j + 5] = buff.get(); dbgLog.finer(i + "-th iteration position =" + nlch[j] + "\t" + nlch[j + 1] + "\t" + nlch[j + 2]); dbgLog.finer(i + "-th iteration position =" + nlch[j + 3] + "\t" + nlch[j + 4] + "\t" + nlch[j + 5]); if ((nlch[j + 3] == 13) && (nlch[j + 4] == 13) && (nlch[j + 5] == 10)) { three++; } else if ((nlch[j + 1] == 13) && (nlch[j + 2] == 10)) { wcase++; } buff.rewind(); } if (three == nolines) { dbgLog.fine("0D0D0A case"); windowsNewLine = false; } else if ((ucase == nolines) && (wcase < nolines)) { dbgLog.fine("0A case"); windowsNewLine = false; } else if ((ucase < nolines) && (wcase == nolines)) { dbgLog.fine("0D0A case"); } else if ((mcase == nolines) && (wcase < nolines)) { dbgLog.fine("0D case"); windowsNewLine = false; } buff.rewind(); int PORmarkPosition = POR_MARK_POSITION_DEFAULT; if (windowsNewLine) { PORmarkPosition = PORmarkPosition + 5; } else if (three == nolines) { PORmarkPosition = PORmarkPosition + 10; } byte[] pormark = new byte[8]; buff.position(PORmarkPosition); buff.get(pormark, 0, 8); String pormarks = new String(pormark); //dbgLog.fine("pormark =>" + pormarks + "<-"); dbgLog.fine( "pormark[hex: 53 50 53 53 50 4F 52 54 == SPSSPORT] =>" + new String(Hex.encodeHex(pormark)) + "<-"); if (pormarks.equals(POR_MARK)) { dbgLog.fine("this file is spss-por type"); return true; } else { dbgLog.fine("this file is NOT spss-por type"); } return false; }