List of usage examples for java.io InputStreamReader close
public void close() throws IOException
From source file:com.edgenius.wiki.service.impl.BackupServiceImpl.java
/**
 * Restores a full site backup from the given ZIP export file.
 * <p>
 * Steps: unzip to a temp directory, parse the XStream data binder, license/version
 * checks, optionally import database data, then replace the live attachment/RSS/
 * index/skin/theme directories with the restored copies, and finally run the data
 * file upgrade for the exported version.
 *
 * @param file the backup ZIP file to restore from.
 * @throws BackupException on any non-license failure (wraps the cause).
 * @throws InvalidLicenseException if the license check rejects the backup.
 */
@Transactional(readOnly = false, propagation = Propagation.REQUIRED)
public void restore(File file) throws BackupException, InvalidLicenseException {
    log.info("Restore starting....");
    long time = System.currentTimeMillis();
    FileInputStream zipis = null;
    try {
        // Unzip to a temporary directory first.
        String dir = FileUtil.createTempDirectory(TMP_RESTORE);
        zipis = new FileInputStream(file);
        ZipFileUtil.expandZipToFolder(zipis, dir);
        log.info("Restore file unzipped to {}. Took {}s", dir, (System.currentTimeMillis() - time) / 1000);
        // Get the data binder, to check options.
        File binderFile = new File(FileUtil.getFullPath(dir, OBJS_BINDER_NAME));
        // Version check - older exports may have different field names etc. that need migration.
        int binderVersion = versionCheck(binderFile);
        log.info("Data binder version is {}", binderVersion);
        time = System.currentTimeMillis();
        FileInputStream bis = new FileInputStream(binderFile);
        InputStreamReader reader = new InputStreamReader(bis, Constants.UTF8);
        XStream xstream = createXStreamInstance();
        DataBinder binder = (DataBinder) xstream.fromXML(reader);
        // NOTE(review): reader/bis are only closed on the success path; if fromXML
        // throws they are never closed. Consider try-with-resources.
        reader.close();
        bis.close();
        log.info("Parse binder XML took {}s", (System.currentTimeMillis() - time) / 1000);
        int leftUserCount = licenseCheck(binder);
        // Defensive de-duplication: some exports contain the same Page twice - once
        // enclosed as a normal DataObject and once as a reference, e.g.
        // <com.edgenius.wiki.model.Page reference="93"/> - which yields duplicate
        // objects in the returned list. This is a temporary fix; the real fix is
        // Hibernate.loadAll() - see BaseDAOHibernate.getAll().
        List<Page> pages = (List<Page>) binder.get(Page.class.getName());
        Set<Integer> dup = new HashSet<Integer>();
        for (Iterator<Page> iter = pages.iterator(); iter.hasNext();) {
            Integer uid = iter.next().getUid();
            if (dup.contains(uid)) {
                log.error("There are duplciated pages while import data, UID:" + uid);
                iter.remove();
                continue;
            }
            dup.add(uid);
        }
        Map<Integer, String> spaceMap = null;
        if (binderVersion <= 2180) {
            // Bug fix for versions <= 2.18: customized themes were keyed by spaceUid,
            // which changes after import. Since 2.19 the customized theme XML is
            // removed, so the old-uid -> unixName mapping is only needed for old data.
            List<Space> spaces = (List<Space>) binder.get(Space.class.getName());
            // Save old-version spaceUid -> spaceUname into a map.
            spaceMap = new HashMap<Integer, String>();
            for (Iterator<Space> iter = spaces.iterator(); iter.hasNext();) {
                Space space = iter.next();
                spaceMap.put(space.getUid(), space.getUnixName());
            }
        }
        int options = binder.getOptions();
        if ((options & BACKUP_DATA) > 0) {
            time = System.currentTimeMillis();
            importData(binder, dir, binderVersion);
            log.info("Restore database table took {}s", (System.currentTimeMillis() - time) / 1000);
            // Delete the binder file after a successful import.
            if (!binderFile.delete())
                binderFile.deleteOnExit();
        }
        time = System.currentTimeMillis();
        // For each backed-up area below: wipe the live directory, then move the
        // restored copy into its place.
        if ((options & BACKUP_ATTACHMENT) > 0) {
            FileUtils.deleteDirectory(repositoryLocation.getFile());
            FileUtils.moveDirectory(new File(FileUtil.getFullPath(dir, binder.getDir(BACKUP_ATTACHMENT))),
                    repositoryLocation.getFile());
        }
        if ((options & BACKUP_RSS) > 0) {
            FileUtils.deleteDirectory(rssLocation.getFile());
            FileUtils.moveDirectory(new File(FileUtil.getFullPath(dir, binder.getDir(BACKUP_RSS))),
                    rssLocation.getFile());
        }
        if ((options & BACKUP_INDEX) > 0) {
            FileUtils.deleteDirectory(indexLocation.getFile());
            FileUtils.moveDirectory(new File(FileUtil.getFullPath(dir, binder.getDir(BACKUP_INDEX))),
                    indexLocation.getFile());
        }
        if ((options & BACKUP_SKIN) > 0) {
            FileUtils.deleteDirectory(skinLocation.getFile());
            FileUtils.moveDirectory(new File(FileUtil.getFullPath(dir, binder.getDir(BACKUP_SKIN))),
                    skinLocation.getFile());
        }
        if ((options & BACKUP_THEME) > 0) {
            FileUtils.deleteDirectory(themeLocation.getFile());
            FileUtils.moveDirectory(new File(FileUtil.getFullPath(dir, binder.getDir(BACKUP_THEME))),
                    themeLocation.getFile());
            if (binderVersion <= 2180) {
                // Rename customized theme files from the old spaceUid to the new one.
                File customizedDir = new File(themeLocation.getFile(), "customized");
                File customizedSubDir = new File(themeLocation.getFile(), "customizedTemp");
                String[] files = customizedDir.list(FileFilterUtils.suffixFileFilter(".xml"));
                // NOTE(review): File.list() returns null when the directory does not
                // exist - files.length would then NPE. Presumably the directory always
                // exists in <= 2.18 exports; confirm.
                if (files.length > 0) {
                    customizedSubDir.mkdirs();
                }
                for (String name : files) {
                    // The file name is "<oldSpaceUid>.xml"; strip the 4-char suffix.
                    int uid = NumberUtils.toInt(name.substring(0, name.length() - 4), -1);
                    if (uid == -1) {
                        log.info("Unable to get correct space UID from theme file name {}", name);
                        continue;
                    }
                    String uname = spaceMap.get(uid);
                    if (uname == null) {
                        log.warn("Unable to get old spaceUname by UID {}", uid);
                        continue;
                    }
                    Space space = spaceDAO.getByUname(uname);
                    if (space == null) {
                        log.warn("Unable to get space by Uname {}", uname);
                        continue;
                    }
                    uid = space.getUid();
                    FileUtils.moveFile(new File(customizedDir, name), new File(customizedSubDir, uid + ".xml"));
                }
                if (customizedSubDir.exists()) {
                    // Replace the old directory with the renamed themes.
                    FileUtils.deleteDirectory(customizedDir);
                    FileUtils.moveDirectory(customizedSubDir, customizedDir);
                }
            }
        }
        // Upgrade data files under DataRoot -- assumes theme, index, rss etc. all use
        // their default names under DataRoot.
        try {
            upgradeService.doBackupPackageUpgardeForDataFiles(String.valueOf((float) binderVersion / 1000));
        } catch (Exception e) {
            log.error("Unexpected erorr while upgrade backup export package from " + binderVersion + " to "
                    + Version.VERSION, e);
        }
        log.info("Restore data root files tooks {}s", (System.currentTimeMillis() - time) / 1000);
        // Best-effort cleanup of the temp directory; failure is only logged.
        try {
            FileUtil.deleteDir(dir);
        } catch (IOException e) {
            log.error("Unable to delete restore temp directory " + dir);
        }
        Version.LEFT_USERS = leftUserCount;
        log.info("Restore success complete. Database transaction will submit.");
    } catch (InvalidLicenseException e) {
        log.error("Restore failed", e);
        throw e;
    } catch (Exception e) {
        log.error("Restore failed", e);
        throw new BackupException(e);
    } finally {
        // Always release the ZIP input stream; close failures are ignored.
        if (zipis != null) {
            try {
                zipis.close();
            } catch (Exception e) {
            }
        }
    }
}
From source file:org.apache.jackrabbit.core.RepositoryImpl.java
/**
 * Returns the root node uuid, loading it from the file system if persisted,
 * otherwise persisting and returning the hard-coded {@code ROOT_NODE_ID}.
 *
 * @param fs the repository file system holding the "rootUUID" resource.
 * @return the root node id.
 * @throws RepositoryException if the state cannot be read, parsed or persisted.
 */
protected NodeId loadRootNodeId(FileSystem fs) throws RepositoryException {
    FileSystemResource uuidFile = new FileSystemResource(fs, "rootUUID");
    try {
        if (uuidFile.exists()) {
            try {
                // load uuid of the repository's root node
                InputStream in = uuidFile.getInputStream();
                // (A previous revision stored the uuid in binary format, 16 bytes.)
                // uuid is stored in text format (36 characters) for better readability
                char[] chars = new char[36];
                // NOTE(review): no explicit charset - uses the platform default.
                // Safe in practice only because a uuid is plain ASCII.
                InputStreamReader reader = new InputStreamReader(in);
                try {
                    reader.read(chars);
                } finally {
                    // Close failures are deliberately ignored; the uuid is already read.
                    try {
                        reader.close();
                    } catch (IOException ioe) {
                        // ignore
                    }
                }
                return NodeId.valueOf(new String(chars));
            } catch (Exception e) {
                String msg = "failed to load persisted repository state";
                log.debug(msg);
                throw new RepositoryException(msg, e);
            }
        } else {
            /**
             * use hard-coded uuid for root node rather than generating
             * a different uuid per repository instance; using a
             * hard-coded uuid makes it easier to copy/move entire
             * workspaces from one repository instance to another.
             */
            try {
                // persist uuid of the repository's root node
                OutputStream out = uuidFile.getOutputStream();
                // store uuid in text format for better readability
                // (a binary 16-byte format was used in a previous revision)
                OutputStreamWriter writer = new OutputStreamWriter(out);
                try {
                    writer.write(ROOT_NODE_ID.toString());
                } finally {
                    // Close failures are deliberately ignored after a successful write.
                    try {
                        writer.close();
                    } catch (IOException ioe) {
                        // ignore
                    }
                }
                return ROOT_NODE_ID;
            } catch (Exception e) {
                String msg = "failed to persist repository state";
                log.debug(msg);
                throw new RepositoryException(msg, e);
            }
        }
    } catch (FileSystemException fse) {
        String msg = "failed to access repository state";
        log.debug(msg);
        throw new RepositoryException(msg, fse);
    }
}
From source file:com.siblinks.ws.Notification.Helper.FireBaseNotification.java
public String sendMessage(final String toTokenId, final String title, final String dataType, final String dataId, final String content, final String icon, final String priority) { InputStreamReader in = null; BufferedReader br = null;/*w w w.j a v a2 s . c o m*/ String lines = ""; try { // FirebaseOptions options = new FirebaseOptions.Builder() // .setServiceAccount(new // FileInputStream("path/to/serviceAccountCredentials.json")) // .setDatabaseUrl("https://databaseName.firebaseio.com/") // .build(); // FirebaseApp.initializeApp(options); HttpClient client = HttpClientBuilder.create().build(); HttpPost post = new HttpPost(SibConstants.URL_SEND_NOTIFICATION_FIREBASE); post.setHeader("Content-type", "application/json"); post.setHeader("Authorization", "key=" + env.getProperty("firebase.server.key")); JSONObject message = new JSONObject(); message.put(Parameters.TO, toTokenId); message.put(Parameters.PRIORITY, priority); JSONObject notification = new JSONObject(); notification.put(Parameters.TITLE, title); notification.put(Parameters.BODY, content); // click action JSONObject clickAction = new JSONObject(); clickAction.put(Parameters.DATA_ID, dataId); clickAction.put(Parameters.DATA_TYPE, dataType); notification.put(Parameters.CLICK_ACTION, clickAction); // message.put(Parameters.NOTIFICATION, notification); StringEntity mesage = new StringEntity(message.toString(), "UTF-8"); logger.info("Message send Firebase: " + message.toString()); post.setEntity(mesage); HttpResponse response = client.execute(post); in = new InputStreamReader(response.getEntity().getContent(), "UTF-8"); br = new BufferedReader(in); String line = null; while ((line = br.readLine()) != null) { lines += line; } logger.info("Send Firebase result: " + lines.toString()); } catch (Exception e) { e.printStackTrace(); return e.getMessage(); } finally { try { if (in != null) { in.close(); } if (br != null) { br.close(); } } catch (IOException e) { // Do nothing } } return lines; }
From source file:com.ibm.amc.demo.provider.AmcDemoCommands.java
@Override public void setFirmware(DeviceContext deviceContext, InputStream inputStream) throws InvalidCredentialsException, DeviceExecutionException, AMPIOException, AMPException { if (logger.isEntryEnabled()) logger.entry("setFirmware", deviceContext, inputStream); final Device device = getDevice(deviceContext); String contents = null;//from w ww. ja v a 2 s. c o m String tagname = "firmwareRev"; InputStreamReader inputStreamReader = null; BufferedReader bufferedReader = null; try { try { String openTag = "<" + tagname + ">"; String closeTag = "</" + tagname + ">"; inputStreamReader = new InputStreamReader(new Base64InputStream(inputStream), "ISO-8859-1"); bufferedReader = new BufferedReader(inputStreamReader); while (bufferedReader.ready() && (contents == null)) { String line = bufferedReader.readLine(); if (line != null) { if (line.indexOf("-----BEGIN ") > -1) { break; } int openTagIndex = line.indexOf(openTag); if (openTagIndex > -1) { int closeTagIndex = line.lastIndexOf(closeTag); int beginIndex = openTagIndex + openTag.length(); int endIndex = closeTagIndex; contents = line.substring(beginIndex, endIndex); } } } } finally { if (bufferedReader != null) { bufferedReader.close(); } else if (inputStreamReader != null) { inputStreamReader.close(); } else if (inputStream != null) { inputStream.close(); } } } catch (Throwable e) { throw new AMPException(e); } if (contents == null) { throw new AMPException(); } int periodIndex = contents.indexOf("."); String version = contents.substring(periodIndex + 1); device.setFirmwareLevel(version); unquiesceDevice(deviceContext); if (logger.isEntryEnabled()) logger.exit("setFirmware"); }
From source file:com.zoffcc.applications.aagtl.HTMLDownloader.java
public void loadCookies() { String[] ret = new String[2]; // make dirs/*from w ww . j a va2s . c o m*/ File dir1 = new File(this.main_aagtl.main_dir + "/config"); dir1.mkdirs(); // load cookies from file File cookie_file = new File(this.main_aagtl.main_dir + "/config/cookie.txt"); FileInputStream fIn = null; InputStreamReader isr = null; char[] inputBuffer = new char[255]; Writer writer = new StringWriter(); String data = null; try { fIn = new FileInputStream(cookie_file); isr = new InputStreamReader(fIn); int n = 0; while ((n = isr.read(inputBuffer)) != -1) { writer.write(inputBuffer, 0, n); } data = writer.toString(); } catch (Exception e) { e.printStackTrace(); System.out.println("loadCookies: Exception1"); return; } finally { try { isr.close(); fIn.close(); } catch (NullPointerException e2) { System.out.println("loadCookies: Exception2"); return; } catch (IOException e) { System.out.println("loadCookies: Exception3"); e.printStackTrace(); return; } } if (cookie_jar == null) { // cookie_jar = new CookieStore(); return; } else { cookie_jar.clear(); } // Log.d("load cookie:", "->" + String.valueOf(data)); // [[version: 0][name: ASP.NET_SessionId] // [value: cyuoctxrwio1x13vivqzlxgi][domain: www.geocaching.com] // [path: /][expiry: null], [version: 0][name: userid] // [value: 8a72e55f-419c-4da7-8de3-7813a3fda9c7][domain: // www.geocaching.com] // [path: /][expiry: Tue Apr 26 15:41:14 Europe/Belgrade 2011]] if (data.length() > 1) { // check for multpile cookies if (data.startsWith("[[")) { // strip [ and ] at begin and end of string data = data.substring(1, data.length() - 1); String s3 = "\\], \\["; String[] a3 = data.split(s3); String data_cookie; for (int j3 = 0; j3 < a3.length; j3++) { data_cookie = a3[j3]; if (j3 == 0) { data_cookie = data_cookie + "]"; } else { data_cookie = "[" + data_cookie; } // System.out.println("parsing cookie #" + j3 + ": " + // data_cookie); String s2 = "]"; String[] a = data_cookie.split(s2); String x = null; String c1, c2 = null; 
String c_name = null, c_value = null, c_domain = null, c_path = null; String c_version = null; BasicClientCookie this_cookie = null; for (int j = 0; j < a.length; j++) { x = a[j].replace("[", "").trim(); c1 = x.split(":")[0]; c2 = x.split(":")[1].substring(1); // Log.d("load cookie:", "->" + String.valueOf(c1)); // Log.d("load cookie:", "->" + String.valueOf(c2)); if (c1.matches("name") == true) { // Log.d("name:", "->" + String.valueOf(c1)); c_name = c2; } else if (c1.matches("value") == true) { c_value = c2; } else if (c1.matches("domain") == true) { c_domain = c2; } else if (c1.matches("path") == true) { c_path = c2; } else if (c1.matches("version") == true) { c_version = c2; } } this_cookie = new BasicClientCookie(c_name, c_value); this_cookie.setDomain(c_domain); this_cookie.setPath(c_path); // System.out.println("created cookie: ->" + // String.valueOf(this_cookie)); this.cookie_jar.addCookie(this_cookie); } } // single cookie else { String s2 = "]"; String[] a = data.split(s2); String x = null; String c1, c2 = null; String c_name = null, c_value = null, c_domain = null, c_path = null; String c_version = null; BasicClientCookie this_cookie = null; for (int j = 0; j < a.length; j++) { x = a[j].replace("[", "").trim(); c1 = x.split(":")[0]; c2 = x.split(":")[1].substring(1); // Log.d("load cookie:", "->" + String.valueOf(c1)); // Log.d("load cookie:", "->" + String.valueOf(c2)); if (c1.matches("name") == true) { // Log.d("name:", "->" + String.valueOf(c1)); c_name = c2; } else if (c1.matches("value") == true) { c_value = c2; } else if (c1.matches("domain") == true) { c_domain = c2; } else if (c1.matches("path") == true) { c_path = c2; } else if (c1.matches("version") == true) { c_version = c2; } } this_cookie = new BasicClientCookie(c_name, c_value); this_cookie.setDomain(c_domain); this_cookie.setPath(c_path); // System.out.println("created cookie: ->" + // String.valueOf(this_cookie)); this.cookie_jar.addCookie(this_cookie); } } return; }
From source file:com.osbitools.ws.shared.web.BasicWebUtils.java
public WebResponse readHttpData(String method, String url, byte[] params, String sheader, String stoken, String ctype) {/*ww w . j a v a 2 s . co m*/ WebResponse res; InputStreamReader in = null; HttpURLConnection conn = null; Boolean fparams = params.length != 0; try { conn = (HttpURLConnection) (new URL(url)).openConnection(); conn.setDoOutput(fparams); conn.setRequestMethod(method); if (ctype != null) { conn.setRequestProperty("Content-Type", ctype); conn.setRequestProperty("Content-Length", String.valueOf(params.length)); } if (stoken != null) conn.setRequestProperty(sheader == null ? "Cookie" : sheader, (sheader == null ? Constants.SECURE_TOKEN_NAME + "=" : "") + stoken); // Initiate connection conn.connect(); if (fparams) { OutputStream os = null; try { os = conn.getOutputStream(); os.write(params); } catch (IOException e) { return new WebResponse(conn); } finally { if (os != null) os.close(); } } // Response code int code; try { in = new InputStreamReader(conn.getInputStream()); } catch (IOException e) { return new WebResponse(conn); } // Read response try { code = conn.getResponseCode(); } catch (IOException e) { return null; } try { StringWriter out = new StringWriter(); GenericUtils.copy(in, out); String msg = out.toString(); out.close(); in.close(); res = new WebResponse(code, msg.replaceFirst("\"request_id\":\\d*", "\"request_id\":")); // Read and remember cookie for POST method if (method == "POST") { res.setCookie(conn.getHeaderField("Set-Cookie")); } } catch (IOException e) { return new WebResponse(code); } } catch (IOException e) { System.out.println("HTTP Request failed. " + e.getMessage()); return null; } finally { if (in != null) { try { in.close(); } catch (IOException e) { // Do nothing } } if (conn != null) conn.disconnect(); } return res; }
From source file:org.acmsl.commons.utils.io.FileUtils.java
/**
 * Reads a file and returns its contents decoded with the given charset.
 *
 * @param file the file to be read.
 * @param charset the {@link Charset} used to decode the bytes.
 * @return the decoded contents of the file, exactly as many chars as were read.
 * @throws FileNotFoundException if the file is not found.
 * @throws SecurityException if the operation is forbidden because of
 * security manager settings.
 * @throws IOException if some I/O exception occurs, or the file is too large.
 */
@NotNull
public char[] readFileContents(@NotNull final File file, @NotNull final Charset charset)
    throws SecurityException, IOException {
    @Nullable
    FileInputStream t_fisFileStream = null;
    @Nullable
    InputStreamReader t_isFileReader = null;
    /*
     * To read file's contents it's better to use BufferedReader class.
     */
    @Nullable
    BufferedReader t_frPageBufferedReader = null;
    try {
        t_fisFileStream = new FileInputStream(file);
        t_isFileReader = new InputStreamReader(t_fisFileStream, charset);
        t_frPageBufferedReader = new BufferedReader(t_isFileReader);
        if (file.length() > Integer.MAX_VALUE) {
            throw new IOException("File too large (" + file.length() + " bytes)");
        }
        /*
         * The byte length of the file is an upper bound on the char count
         * (multi-byte charsets decode to FEWER chars than bytes), so allocate
         * a buffer of that size and trim afterwards.
         */
        @NotNull
        final char[] buffer = new char[(int) file.length()];
        /*
         * BUG FIX: a single read() call may legally return fewer chars than
         * requested, and the original also returned an array padded with '\0'
         * for any multi-byte charset. Read in a loop until EOF or the buffer
         * is full, then trim to the number of chars actually decoded.
         */
        int totalRead = 0;
        int read;
        while (totalRead < buffer.length
                && (read = t_frPageBufferedReader.read(buffer, totalRead, buffer.length - totalRead)) != -1) {
            totalRead += read;
        }
        return (totalRead == buffer.length) ? buffer : java.util.Arrays.copyOf(buffer, totalRead);
    } finally {
        // Close the whole chain; log (never throw) on close failure.
        if (t_frPageBufferedReader != null) {
            try {
                t_frPageBufferedReader.close();
            } catch (final IOException cannotCloseStream) {
                LogFactory.getLog(FileUtils.class).warn("Cannot close file", cannotCloseStream);
            }
        }
        if (t_isFileReader != null) {
            try {
                t_isFileReader.close();
            } catch (final IOException cannotCloseStream) {
                LogFactory.getLog(FileUtils.class).warn("Cannot close file", cannotCloseStream);
            }
        }
        if (t_fisFileStream != null) {
            try {
                t_fisFileStream.close();
            } catch (final IOException cannotCloseStream) {
                LogFactory.getLog(FileUtils.class).warn("Cannot close file", cannotCloseStream);
            }
        }
    }
}
From source file:com.ibm.team.build.internal.hjplugin.util.HttpUtils.java
/**
 * Log the error that occurred and provide an exception that encapsulates the failure as best as
 * possible. This means parsing the output and if its from RTC extract the stack trace from
 * there.
 * <p>
 * If the response body is RTC-style JSON containing {@code errorMessage} and
 * {@code errorTraceMarshall}, the returned IOException carries that message and a
 * reconstructed stack trace; otherwise the supplied fallback message is used.
 * The (pretty-printed) response body is always logged at FINER.
 *
 * @param fullURI The URI requested
 * @param httpResponse The response from the request
 * @param message A message for the failure if nothing can be detected from the response
 * @return An exception representing the failure
 */
@SuppressWarnings("rawtypes")
private static IOException logError(String fullURI, CloseableHttpResponse httpResponse, String message) {
    printMessageHeaders(httpResponse);
    // Fallback result if the body yields nothing better.
    IOException error = new IOException(message);
    try {
        InputStreamReader inputStream = new InputStreamReader(httpResponse.getEntity().getContent(), UTF_8);
        try {
            String response = IOUtils.toString(inputStream);
            // this is one lonnnng string if its a stack trace.
            // try to get it as JSON so we can output it in a more friendly way.
            try {
                JSON json = JSONSerializer.toJSON(response);
                response = json.toString(4);
                if (json instanceof JSONObject) {
                    // see if we have a stack trace
                    JSONObject jsonObject = (JSONObject) json;
                    String errorMessage = jsonObject.getString("errorMessage"); //$NON-NLS-1$
                    error = new IOException(errorMessage);
                    // Rebuild the server-side stack trace from the marshalled frames.
                    JSONArray trace = jsonObject.getJSONArray("errorTraceMarshall"); //$NON-NLS-1$
                    List<StackTraceElement> stackElements = new ArrayList<StackTraceElement>(trace.size());
                    for (Iterator iterator = trace.iterator(); iterator.hasNext();) {
                        Object element = iterator.next();
                        if (element instanceof JSONObject) {
                            JSONObject jsonElement = (JSONObject) element;
                            String cls = jsonElement.getString("errorTraceClassName"); //$NON-NLS-1$
                            String method = jsonElement.getString("errorTraceMethodName"); //$NON-NLS-1$
                            String file = jsonElement.getString("errorTraceFileName"); //$NON-NLS-1$
                            int line = jsonElement.getInt("errorTraceLineNumber"); //$NON-NLS-1$
                            StackTraceElement stackElement = new StackTraceElement(cls, method, file, line);
                            stackElements.add(stackElement);
                        }
                    }
                    error.setStackTrace(stackElements.toArray(new StackTraceElement[stackElements.size()]));
                    // our RTC responses have the stack trace in there twice. Remove 1 copy of it.
                    jsonObject.remove("errorTraceMarshall"); //$NON-NLS-1$
                    response = jsonObject.toString(4);
                }
            } catch (JSONException e) {
                // not JSON or not a RTC stack trace in the JSONObject so just log what we have
            }
            LOGGER.finer(response);
        } finally {
            // Best-effort close; only log the failure.
            try {
                inputStream.close();
            } catch (IOException e) {
                LOGGER.finer("Failed to close the result input stream for request: " + fullURI); //$NON-NLS-1$
            }
        }
    } catch (IOException e) {
        // Reading the body failed entirely; return the fallback error.
        LOGGER.finer("Unable to capture details of the failure"); //$NON-NLS-1$
    }
    return error;
}
From source file:be.docarch.odt2braille.PEF.java
/** * maxPages: -1 = infinity/*www . j a v a2 s .c om*/ */ private int addPagesToSection(Document document, Element sectionElement, File brailleFile, int maxRows, int maxCols, int maxPages) throws IOException, Exception { int pageCount = 0; FileInputStream fileInputStream = new FileInputStream(brailleFile); InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream, "UTF-8"); BufferedReader bufferedReader = new BufferedReader(inputStreamReader); Element pageElement; Element rowElement; Node textNode; String line; boolean nextPage = bufferedReader.ready() && (maxPages > pageCount || maxPages == -1); try { while (nextPage) { pageElement = document.createElementNS(pefNS, "page"); for (int i = 0; i < maxRows; i++) { line = bufferedReader.readLine(); if (line == null) { throw new Exception("number of rows < " + maxRows); } line = line.replaceAll("\u2800", "\u0020").replaceAll("\u00A0", "\u0020") .replaceAll("\uE00F", "\u002D").replaceAll("\uE000", "\u0020"); if (line.length() > maxCols) { throw new Exception("line length > " + maxCols); } rowElement = document.createElementNS(pefNS, "row"); textNode = document.createTextNode(liblouisTable.toBraille(line)); rowElement.appendChild(textNode); pageElement.appendChild(rowElement); if (IS_WINDOWS) { bufferedReader.readLine(); } } sectionElement.appendChild(pageElement); pageCount++; if (bufferedReader.read() != '\f') { throw new Exception("unexpected character, should be form feed"); } nextPage = nextPage = bufferedReader.ready() && (maxPages > pageCount || maxPages == -1); } } finally { if (bufferedReader != null) { bufferedReader.close(); inputStreamReader.close(); fileInputStream.close(); } } return pageCount; }
From source file:org.sakaiproject.sitestats.impl.DBHelper.java
public void preloadDefaultReports() { HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { Connection c = null;//from w ww. java 2s .c om InputStreamReader isr = null; BufferedReader br = null; try { ClassPathResource defaultReports = new ClassPathResource(dbVendor + "/default_reports.sql"); LOG.info("init(): - preloading sitestats default reports"); isr = new InputStreamReader(defaultReports.getInputStream()); br = new BufferedReader(isr); c = session.connection(); String sqlLine = null; while ((sqlLine = br.readLine()) != null) { sqlLine = sqlLine.trim(); if (!sqlLine.equals("") && !sqlLine.startsWith("--")) { if (sqlLine.endsWith(";")) { sqlLine = sqlLine.substring(0, sqlLine.indexOf(";")); } Statement st = null; try { st = c.createStatement(); st.execute(sqlLine); } catch (SQLException e) { if (!"23000".equals(e.getSQLState())) { LOG.warn("Failed to preload default report: " + sqlLine, e); } } catch (Exception e) { LOG.warn("Failed to preload default report: " + sqlLine, e); } finally { if (st != null) st.close(); } } } } catch (HibernateException e) { LOG.error("Error while preloading default reports", e); } catch (Exception e) { LOG.error("Error while preloading default reports", e); } finally { if (br != null) { try { br.close(); } catch (IOException e) { } } if (isr != null) { try { isr.close(); } catch (IOException e) { } } if (c != null) { c.close(); } } return null; } }; getHibernateTemplate().execute(hcb); }