List of usage examples for java.io InputStreamReader close
public void close() throws IOException
From source file:de.unibi.cebitec.bibiserv.web.beans.runinthecloud.BashExecutor.java
// NOTE(review): scraped listing — this example is collapsed onto two long
// physical lines; code below is kept byte-identical.
//
// What the visible code does: writes the user's active SSH key pair to a
// per-user "_identityfile.pem" under tempDirectoryPath (first active pair
// wins, search stops at it), then starts a background thread that loads the
// template properties from `isr`, overlays the wizard-selected EC2/BiBiGrid
// settings, stores the result to `out`, and closes BOTH `isr` and `out` in
// its finally block — so the caller must NOT close those streams itself.
// The availability zone is hard-coded to region + "a" (acknowledged inline).
// NOTE(review): `new ArrayList(dc.retrieveSshKeyFile(user))` is a raw-type
// use — presumably should be `new ArrayList<>(...)`; confirm before cleanup.
// NOTE(review): `id_pw.close()` inside the try is redundant — the
// try-with-resources already closes it.
/** * Creates the needed bibigrid.properties file which will configure the * bibigrid binaries to start correctly. * * @param tempDirectoryPath - The resolved unique folder id * @param isr - inputstream reader/*w w w. j a v a 2s . com*/ * @param out - outputStream * @param execScript - filediscriptor of execScript * @param sourceProperties - filediscriptor to the now building * bibigrid.properties */ private void createBiBiGridPropertiesFile(final Path tempDirectoryPath, final InputStreamReader isr, final OutputStream out, final File execScript, final Properties sourceProperties) { /** * Get selected values from wizard. */ final Integer numberOfSlaves = ec2InstanceWizard.getNumberOfSlaves(); final String masterInstanceType = ec2InstanceWizard.getSelectedEc2MasterInstance().getInstanceName(); final String slaveInstanceType = ec2InstanceWizard.getSelectedEc2SlaveInstance().getInstanceName(); final String region = ec2InstanceWizard.getSelectedRegion(); /** * Get identitiyfile from SSH-Keychain Module. */ final File identityfile = new File(tempDirectoryPath.toFile(), user.getId() + "_identityfile.pem"); final StringBuffer keypairname = new StringBuffer(); try (PrintWriter id_pw = new PrintWriter(identityfile)) { ArrayList<SshKeyPair> foundKeyPairs = new ArrayList(dc.retrieveSshKeyFile(user)); for (SshKeyPair kp : foundKeyPairs) { /** * If there is one active keypair we can quit searching and take * this keypair. The active keypair can be set in the * KeyChainModule. */ if (kp.isActive()) { id_pw.println(kp.getIdentityFile()); keypairname.append(kp.getKeypairName()); break; } } id_pw.close(); } catch (FileNotFoundException fnfe) { log.error(fnfe.getMessage(), fnfe); } /** * Start new Thread for creating and writing new bibigrid.properties. 
 */ Thread bibiGridPropertiesCreatorThread = new Thread(new Runnable() { @Override public void run() { try { sourceProperties.load(isr); sourceProperties.setProperty("accessKey", ec2InstanceWizard.getAwsbean().getAccessKey()); sourceProperties.setProperty("secretKey", ec2InstanceWizard.getAwsbean().getSecretKey()); sourceProperties.setProperty("region", region); sourceProperties.setProperty("keypair", keypairname.toString()); /** * The availability zones are HARD CODED! I've at this point * no idea how to parse the available * endpoints/availability-zones. */ sourceProperties.setProperty("availability-zone", region + "a"); sourceProperties.setProperty("master-instance-type", masterInstanceType); sourceProperties.setProperty("master-image", ec2InstanceWizard.getIMAGE_ID_HVM_MASTER()); sourceProperties.setProperty("slave-instance-type", slaveInstanceType); sourceProperties.setProperty("slave-instance-min", "1"); sourceProperties.setProperty("slave-instance-start", String.valueOf(numberOfSlaves)); sourceProperties.setProperty("slave-instance-max", String.valueOf(numberOfSlaves)); sourceProperties.setProperty("slave-image", ec2InstanceWizard.getIMAGE_ID_HVM_SLAVE()); sourceProperties.setProperty("ports", "8080,8081"); sourceProperties.setProperty("execute-script", execScript.getName()); sourceProperties.setProperty("use-master-as-compute", "no"); sourceProperties.setProperty("grid-properties-file", gridPropertiesFile); sourceProperties.setProperty("identity-file", identityfile.getName()); sourceProperties.store(out, null); } catch (IOException | NullPointerException e) { log.error(e.getMessage(), e); } finally { try { isr.close(); out.close(); } catch (IOException e) { log.error(e.getMessage(), e); } } } }); bibiGridPropertiesCreatorThread.start(); }
From source file:cgeo.geocaching.cgBase.java
// NOTE(review): scraped listing — collapsed onto two long physical lines;
// code kept byte-identical.
//
// Performs an HTTP(S) JSON request with up to 3 attempts (read timeout grows
// 30s/45s/60s). POSTs send params in the body with X-HTTP-Method-Override:
// GET; GETs append params to the query string. HTTPS installs a
// trust-all/no-verify socket factory via trustAllHosts()/doNotVerify —
// deliberate here, but insecure. Retries stop early on a non-blank buffer or
// HTTP 403. Returns the (whitespace-collapsed) body, or "" on failure.
// NOTE(review): the Log.i line is hard-coded "[POST ..." and logs
// params.length() even when the method was GET — misleading for GET requests.
// NOTE(review): the reader uses the platform default charset
// (new InputStreamReader(ins)) — presumably the server sends UTF-8; confirm.
// NOTE(review): inner catch calls connection.getResponseCode() — if the
// connection itself failed to open, the resulting NPE falls through to the
// outer catch (Exception).
public static String requestJSON(String scheme, String host, String path, String method, String params) { int httpCode = -1; //String httpLocation = null; if (method == null) { method = "GET"; } else {/*w ww . j a va 2 s . co m*/ method = method.toUpperCase(); } boolean methodPost = false; if (method.equalsIgnoreCase("POST")) { methodPost = true; } URLConnection uc = null; HttpURLConnection connection = null; Integer timeout = 30000; final StringBuffer buffer = new StringBuffer(); for (int i = 0; i < 3; i++) { if (i > 0) { Log.w(cgSettings.tag, "Failed to download data, retrying. Attempt #" + (i + 1)); } buffer.delete(0, buffer.length()); timeout = 30000 + (i * 15000); try { try { URL u = null; if (methodPost) { u = new URL(scheme + host + path); } else { u = new URL(scheme + host + path + "?" + params); } if (u.getProtocol().toLowerCase().equals("https")) { trustAllHosts(); HttpsURLConnection https = (HttpsURLConnection) u.openConnection(); https.setHostnameVerifier(doNotVerify); uc = https; } else { uc = (HttpURLConnection) u.openConnection(); } uc.setRequestProperty("Host", host); uc.setRequestProperty("Accept", "application/json, text/javascript, */*; q=0.01"); if (methodPost) { uc.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); uc.setRequestProperty("Content-Length", Integer.toString(params.length())); uc.setRequestProperty("X-HTTP-Method-Override", "GET"); } else { uc.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); } uc.setRequestProperty("X-Requested-With", "XMLHttpRequest"); connection = (HttpURLConnection) uc; connection.setReadTimeout(timeout); connection.setRequestMethod(method); HttpURLConnection.setFollowRedirects(false); // TODO: Fix these (FilCab) connection.setDoInput(true); if (methodPost) { connection.setDoOutput(true); final OutputStream out = connection.getOutputStream(); final OutputStreamWriter wr = new OutputStreamWriter(out); wr.write(params); wr.flush(); wr.close(); } else { 
connection.setDoOutput(false); } InputStream ins = getInputstreamFromConnection(connection); final InputStreamReader inr = new InputStreamReader(ins); final BufferedReader br = new BufferedReader(inr, 1024); readIntoBuffer(br, buffer); httpCode = connection.getResponseCode(); final String paramsLog = params.replaceAll(passMatch, "password=***"); Log.i(cgSettings.tag + " | JSON", "[POST " + (int) (params.length() / 1024) + "k | " + httpCode + " | " + (int) (buffer.length() / 1024) + "k] Downloaded " + "http://" + host + path + "?" + paramsLog); connection.disconnect(); br.close(); ins.close(); inr.close(); } catch (IOException e) { httpCode = connection.getResponseCode(); Log.e(cgSettings.tag, "cgeoBase.requestJSON.IOException: " + httpCode + ": " + connection.getResponseMessage() + " ~ " + e.toString()); } } catch (Exception e) { Log.e(cgSettings.tag, "cgeoBase.requestJSON: " + e.toString()); } if (StringUtils.isNotBlank(buffer)) { break; } if (httpCode == 403) { // we're not allowed to download content, so let's move break; } } String page = null; //This is reported as beeing deadCode (httpLocation is always null) //2011-08-09 - 302 is redirect so something should probably be done /* * if (httpCode == 302 && httpLocation != null) { * final Uri newLocation = Uri.parse(httpLocation); * if (newLocation.isRelative()) { * page = requestJSONgc(host, path, params); * } else { * page = requestJSONgc(newLocation.getHost(), newLocation.getPath(), params); * } * } else { */ replaceWhitespace(buffer); page = buffer.toString(); //} if (page != null) { return page; } else { return ""; } }
From source file:com.naryx.tagfusion.cfm.xml.ws.javaplatform.DynamicWebServiceStubGenerator.java
// NOTE(review): scraped listing — collapsed onto two long physical lines and
// kept byte-identical; the line break below falls INSIDE the string literal
// "Failed to access WSDL: ... Proxy authentication is required." — an
// extraction artifact of the scrape, not valid as-is in the real source.
//
// Resolves WSDL text from one of three sources: the argument itself when it
// starts with "<?xml version"; an HTTP(S) fetch (deprecated
// DefaultHttpClient, hence @SuppressWarnings) honoring CallParameters
// timeout, credentials, proxy and proxy credentials, with explicit errors
// for 407/401/305 and any non-200; otherwise a plain file read from disk.
// The finally block closes reader, stream, and releases the HttpGet
// connection. IOExceptions are rethrown as cfmRunTimeException.
// NOTE(review): new InputStreamReader(is) uses the platform default charset,
// ignoring any encoding declared in the WSDL's XML prolog — TODO confirm
// callers only feed it ASCII-compatible WSDLs.
@SuppressWarnings("deprecation") private String getWSDLContents(String wsdlURL, CallParameters cp) throws cfmRunTimeException { try {/*from w ww.j ava2 s . c o m*/ String wsdlL = wsdlURL.toLowerCase(); String contents = null; if (wsdlL.startsWith("<?xml version")) { // The location is the WSDL itself (unexpected) contents = wsdlURL; } else { InputStream is = null; InputStreamReader isr = null; StringBuilder buffy = null; HttpGet method = null; try { if (wsdlL.startsWith("http:") || wsdlL.startsWith("https:")) { // Read from network DefaultHttpClient client = new DefaultHttpClient(); // Set the timeout int timeout = cp.getTimeout(); client.getParams().setParameter("http.connection.timeout", timeout); client.getParams().setParameter("http.socket.timeout", timeout); if (cp.getUsername() != null || cp.getPassword() != null) { // Set any credentials client.getCredentialsProvider().setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(cp.getUsername(), cp.getPassword())); } if (cp.getProxyServer() != null) { // Set the proxy HttpHost proxy = new HttpHost(cp.getProxyServer(), (cp.getProxyPort() == -1) ? cp.getDefaultProxyPort() : cp.getProxyPort()); client.getParams().setParameter(ConnRoutePNames.DEFAULT_PROXY, proxy); if (cp.getProxyUser() != null || cp.getProxyPassword() != null) { // Set the proxy credentials client.getCredentialsProvider().setCredentials( new AuthScope(cp.getProxyServer(), cp.getProxyPort()), new UsernamePasswordCredentials(cp.getProxyUser(), cp.getProxyPassword())); } } // Create the method and get the response method = new HttpGet(wsdlURL); client.getParams().setParameter("http.protocol.handle-redirects", true); HttpResponse response = client.execute(method); switch (response.getStatusLine().getStatusCode()) { case HttpStatus.SC_PROXY_AUTHENTICATION_REQUIRED: throw new cfmRunTimeException( catchDataFactory.extendedException("errorCode.runtimeError", "Failed to access WSDL: " + wsdlURL + ". 
Proxy authentication is required.", response.getStatusLine().toString())); case HttpStatus.SC_UNAUTHORIZED: throw new cfmRunTimeException( catchDataFactory.extendedException("errorCode.runtimeError", "Failed to access WSDL: " + wsdlURL + ". Authentication is required.", response.getStatusLine().toString())); case HttpStatus.SC_USE_PROXY: throw new cfmRunTimeException( catchDataFactory.extendedException("errorCode.runtimeError", "Failed to access WSDL: " + wsdlURL + ". The use of a proxy is required.", response.getStatusLine().toString())); } if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) throw new cfmRunTimeException(catchDataFactory.extendedException( "errorCode.runtimeError", "Failed to access WSDL: " + wsdlURL, response.getStatusLine().toString())); is = response.getEntity().getContent(); } else { // Just try to read off disk File f = new File(wsdlURL); is = new FileInputStream(f); } // Read the data char[] buf = new char[4096]; int read = -1; buffy = new StringBuilder(); isr = new InputStreamReader(is); while ((read = isr.read(buf, 0, buf.length)) != -1) buffy.append(buf, 0, read); contents = buffy.toString(); } finally { if (isr != null) isr.close(); if (is != null) is.close(); if (method != null) method.releaseConnection(); } } // Calc the sum and return return contents; } catch (IOException ex) { throw new cfmRunTimeException( catchDataFactory.generalException("errorCode.runtimeError", "Failed to access WSDL located at " + wsdlURL + ". There may be an error in the target WSDL. " + ex.getMessage())); } }
From source file:cgeo.geocaching.cgBase.java
// NOTE(review): scraped listing — collapsed onto two long physical lines;
// code kept byte-identical.
//
// POSTs `params` to http://host/path with the stored cookie jar, retrying up
// to 3 times with growing read timeouts (30s/45s/60s), optionally sending
// browser-like headers when settings.asBrowser == 1. Response cookies are
// saved back via CookieJar.setCookies. On HTTP 302 with a Location header it
// recurses: a relative Location re-requests the SAME host/path, an absolute
// one follows the new host/path. Returns the whitespace-collapsed body or "".
// NOTE(review): the relative-redirect branch calls requestJSONgc(host, path,
// params) with unchanged arguments — if the server keeps answering 302 this
// recurses without bound; there is no redirect-depth guard. Verify intended.
// NOTE(review): `buffer != null` in the loop-exit check is always true
// (buffer is a final local initialized above).
public String requestJSONgc(String host, String path, String params) { int httpCode = -1; String httpLocation = null;/*from w w w .ja v a 2 s .c om*/ final String cookiesDone = CookieJar.getCookiesAsString(prefs); URLConnection uc = null; HttpURLConnection connection = null; Integer timeout = 30000; final StringBuffer buffer = new StringBuffer(); for (int i = 0; i < 3; i++) { if (i > 0) { Log.w(cgSettings.tag, "Failed to download data, retrying. Attempt #" + (i + 1)); } buffer.delete(0, buffer.length()); timeout = 30000 + (i * 15000); try { // POST final URL u = new URL("http://" + host + path); uc = u.openConnection(); uc.setRequestProperty("Host", host); uc.setRequestProperty("Cookie", cookiesDone); uc.setRequestProperty("Content-Type", "application/json; charset=UTF-8"); uc.setRequestProperty("X-Requested-With", "XMLHttpRequest"); uc.setRequestProperty("Accept", "application/json, text/javascript, */*; q=0.01"); uc.setRequestProperty("Referer", host + "/" + path); if (settings.asBrowser == 1) { uc.setRequestProperty("Accept-Charset", "utf-8, iso-8859-1, utf-16, *;q=0.7"); uc.setRequestProperty("Accept-Language", "en-US"); uc.setRequestProperty("User-Agent", idBrowser); uc.setRequestProperty("Connection", "keep-alive"); uc.setRequestProperty("Keep-Alive", "300"); } connection = (HttpURLConnection) uc; connection.setReadTimeout(timeout); connection.setRequestMethod("POST"); HttpURLConnection.setFollowRedirects(false); // TODO: Fix these (FilCab) connection.setDoInput(true); connection.setDoOutput(true); final OutputStream out = connection.getOutputStream(); final OutputStreamWriter wr = new OutputStreamWriter(out); wr.write(params); wr.flush(); wr.close(); CookieJar.setCookies(prefs, uc); InputStream ins = getInputstreamFromConnection(connection); final InputStreamReader inr = new InputStreamReader(ins); final BufferedReader br = new BufferedReader(inr); readIntoBuffer(br, buffer); httpCode = connection.getResponseCode(); httpLocation = 
uc.getHeaderField("Location"); final String paramsLog = params.replaceAll(passMatch, "password=***"); Log.i(cgSettings.tag + " | JSON", "[POST " + (int) (params.length() / 1024) + "k | " + httpCode + " | " + (int) (buffer.length() / 1024) + "k] Downloaded " + "http://" + host + path + "?" + paramsLog); connection.disconnect(); br.close(); ins.close(); inr.close(); } catch (IOException e) { Log.e(cgSettings.tag, "cgeoBase.requestJSONgc.IOException: " + e.toString()); } catch (Exception e) { Log.e(cgSettings.tag, "cgeoBase.requestJSONgc: " + e.toString()); } if (buffer != null && buffer.length() > 0) { break; } } String page = null; if (httpCode == 302 && httpLocation != null) { final Uri newLocation = Uri.parse(httpLocation); if (newLocation.isRelative()) { page = requestJSONgc(host, path, params); } else { page = requestJSONgc(newLocation.getHost(), newLocation.getPath(), params); } } else { replaceWhitespace(buffer); page = buffer.toString(); } if (page != null) { return page; } else { return ""; } }
From source file:com.cohort.util.String2.java
/** * This reads the bytes of the file with the specified charset. * This does not alter the characters (e.g., the line endings). * //from ww w . j a v a2 s.c o m * <P>This method is generally appropriate for small and medium-sized * files. For very large files or files that need additional processing, * it may be better to write a custom method to * read the file line-by-line, processing as it goes. * * @param fileName is the (usually canonical) path (dir+name) for the file * @param charset e.g., ISO-8859-1, UTF-8, or "" or null for the default (ISO-8859-1) * @return a String with the decoded contents of the file. * @throws Exception if trouble */ public static String directReadFromFile(String fileName, String charset) throws Exception { //declare the BufferedReader variable //declare the results variable: String results[] = {"", ""}; //BufferedReader and results are declared outside try/catch so //that they can be accessed from within either try/catch block. long time = System.currentTimeMillis(); FileInputStream fis = new FileInputStream(fileName); InputStreamReader isr = new InputStreamReader(fis, charset == null || charset.length() == 0 ? ISO_8859_1 : charset); StringBuilder sb = new StringBuilder(8192); //get the text from the file try { char buffer[] = new char[8192]; int nRead; while ((nRead = isr.read(buffer)) >= 0) //-1 = end-of-file sb.append(buffer, 0, nRead); } finally { try { isr.close(); } catch (Exception e) { } } return sb.toString(); }
From source file:cgeo.geocaching.cgBase.java
// NOTE(review): scraped listing — collapsed onto four long physical lines;
// code kept byte-identical.
//
// Core HTTP request helper: GET or POST (defaults to POST for anything
// else), http or https by `secure`, cookie jar in/out, optional browser-like
// headers, up to 5 attempts with read timeout 30s + i*10s. On HTTP 302 with
// a Location header it recurses via a DIFFERENT overload taking a
// Map<String,String> of params (relative Location keeps host/path; absolute
// Location switches host/path and upgrades `secure` when the scheme is
// https). Otherwise fills a cgResponse with body/status/message/url.
// NOTE(review): `xContentType` is a Boolean object used directly in
// `if (xContentType)` — a null argument unboxes to an NPE, caught only by
// the broad catch (Exception) inside the retry loop. Verify callers.
// NOTE(review): new InputStreamReader(ins) uses the platform default
// charset despite the Accept-Charset header requesting utf-8 — presumably
// the responses are ASCII-safe; confirm.
public cgResponse request(boolean secure, String host, String path, String method, String params, int requestId, Boolean xContentType) {/*from w w w . j av a 2 s . c o m*/ URL u = null; int httpCode = -1; String httpMessage = null; String httpLocation = null; if (requestId == 0) { requestId = (int) (Math.random() * 1000); } if (method == null || (method.equalsIgnoreCase("GET") == false && method.equalsIgnoreCase("POST") == false)) { method = "POST"; } else { method = method.toUpperCase(); } // https String scheme = "http://"; if (secure) { scheme = "https://"; } String cookiesDone = CookieJar.getCookiesAsString(prefs); URLConnection uc = null; HttpURLConnection connection = null; Integer timeout = 30000; StringBuffer buffer = null; for (int i = 0; i < 5; i++) { if (i > 0) { Log.w(cgSettings.tag, "Failed to download data, retrying. Attempt #" + (i + 1)); } buffer = new StringBuffer(); timeout = 30000 + (i * 10000); try { if (method.equals("GET")) { // GET u = new URL(scheme + host + path + "?" 
+ params); uc = u.openConnection(); uc.setRequestProperty("Host", host); uc.setRequestProperty("Cookie", cookiesDone); if (xContentType) { uc.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); } if (settings.asBrowser == 1) { uc.setRequestProperty("Accept", "application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"); // uc.setRequestProperty("Accept-Encoding", "gzip"); // not supported via cellular network uc.setRequestProperty("Accept-Charset", "utf-8, iso-8859-1, utf-16, *;q=0.7"); uc.setRequestProperty("Accept-Language", "en-US"); uc.setRequestProperty("User-Agent", idBrowser); uc.setRequestProperty("Connection", "keep-alive"); uc.setRequestProperty("Keep-Alive", "300"); } connection = (HttpURLConnection) uc; connection.setReadTimeout(timeout); connection.setRequestMethod(method); HttpURLConnection.setFollowRedirects(false); connection.setDoInput(true); connection.setDoOutput(false); } else { // POST u = new URL(scheme + host + path); uc = u.openConnection(); uc.setRequestProperty("Host", host); uc.setRequestProperty("Cookie", cookiesDone); if (xContentType) { uc.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); } if (settings.asBrowser == 1) { uc.setRequestProperty("Accept", "application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"); // uc.setRequestProperty("Accept-Encoding", "gzip"); // not supported via cellular network uc.setRequestProperty("Accept-Charset", "utf-8, iso-8859-1, utf-16, *;q=0.7"); uc.setRequestProperty("Accept-Language", "en-US"); uc.setRequestProperty("User-Agent", idBrowser); uc.setRequestProperty("Connection", "keep-alive"); uc.setRequestProperty("Keep-Alive", "300"); } connection = (HttpURLConnection) uc; connection.setReadTimeout(timeout); connection.setRequestMethod(method); HttpURLConnection.setFollowRedirects(false); connection.setDoInput(true); connection.setDoOutput(true); final OutputStream out = 
connection.getOutputStream(); final OutputStreamWriter wr = new OutputStreamWriter(out); wr.write(params); wr.flush(); wr.close(); } CookieJar.setCookies(prefs, uc); InputStream ins = getInputstreamFromConnection(connection); final InputStreamReader inr = new InputStreamReader(ins); final BufferedReader br = new BufferedReader(inr, 16 * 1024); readIntoBuffer(br, buffer); httpCode = connection.getResponseCode(); httpMessage = connection.getResponseMessage(); httpLocation = uc.getHeaderField("Location"); final String paramsLog = params.replaceAll(passMatch, "password=***"); Log.i(cgSettings.tag + "|" + requestId, "[" + method + " " + (int) (params.length() / 1024) + "k | " + httpCode + " | " + (int) (buffer.length() / 1024) + "k] Downloaded " + scheme + host + path + "?" + paramsLog); connection.disconnect(); br.close(); ins.close(); inr.close(); } catch (IOException e) { Log.e(cgSettings.tag, "cgeoBase.request.IOException: " + e.toString()); } catch (Exception e) { Log.e(cgSettings.tag, "cgeoBase.request: " + e.toString()); } if (buffer.length() > 0) { break; } } cgResponse response = new cgResponse(); try { if (httpCode == 302 && httpLocation != null) { final Uri newLocation = Uri.parse(httpLocation); if (newLocation.isRelative()) { response = request(secure, host, path, "GET", new HashMap<String, String>(), requestId, false, false, false); } else { boolean secureRedir = false; if (newLocation.getScheme().equals("https")) { secureRedir = true; } response = request(secureRedir, newLocation.getHost(), newLocation.getPath(), "GET", new HashMap<String, String>(), requestId, false, false, false); } } else { if (StringUtils.isNotEmpty(buffer)) { replaceWhitespace(buffer); String data = buffer.toString(); buffer = null; if (data != null) { response.setData(data); } else { response.setData(""); } response.setStatusCode(httpCode); response.setStatusMessage(httpMessage); response.setUrl(u.toString()); } } } catch (Exception e) { Log.e(cgSettings.tag, "cgeoBase.page: " + 
e.toString()); } return response; }
From source file:com.cohort.util.String2.java
/** * This is like the other readFromFile, but returns ArrayList of Strings * and throws Exception is trouble./* w w w .ja v a 2s . co m*/ * The strings in the ArrayList are not canonical! So this is useful * for reading, processing, and throwing away. * * <P>This method is generally appropriate for small and medium-sized * files. For very large files or files that need additional processing, * it may be more efficient to write a custom method to * read the file line-by-line, processing as it goes. * * @param fileName is the (usually canonical) path (dir+name) for the file * @param charset e.g., ISO-8859-1, UTF-8, or "" or null for the default (ISO-8859-1) * @param maxAttempt e.g. 3 (the tries are 1 second apart) * @return String[] with the lines from the file * @throws Exception if trouble */ public static String[] readLinesFromFile(String fileName, String charset, int maxAttempt) throws Exception { long time = System.currentTimeMillis(); InputStreamReader isr = null; for (int i = 0; i < maxAttempt; i++) { try { isr = new InputStreamReader(new FileInputStream(fileName), charset == null || charset.length() == 0 ? ISO_8859_1 : charset); break; //success } catch (RuntimeException e) { if (i == maxAttempt - 1) throw e; Math2.sleep(1); } } BufferedReader bufferedReader = new BufferedReader(isr); ArrayList<String> al = new ArrayList(); String s = bufferedReader.readLine(); while (s != null) { //null = end-of-file al.add(s); s = bufferedReader.readLine(); } bufferedReader.close(); isr.close(); return al.toArray(new String[0]); }
From source file:au.org.theark.study.util.DataUploader.java
// NOTE(review): scraped listing — collapsed onto two long physical lines;
// code kept byte-identical.
//
// Parses a delimited file (XLS input is first converted to CSV via
// XLStoCSV) of subject attachments: per row, resolves the subject by
// SUBJECTUID, matches STUDY_COMPONENT by name, checksums and copies the file
// at FILE_NAME_WITH_FULL_PATH into the study's attachment directory under a
// generated file id, and collects SubjectFile records which are batched to
// iStudyService at the end. csvReader/inputStreamReader are closed in the
// finally block; the report is appended there too, so it is emitted even on
// failure paths.
// NOTE(review): insertFieldsCount and updateFieldsCount are declared,
// reported in the summary, but never incremented — the report always says
// "0 fields were inserted/updated". Presumably a TODO; confirm.
// NOTE(review): Exceptions are rethrown as ArkSystemException(e.getMessage())
// without the cause, and e.printStackTrace() is used alongside the logger.
public StringBuffer uploadAndReportSubjectAttachmentDataFile(InputStream inputStream, long size, String fileFormat, char delimChar, String user_id) throws FileFormatException, ArkSystemException { uploadReport = new StringBuffer(); long rowCount = 0; long insertFieldsCount = 0; long updateFieldsCount = 0; List<SubjectFile> subjectFiles = new ArrayList<SubjectFile>(); InputStreamReader inputStreamReader = null; CsvReader csvReader = null;/*from w w w .j a v a 2 s. c o m*/ DecimalFormat decimalFormat = new DecimalFormat("0.00"); delimiterCharacter = delimChar; if (fileFormat.equalsIgnoreCase("XLS")) { Workbook w; try { w = Workbook.getWorkbook(inputStream); delimiterCharacter = ','; XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter); inputStream = xlsToCsv.convertXlsToCsv(w); inputStream.reset(); } catch (BiffException e) { log.error(e.getMessage()); } catch (IOException e) { log.error(e.getMessage()); } } try { inputStreamReader = new InputStreamReader(inputStream); csvReader = new CsvReader(inputStreamReader, delimiterCharacter); csvReader.readHeaders(); String[] stringLineArray; int subjectUidIndex = csvReader.getIndex("SUBJECTUID"); int filePathIndex = csvReader.getIndex("FILE_NAME_WITH_FULL_PATH"); int studyComponentIndex = csvReader.getIndex("STUDY_COMPONENT"); int commentIndex = csvReader.getIndex("COMMENT"); List<StudyComp> studyCompList = iArkCommonService.getStudyComponentByStudy(study); while (csvReader.readRecord()) { ++rowCount; stringLineArray = csvReader.getValues(); SubjectFile subjectFile = new SubjectFile(); subjectFile.setUserId(user_id); String subjectUID = stringLineArray[subjectUidIndex]; String studyCompName = stringLineArray[studyComponentIndex]; LinkSubjectStudy subject = iArkCommonService.getSubjectByUID(subjectUID, study); subjectFile.setLinkSubjectStudy(subject); for (StudyComp studyComp : studyCompList) { if (studyComp.getName().equals(studyCompName)) { subjectFile.setStudyComp(studyComp); break; } } 
subjectFile.setComments(stringLineArray[commentIndex]); // File processing String sourcePath = stringLineArray[filePathIndex]; File file = new File(sourcePath); subjectFile.setChecksum(iArkCommonService.generateArkFileChecksum(file, "MD5")); String fileName = file.getName(); subjectFile.setFilename(fileName); String fileId = iArkCommonService.generateArkFileId(fileName); subjectFile.setFileId(fileId); String directoryName = iArkCommonService.getArkFileDirName(study.getId(), subjectUID, au.org.theark.study.web.Constants.ARK_SUBJECT_ATTACHEMENT_DIR); // TODO need to check directory created successfully iArkCommonService.createArkFileAttachmentDirectoy(directoryName); String destinationPath = directoryName + File.separator + fileId; iArkCommonService.copyArkLargeFileAttachments(sourcePath, destinationPath); subjectFiles.add(subjectFile); } } catch (Exception e) { e.printStackTrace(); throw new ArkSystemException(e.getMessage()); } finally { uploadReport.append("Total file size: "); uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0)); uploadReport.append(" MB"); uploadReport.append("\n"); if (csvReader != null) { try { csvReader.close(); } catch (Exception ex) { log.error("Cleanup operation failed: csvRdr.close()", ex); } } if (inputStreamReader != null) { try { inputStreamReader.close(); } catch (Exception ex) { log.error("Cleanup operation failed: isr.close()", ex); } } } uploadReport.append("Process "); uploadReport.append(rowCount); uploadReport.append(" rows of data"); uploadReport.append("\n"); uploadReport.append(insertFieldsCount); uploadReport.append(" fields were inserted."); uploadReport.append("\n"); uploadReport.append(updateFieldsCount); uploadReport.append(" fields were updated."); uploadReport.append("\n"); try { iStudyService.processSubjectAttachmentBatch(subjectFiles); } catch (Exception e) { e.printStackTrace(); throw new ArkSystemException(e.getMessage()); } return uploadReport; }
From source file:au.org.ala.layers.dao.ObjectDAOImpl.java
@Override public void streamObjectsGeometryById(OutputStream os, String id, String geomtype) throws IOException { logger.info("Getting object info for id = " + id + " and geometry as " + geomtype); String sql = ""; if ("kml".equals(geomtype)) { sql = "SELECT ST_AsKml(the_geom) as geometry, name, \"desc\" as description FROM objects WHERE pid=?;"; } else if ("wkt".equals(geomtype)) { sql = "SELECT ST_AsText(the_geom) as geometry FROM objects WHERE pid=?;"; } else if ("geojson".equals(geomtype)) { sql = "SELECT ST_AsGeoJSON(the_geom) as geometry FROM objects WHERE pid=?;"; } else if ("shp".equals(geomtype)) { sql = "SELECT ST_AsText(the_geom) as geometry, name, \"desc\" as description FROM objects WHERE pid=?;"; }/*from ww w . ja va2 s . co m*/ List<Objects> l = jdbcTemplate.query(sql, ParameterizedBeanPropertyRowMapper.newInstance(Objects.class), id); if (l.size() > 0) { if ("shp".equals(geomtype)) { String wkt = l.get(0).getGeometry(); File zippedShapeFile = SpatialConversionUtils.buildZippedShapeFile(wkt, id, l.get(0).getName(), l.get(0).getDescription()); FileUtils.copyFile(zippedShapeFile, os); } else if ("kml".equals(geomtype)) { os.write(KML_HEADER.replace("<name></name>", "<name><![CDATA[" + l.get(0).getName() + "]]></name>") .replace("<description></description>", "<description><![CDATA[" + l.get(0).getDescription() + "]]></description>") .getBytes()); os.write(l.get(0).getGeometry().getBytes()); os.write(KML_FOOTER.getBytes()); } else { os.write(l.get(0).getGeometry().getBytes()); } } else { // get grid classes if (id.length() > 0) { // grid class pids are, 'layerPid:gridClassNumber' try { String[] s = id.split(":"); if (s.length >= 2) { int n = Integer.parseInt(s[1]); IntersectionFile f = layerIntersectDao.getConfig().getIntersectionFile(s[0]); if (f != null && f.getClasses() != null) { GridClass gc = f.getClasses().get(n); if (gc != null && ("kml".equals(geomtype) || "wkt".equals(geomtype) || "geojson".equals(geomtype) || "shp".equals(geomtype))) { // 
TODO: enable for type 'a' after // implementation of fields table defaultLayer // field File file = new File( f.getFilePath() + File.separator + s[1] + "." + geomtype + ".zip"); if ((f.getType().equals("a") || s.length == 2) && file.exists()) { ZipInputStream zis = null; try { zis = new ZipInputStream(new FileInputStream(file)); zis.getNextEntry(); byte[] buffer = new byte[1024]; int size; while ((size = zis.read(buffer)) > 0) { os.write(buffer, 0, size); } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { if (zis != null) { try { zis.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } } } else { // polygon BufferedInputStream bis = null; InputStreamReader isr = null; try { String[] cells = null; HashMap<String, Object> map = s.length == 2 ? null : getGridIndexEntry(f.getFilePath() + File.separator + s[1], s[2]); String wkt = null; if (map != null) { cells = new String[] { s[2], String.valueOf(map.get("charoffset")) }; if (cells != null) { // get polygon wkt string File file2 = new File( f.getFilePath() + File.separator + s[1] + ".wkt"); bis = new BufferedInputStream(new FileInputStream(file2)); isr = new InputStreamReader(bis); isr.skip(Long.parseLong(cells[1])); char[] buffer = new char[1024]; int size; StringBuilder sb = new StringBuilder(); sb.append("POLYGON"); int end = -1; while (end < 0 && (size = isr.read(buffer)) > 0) { sb.append(buffer, 0, size); end = sb.toString().indexOf("))"); } end += 2; wkt = sb.toString().substring(0, end); } } else { wkt = gc.getBbox(); } if (geomtype.equals("wkt")) { os.write(wkt.getBytes()); } else { WKTReader r = new WKTReader(); Geometry g = r.read(wkt); if (geomtype.equals("kml")) { os.write(KML_HEADER.getBytes()); Encoder encoder = new Encoder(new KMLConfiguration()); encoder.setIndenting(true); encoder.encode(g, KML.Geometry, os); os.write(KML_FOOTER.getBytes()); } else if (geomtype.equals("geojson")) { FeatureJSON fjson = new FeatureJSON(); final SimpleFeatureType TYPE = 
DataUtilities.createType("class", "the_geom:MultiPolygon,name:String"); SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder( TYPE); featureBuilder.add(g); featureBuilder.add(gc.getName()); fjson.writeFeature(featureBuilder.buildFeature(null), os); } else if (geomtype == "shp") { File zippedShapeFile = SpatialConversionUtils .buildZippedShapeFile(wkt, id, gc.getName(), null); FileUtils.copyFile(zippedShapeFile, os); } } } catch (Exception e) { logger.error(e.getMessage(), e); } finally { if (bis != null) { try { bis.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } if (isr != null) { try { isr.close(); } catch (Exception e) { logger.error(e.getMessage(), e); } } } } } } } } catch (Exception e) { logger.error(e.getMessage(), e); } } } }
From source file:au.org.theark.study.util.DataUploader.java
// NOTE(review): scraped listing — collapsed onto three long physical lines
// and kept byte-identical; the line break between the 2nd and 3rd lines
// falls INSIDE the string literal " fields were updated." — an extraction
// artifact of the scrape, not valid as-is in the real source.
//
// Parses a header-less delimited pedigree file with fixed column order:
// subjectUID, fatherUID, motherUID, twinStatus, twinUID. Builds
// LinkSubjectPedigree rows for father/mother (skipped when the UID is "-")
// using relationships looked up by name ("Father"/"Mother"), and
// LinkSubjectTwin rows ("M" -> MZ, anything else -> DZ) unless the pair was
// already recorded (isTwinRelationshipExists). Both lists are batched to
// iStudyService.processPedigreeBatch at the end. Readers are closed and the
// size summary appended in the finally block.
// NOTE(review): insertFieldsCount and updateFieldsCount are reported but
// never incremented — the summary always says 0. Presumably a TODO; confirm.
// NOTE(review): exceptions are rethrown as ArkSystemException(e.getMessage())
// without the cause, and e.printStackTrace() is used alongside the logger.
public StringBuffer uploadAndReportPedigreeDataFile(InputStream inputStream, long size, String fileFormat, char delimChar) throws FileFormatException, ArkSystemException { uploadReport = new StringBuffer(); long rowCount = 0; long insertFieldsCount = 0; long updateFieldsCount = 0; List<LinkSubjectPedigree> parentSubjectLinkRelationships = new ArrayList<LinkSubjectPedigree>(); List<LinkSubjectTwin> twinSubjectLinkRelationships = new ArrayList<LinkSubjectTwin>(); delimiterCharacter = delimChar;// w w w . java 2 s .co m InputStreamReader inputStreamReader = null; CsvReader csvReader = null; DecimalFormat decimalFormat = new DecimalFormat("0.00"); try { inputStreamReader = new InputStreamReader(inputStream); csvReader = new CsvReader(inputStreamReader, delimiterCharacter); // csvReader.readHeaders(); String[] stringLineArray; List<Relationship> familyRelationshipList = iArkCommonService.getFamilyRelationships(); HashMap<String, Relationship> familyRelationshipMap = new HashMap<String, Relationship>(); for (Relationship relationship : familyRelationshipList) { familyRelationshipMap.put(relationship.getName(), relationship); } List<TwinType> twinRelationshipList = iStudyService.getTwinTypes(); HashMap<String, TwinType> twinRelationshipMap = new HashMap<String, TwinType>(); for (TwinType type : twinRelationshipList) { twinRelationshipMap.put(type.getName(), type); } while (csvReader.readRecord()) { ++rowCount; int index = 0; stringLineArray = csvReader.getValues(); String subjectUID = stringLineArray[index++]; String fatherUID = stringLineArray[index++]; String motherUID = stringLineArray[index++]; String twinStatus = stringLineArray[index++]; String twinUID = stringLineArray[index++]; LinkSubjectStudy subjectUser = iArkCommonService.getSubjectByUID(subjectUID, study); if (fatherUID != null && !fatherUID.equalsIgnoreCase("-")) { LinkSubjectPedigree father = new LinkSubjectPedigree(); father.setSubject(subjectUser); 
father.setRelationship(familyRelationshipMap.get("Father")); LinkSubjectStudy fatherUser = iArkCommonService.getSubjectByUID(fatherUID, study); father.setRelative(fatherUser); parentSubjectLinkRelationships.add(father); } if (motherUID != null && !motherUID.equalsIgnoreCase("-")) { LinkSubjectPedigree mother = new LinkSubjectPedigree(); mother.setSubject(subjectUser); mother.setRelationship(familyRelationshipMap.get("Mother")); LinkSubjectStudy motherUser = iArkCommonService.getSubjectByUID(motherUID, study); mother.setRelative(motherUser); parentSubjectLinkRelationships.add(mother); } if (twinStatus != null && !twinStatus.equalsIgnoreCase("-") && !isTwinRelationshipExists(twinSubjectLinkRelationships, subjectUID, twinUID)) { LinkSubjectTwin twin = new LinkSubjectTwin(); if ("M".equalsIgnoreCase(twinStatus)) { twin.setTwinType(twinRelationshipMap.get("MZ")); } else { twin.setTwinType(twinRelationshipMap.get("DZ")); } twin.setFirstSubject(subjectUser); LinkSubjectStudy siblingUser = iArkCommonService.getSubjectByUID(twinUID, study); twin.setSecondSubject(siblingUser); twinSubjectLinkRelationships.add(twin); } } } catch (Exception e) { e.printStackTrace(); throw new ArkSystemException(e.getMessage()); } finally { uploadReport.append("Total file size: "); uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0)); uploadReport.append(" MB"); uploadReport.append("\n"); if (csvReader != null) { try { csvReader.close(); } catch (Exception ex) { log.error("Cleanup operation failed: csvRdr.close()", ex); } } if (inputStreamReader != null) { try { inputStreamReader.close(); } catch (Exception ex) { log.error("Cleanup operation failed: isr.close()", ex); } } } uploadReport.append("Process "); uploadReport.append(rowCount); uploadReport.append(" rows of data"); uploadReport.append("\n"); uploadReport.append(insertFieldsCount); uploadReport.append(" fields were inserted."); uploadReport.append("\n"); uploadReport.append(updateFieldsCount); uploadReport.append(" fields 
were updated."); uploadReport.append("\n"); try { iStudyService.processPedigreeBatch(parentSubjectLinkRelationships, twinSubjectLinkRelationships); } catch (Exception e) { e.printStackTrace(); throw new ArkSystemException(e.getMessage()); } return uploadReport; }