List of usage examples for java.util.Scanner.next()
public String next()
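A minimal, self-contained sketch of the basic contract (the class name ScannerNextDemo is illustrative, not from any of the sources below): next() skips the current delimiter pattern (whitespace by default), returns the following token, and throws NoSuchElementException when the input is exhausted, which is why most examples below guard calls with hasNext().

import java.util.Scanner;

public class ScannerNextDemo {
    public static void main(String[] args) {
        // Tokenize a string on the default whitespace delimiter.
        Scanner scanner = new Scanner("alpha beta gamma");
        while (scanner.hasNext()) { // guard: next() throws NoSuchElementException when exhausted
            System.out.println(scanner.next()); // prints "alpha", "beta", "gamma"
        }
        scanner.close();
    }
}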
From source file: com.nuvolect.deepdive.probe.DecompileApk.java

/**
 * Build a new DEX file excluding classes in the OPTIMIZED_CLASS_EXCLUSION file
 * @return
 */
private JSONObject optimizeDex() {

    final Thread.UncaughtExceptionHandler uncaughtExceptionHandler = new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(Thread t, Throwable e) {
            LogUtil.log(LogUtil.LogType.DECOMPILE, "Uncaught exception: " + e.toString());
            m_progressStream.putStream("Uncaught exception: " + t.getName());
            m_progressStream.putStream("Uncaught exception: " + e.toString());
        }
    };

    m_optimize_dex_time = System.currentTimeMillis(); // Save start time for tracking

    m_optimizeDexThread = new Thread(m_threadGroup, new Runnable() {
        @Override
        public void run() {

            m_progressStream = new ProgressStream(
                    new OmniFile(m_volumeId, m_appFolderPath + DEX_OPTIMIZATION_LOG_FILE));
            m_progressStream
                    .putStream("Optimizing classes, reference: " + OPTIMIZED_CLASSES_EXCLUSION_FILENAME);

            Scanner s = null;
            try {
                OmniFile omniFile = new OmniFile(m_volumeId,
                        m_appFolderPath + OPTIMIZED_CLASSES_EXCLUSION_FILENAME);
                s = new Scanner(omniFile.getStdFile());
                while (s.hasNext()) {
                    String excludeClass = s.next();
                    ignoredLibs.add(excludeClass);
                    m_progressStream.putStream("Exclude class: " + excludeClass);
                }
            } catch (Exception e) {
                LogUtil.logException(LogUtil.LogType.DECOMPILE, e);
            }
            if (s != null)
                s.close();

            ArrayList<OmniFile> dexFiles = new ArrayList<>();

            for (String fileName : m_dexFileNames) {

                OmniFile dexFile = new OmniFile(m_volumeId, m_appFolderPath + fileName + ".dex");
                if (dexFile.exists() && dexFile.isFile()) {

                    dexFiles.add(dexFile); // Keep track for summary

                    List<ClassDef> classes = new ArrayList<>();
                    m_progressStream.putStream("Processing: " + fileName + ".dex");

                    org.jf.dexlib2.iface.DexFile memoryDexFile = null;
                    try {
                        memoryDexFile = DexFileFactory.loadDexFile(dexFile.getStdFile(), Opcodes.forApi(19));
                    } catch (Exception e) {
                        m_progressStream.putStream("The app DEX file cannot be decompiled.");
                        LogUtil.logException(LogUtil.LogType.DECOMPILE, e);
                        continue;
                    }

                    int excludedClassCount = 0;
                    Set<? extends ClassDef> origClassSet = memoryDexFile.getClasses();
                    memoryDexFile = null; // Release memory

                    for (org.jf.dexlib2.iface.ClassDef classDef : origClassSet) {

                        final String currentClass = classDef.getType();

                        if (isIgnored(currentClass)) {
                            ++excludedClassCount;
                            m_progressStream.putStream("Excluded class: " + currentClass);
                        } else {
                            m_progressStream.putStream("Included class: " + currentClass);
                            classes.add(classDef);
                        }
                    }
                    origClassSet = null; // Release memory

                    m_progressStream.putStream("Excluded classes #" + excludedClassCount);
                    m_progressStream.putStream("Included classes #" + classes.size());
                    m_progressStream.putStream("Rebuilding immutable dex: " + fileName + ".dex");

                    if (classes.size() > 0) {

                        DexFile optDexFile = new ImmutableDexFile(Opcodes.forApi(19), classes);
                        classes = null; // Release memory

                        try {
                            if (dexFile.delete())
                                m_progressStream.putStream("Fat DEX file delete success: " + dexFile.getName());
                            else
                                m_progressStream.putStream("Fat DEX file delete FAILED: " + dexFile.getName());

                            DexPool.writeTo(dexFile.getStdFile().getAbsolutePath(), optDexFile);
                            String size = NumberFormat.getNumberInstance(Locale.US).format(dexFile.length());
                            m_progressStream.putStream(
                                    "Optimized DEX file created: " + dexFile.getName() + ", size: " + size);
                        } catch (IOException e) {
                            m_progressStream.putStream("DEX IOException, write error: " + dexFile.getName());
                            LogUtil.logException(LogUtil.LogType.DECOMPILE, e);
                        } catch (Exception e) {
                            m_progressStream.putStream("DEX Exception, write error: " + dexFile.getName());
                            LogUtil.logException(LogUtil.LogType.DECOMPILE, e);
                        }
                        optDexFile = null; // release memory
                    } else {
                        m_progressStream
                                .putStream("All classes excluded, DEX file not needed: " + dexFile.getName());
                        m_progressStream.putStream("Deleting: " + dexFile.getName());
                        dexFile.delete();
                    }
                }
            }

            for (OmniFile f : dexFiles) {
                if (f.exists()) {
                    String formatted_count = String.format(Locale.US, "%,d", f.length()) + " bytes";
                    m_progressStream.putStream("DEX optimized: " + f.getName() + ": " + formatted_count);
                } else {
                    m_progressStream.putStream("DEX deleted: " + f.getName() + ", all classes excluded");
                }
            }
            dexFiles = new ArrayList<>(); // Release memory

            m_progressStream
                    .putStream("Optimize DEX complete: " + TimeUtil.deltaTimeHrMinSec(m_optimize_dex_time));
            m_progressStream.close();
            m_optimize_dex_time = 0;
        }
    }, UNZIP_APK_THREAD, STACK_SIZE);

    m_optimizeDexThread.setPriority(Thread.MAX_PRIORITY);
    m_optimizeDexThread.setUncaughtExceptionHandler(uncaughtExceptionHandler);
    m_optimizeDexThread.start();

    return new JSONObject();
}
From source file: org.kalypso.grid.ConvertAscii2Binary.java

public void doConvert(final IProgressMonitor monitor) throws CoreException {
    KalypsoDeegreeDebug.GRID_OPS.printf("%s", //$NON-NLS-1$
            "converting ascii-grid to binary (" + m_ascbinFile.getName() + ")...\n"); //$NON-NLS-1$ //$NON-NLS-2$

    final SubMonitor progress = SubMonitor.convert(monitor, Messages.getString("ConvertAscii2Binary.4"), 100); //$NON-NLS-1$

    /* Convert to binary file */
    InputStream bis = null;
    BinaryGeoGrid binaryGrid = null;
    try {
        bis = new BufferedInputStream(m_asciiFileURL.openStream());
        final Scanner scanner = new Scanner(bis);

        // reading header data
        final AsciiGridReader asciiGridReader = new AsciiGridReader(scanner);
        final String noData = asciiGridReader.getNoDataValue();
        final double cellSize = asciiGridReader.getCellSize();
        m_gridDomain = asciiGridReader.getGridDomain(m_sourceCRS);

        final GM_Point origin = m_gridDomain.getOrigin(m_sourceCRS);
        final int sizeX = m_gridDomain.getNumColumns();
        final int sizeY = m_gridDomain.getNumRows();
        final Coordinate coordOrigin = JTSAdapter.export(origin.getPosition());
        final Coordinate offsetX = new Coordinate(cellSize, 0);
        final Coordinate offsetY = new Coordinate(0, -cellSize);

        progress.setWorkRemaining(sizeY + 2);

        /* Write header */
        binaryGrid = BinaryGeoGrid.createGrid(m_ascbinFile, sizeX, sizeY, m_scale, coordOrigin, offsetX,
                offsetY, m_sourceCRS, false);
        ProgressUtilities.worked(progress, 1);

        /* The current filename - */
        for (int y = 0; y < sizeY; y++) {
            if (y % 10 == 0)
                progress.subTask(String.format("%d / %d Zeilen", y, sizeY));

            for (int x = 0; x < sizeX; x++) {
                final String next = scanner.next(); // do not use 'nextDouble' it is much too slow
                if (next.equals(noData))
                    binaryGrid.setValue(x, y, Double.NaN);
                else {
                    final double currentValue = NumberUtils.parseQuietDouble(next);
                    binaryGrid.setValue(x, y, currentValue);
                }
            }
            ProgressUtilities.worked(progress, 1);
        }

        bis.close();
        binaryGrid.dispose();
        ProgressUtilities.worked(monitor, 1);

        KalypsoDeegreeDebug.GRID_OPS.printf("%s", "converting ascii-grid to binary... done.\n"); //$NON-NLS-1$ //$NON-NLS-2$
    } catch (final CoreException ce) {
        throw ce;
    } catch (final Exception e) {
        e.printStackTrace();
        KalypsoDeegreeDebug.GRID_OPS.printf("%s", "converting ascii-grid to binary... failed.\n"); //$NON-NLS-1$ //$NON-NLS-2$
        final String message = String.format("Failed to convert grid %s to %s:\n%s", m_asciiFileURL,
                m_ascbinFile, e.toString());
        throw new CoreException(new Status(IStatus.ERROR, KalypsoDeegreePlugin.getID(), message, e));
    } finally {
        if (binaryGrid != null)
            binaryGrid.dispose();
        IOUtils.closeQuietly(bis);
    }
}
From source file: org.cohorte.ecf.provider.jabsorb.host.JabsorbHttpSession.java

/**
 * Sends a POST request to the session URL with the given content
 *
 * @param aRequestContent
 *            Request content
 * @return The result page
 * @throws ClientError
 *             Something wrong happened
 */
protected String getUrlPostResult(final byte[] aRequestContent) {
    // Open a connection
    HttpURLConnection httpConnection = null;
    Scanner scanner = null;
    try {
        // Open the connection and cast it
        final URLConnection connection = pUrl.openConnection();
        if (!(connection instanceof HttpURLConnection)) {
            throw new ClientError("Unknown URL connection for : " + pUrl);
        }
        httpConnection = (HttpURLConnection) connection;

        // Make the connection writable (POST)
        httpConnection.setRequestMethod("POST");
        httpConnection.setDoOutput(true);

        // Set up the headers
        httpConnection.addRequestProperty("Content-Type", JSON_CONTENT_TYPE);
        httpConnection.addRequestProperty("Content-Length", Integer.toString(aRequestContent.length));

        // Set POST data
        httpConnection.getOutputStream().write(aRequestContent);

        // Wait for an answer
        final int responseCode = httpConnection.getResponseCode();
        if (responseCode != HttpURLConnection.HTTP_OK) {
            throw new ClientError("Got HTTP Status " + responseCode + " for URL " + pUrl);
        }

        // Use a scanner to read the response content. See here for more information:
        // http://weblogs.java.net/blog/pat/archive/2004/10/stupid_scanner_1.html
        scanner = new Scanner(connection.getInputStream());
        scanner.useDelimiter("\\A");
        return scanner.next();

    } catch (final IOException e) {
        // Convert error class
        throw new ClientError(e);

    } finally {
        // In any case, free the connection
        if (httpConnection != null) {
            httpConnection.disconnect();
        }

        // ... and close the scanner
        if (scanner != null) {
            scanner.close();
        }
    }
}
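The useDelimiter("\\A") call above is the "stupid scanner trick" referenced in the comment: \A is a regular-expression anchor that matches only the beginning of input, so it can never match again and the first next() returns the entire stream as a single token. A standalone sketch of the idiom, assuming UTF-8 content (the readAll helper name is hypothetical):

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

public class StreamToString {
    // Reads an entire stream as one token; returns "" for empty input.
    static String readAll(InputStream in) {
        try (Scanner s = new Scanner(in, StandardCharsets.UTF_8.name()).useDelimiter("\\A")) {
            return s.hasNext() ? s.next() : "";
        }
    }

    public static void main(String[] args) {
        InputStream in = new ByteArrayInputStream("hello\nworld".getBytes(StandardCharsets.UTF_8));
        System.out.println(readAll(in));
    }
}

Guarding with hasNext() yields an empty string for empty input, instead of the NoSuchElementException the unguarded scanner.next() in the Jabsorb example would throw.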
From source file: edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator.java

public void subsetFile(InputStream in, String outfile, List<Integer> columns, Long numCases,
        String delimiter) {
    try {
        Scanner scanner = new Scanner(in);
        scanner.useDelimiter("\\n");

        BufferedWriter out = new BufferedWriter(new FileWriter(outfile));
        for (long caseIndex = 0; caseIndex < numCases; caseIndex++) {
            if (scanner.hasNext()) {
                String[] line = (scanner.next()).split(delimiter, -1);
                List<String> ln = new ArrayList<String>();
                for (Integer i : columns) {
                    ln.add(line[i]);
                }
                out.write(StringUtils.join(ln, "\t") + "\n");
            } else {
                throw new RuntimeException("Tab file has fewer rows than the determined number of cases.");
            }
        }

        while (scanner.hasNext()) {
            if (!"".equals(scanner.next())) {
                throw new RuntimeException(
                        "Tab file has more nonempty rows than the determined number of cases.");
            }
        }

        scanner.close();
        out.close();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file: edu.harvard.iq.dataverse.dataaccess.TabularSubsetGenerator.java

private File generateRotatedImage(File tabfile, int varcount, int casecount) throws IOException {
    // TODO: throw exceptions if bad file, zero varcount, etc. ...

    String fileName = tabfile.getAbsolutePath();
    String rotatedImageFileName = fileName + ".90d";

    int MAX_OUTPUT_STREAMS = 32;
    int MAX_BUFFERED_BYTES = 10 * 1024 * 1024; // 10 MB - for now?
    int MAX_COLUMN_BUFFER = 8 * 1024;

    // offsetHeader will contain the byte offsets of the individual column
    // vectors in the final rotated image file
    byte[] offsetHeader = new byte[varcount * 8];
    int[] bufferedSizes = new int[varcount];
    long[] cachedfileSizes = new long[varcount];
    File[] columnTempFiles = new File[varcount];

    for (int i = 0; i < varcount; i++) {
        bufferedSizes[i] = 0;
        cachedfileSizes[i] = 0;
    }

    // TODO: adjust MAX_COLUMN_BUFFER here, so that the total size is
    // no more than MAX_BUFFERED_BYTES (but no less than 1024 maybe?)
    byte[][] bufferedColumns = new byte[varcount][MAX_COLUMN_BUFFER];

    // read the tab-delimited file:
    FileInputStream tabfileStream = new FileInputStream(tabfile);
    Scanner scanner = new Scanner(tabfileStream);
    scanner.useDelimiter("\\n");

    for (int caseindex = 0; caseindex < casecount; caseindex++) {
        if (scanner.hasNext()) {
            String[] line = (scanner.next()).split("\t", -1);
            // TODO: throw an exception if there are fewer tab-delimited
            // tokens than the number of variables specified.
            String token = "";
            int tokensize = 0;
            for (int varindex = 0; varindex < varcount; varindex++) {
                // TODO: figure out the safest way to convert strings to
                // bytes here. Is it going to be safer to use getBytes("UTF8")?
                // we are already making the assumption that the values
                // in the tab file are in UTF8. -- L.A.
                token = line[varindex] + "\n";
                tokensize = token.getBytes().length;
                if (bufferedSizes[varindex] + tokensize > MAX_COLUMN_BUFFER) {
                    // fill the buffer and dump its contents into the temp file:
                    // (do note that there may be *several* MAX_COLUMN_BUFFERs
                    // worth of bytes in the token!)
                    int tokenoffset = 0;
                    if (bufferedSizes[varindex] != MAX_COLUMN_BUFFER) {
                        tokenoffset = MAX_COLUMN_BUFFER - bufferedSizes[varindex];
                        System.arraycopy(token.getBytes(), 0, bufferedColumns[varindex],
                                bufferedSizes[varindex], tokenoffset);
                    }
                    // (otherwise the buffer is already full, and we should
                    // simply dump it into the temp file, without adding any
                    // extra bytes to it)

                    File bufferTempFile = columnTempFiles[varindex];
                    if (bufferTempFile == null) {
                        bufferTempFile = File.createTempFile("columnBufferFile", "bytes");
                        columnTempFiles[varindex] = bufferTempFile;
                    }

                    // *append* the contents of the buffer to the end of the
                    // temp file, if it already exists:
                    BufferedOutputStream outputStream = new BufferedOutputStream(
                            new FileOutputStream(bufferTempFile, true));
                    outputStream.write(bufferedColumns[varindex], 0, MAX_COLUMN_BUFFER);
                    cachedfileSizes[varindex] += MAX_COLUMN_BUFFER;

                    // keep writing MAX_COLUMN_BUFFER-size chunks of bytes into
                    // the temp file, for as long as there's more than MAX_COLUMN_BUFFER
                    // bytes left in the token:
                    while (tokensize - tokenoffset > MAX_COLUMN_BUFFER) {
                        outputStream.write(token.getBytes(), tokenoffset, MAX_COLUMN_BUFFER);
                        cachedfileSizes[varindex] += MAX_COLUMN_BUFFER;
                        tokenoffset += MAX_COLUMN_BUFFER;
                    }

                    outputStream.close();

                    // buffer the remaining bytes and reset the buffered
                    // byte counter:
                    System.arraycopy(token.getBytes(), tokenoffset, bufferedColumns[varindex], 0,
                            tokensize - tokenoffset);
                    bufferedSizes[varindex] = tokensize - tokenoffset;
                } else {
                    // continue buffering
                    System.arraycopy(token.getBytes(), 0, bufferedColumns[varindex],
                            bufferedSizes[varindex], tokensize);
                    bufferedSizes[varindex] += tokensize;
                }
            }
        } else {
            scanner.close();
            throw new IOException("Tab file has fewer rows than the stored number of cases!");
        }
    }

    // OK, we've created the individual byte vectors of the tab file columns;
    // they may be partially saved in temp files and/or in memory.
    // We now need to go through all these buffers and create the final
    // rotated image file.
    BufferedOutputStream finalOut = new BufferedOutputStream(
            new FileOutputStream(new File(rotatedImageFileName)));

    // but first we should create the offset header and write it out into
    // the final file; because it should be at the head, doh!
    long columnOffset = varcount * 8;
    // (this is the offset of the first column vector; it is equal to the
    // size of the offset header, i.e. varcount * 8 bytes)
    for (int varindex = 0; varindex < varcount; varindex++) {
        long totalColumnBytes = cachedfileSizes[varindex] + bufferedSizes[varindex];
        columnOffset += totalColumnBytes;
        byte[] columnOffsetByteArray = ByteBuffer.allocate(8).putLong(columnOffset).array();
        System.arraycopy(columnOffsetByteArray, 0, offsetHeader, varindex * 8, 8);
    }

    finalOut.write(offsetHeader, 0, varcount * 8);

    for (int varindex = 0; varindex < varcount; varindex++) {
        long cachedBytesRead = 0;

        // check if there is a cached temp file:
        File cachedTempFile = columnTempFiles[varindex];
        if (cachedTempFile != null) {
            byte[] cachedBytes = new byte[MAX_COLUMN_BUFFER];
            BufferedInputStream cachedIn = new BufferedInputStream(new FileInputStream(cachedTempFile));
            int readlen = 0;
            while ((readlen = cachedIn.read(cachedBytes)) > -1) {
                finalOut.write(cachedBytes, 0, readlen);
                cachedBytesRead += readlen;
            }
            cachedIn.close();
            // delete the temp file:
            cachedTempFile.delete();
        }

        if (cachedBytesRead != cachedfileSizes[varindex]) {
            finalOut.close();
            throw new IOException("Could not read the correct number of bytes cached for column " + varindex
                    + "; " + cachedfileSizes[varindex] + " bytes expected, " + cachedBytesRead + " read.");
        }

        // then check if there are any bytes buffered for this column:
        if (bufferedSizes[varindex] > 0) {
            finalOut.write(bufferedColumns[varindex], 0, bufferedSizes[varindex]);
        }
    }

    finalOut.close();

    return new File(rotatedImageFileName);
}
From source file: org.apache.openaz.xacml.rest.XACMLPapServlet.java

/**
 * Requests from the Admin Console to create new items or update existing ones
 *
 * @param request
 * @param response
 * @param groupId
 * @throws ServletException
 * @throws java.io.IOException
 */
private void doACPut(HttpServletRequest request, HttpServletResponse response, String groupId)
        throws ServletException, IOException {
    try {
        // for PUT operations the group may or may not need to exist before the operation can be done
        PDPGroup group = papEngine.getGroup(groupId);

        // determine the operation needed based on the parameters in the request

        // for remaining operations the group must exist before the operation can be done
        if (group == null) {
            logger.error("Unknown groupId '" + groupId + "'");
            response.sendError(HttpServletResponse.SC_NOT_FOUND, "Unknown groupId '" + groupId + "'");
            return;
        }
        if (request.getParameter("policy") != null) {
            // group=<groupId> policy=<policyId> contents=policy file <= Create new policy file in group
            // dir, or replace it if it already exists (do not touch properties)
            // TODO - currently this is done by the AC, but it should be done here by getting the policy
            // file out of the contents and saving to disk
            logger.error("PARTIALLY IMPLEMENTED!!! ACTUAL CHANGES SHOULD BE MADE BY PAP SERVLET!!! ");
            response.setStatus(HttpServletResponse.SC_NO_CONTENT);
            return;
        } else if (request.getParameter("pdpId") != null) {
            // ARGS: group=<groupId> pdpId=<pdpId/URL> <= create a new PDP or Update an Existing one
            String pdpId = request.getParameter("pdpId");

            // get the request content into a String
            String json = null;
            // read the inputStream into a buffer (trick found online scans entire input looking for
            // end-of-file)
            Scanner scanner = new Scanner(request.getInputStream());
            scanner.useDelimiter("\\A");
            json = scanner.hasNext() ? scanner.next() : "";
            scanner.close();
            logger.info("JSON request from AC: " + json);

            // convert Object sent as JSON into local object
            ObjectMapper mapper = new ObjectMapper();
            Object objectFromJSON = mapper.readValue(json, StdPDP.class);

            if (pdpId == null || objectFromJSON == null || !(objectFromJSON instanceof StdPDP)
                    || ((StdPDP) objectFromJSON).getId() == null
                    || !((StdPDP) objectFromJSON).getId().equals(pdpId)) {
                logger.error("PDP new/update had bad input. pdpId=" + pdpId + " objectFromJSON="
                        + objectFromJSON);
                response.sendError(500, "Bad input, pdpid=" + pdpId + " object=" + objectFromJSON);
            }
            StdPDP pdp = (StdPDP) objectFromJSON;

            if (papEngine.getPDP(pdpId) == null) {
                // this is a request to create a new PDP object
                papEngine.newPDP(pdp.getId(), group, pdp.getName(), pdp.getDescription());
            } else {
                // this is a request to update the pdp
                papEngine.updatePDP(pdp);
            }
            response.setStatus(HttpServletResponse.SC_NO_CONTENT);
            if (logger.isDebugEnabled()) {
                logger.debug("PDP '" + pdpId + "' created/updated");
            }

            // adjust the group's state including the new PDP
            ((StdPDPGroup) group).resetStatus();

            // tell the Admin Consoles there is a change
            notifyAC();

            // this might affect the PDP, so notify it of the change
            pdpChanged(pdp);
            return;
        } else if (request.getParameter("pipId") != null) {
            // group=<groupId> pipId=<pipEngineId> contents=pip properties <= add a PIP to pip config, or
            // replace it if it already exists (lenient operation)
            // TODO
            logger.error("UNIMPLEMENTED ");
            response.sendError(HttpServletResponse.SC_BAD_REQUEST, "UNIMPLEMENTED");
            return;
        } else {
            // Assume that this is an update of an existing PDP Group
            // ARGS: group=<groupId> <= Update an Existing Group

            // get the request content into a String
            String json = null;
            // read the inputStream into a buffer (trick found online scans entire input looking for
            // end-of-file)
            Scanner scanner = new Scanner(request.getInputStream());
            scanner.useDelimiter("\\A");
            json = scanner.hasNext() ? scanner.next() : "";
            scanner.close();
            logger.info("JSON request from AC: " + json);

            // convert Object sent as JSON into local object
            ObjectMapper mapper = new ObjectMapper();
            Object objectFromJSON = mapper.readValue(json, StdPDPGroup.class);

            if (objectFromJSON == null || !(objectFromJSON instanceof StdPDPGroup)
                    || !((StdPDPGroup) objectFromJSON).getId().equals(group.getId())) {
                logger.error("Group update had bad input. id=" + group.getId() + " objectFromJSON="
                        + objectFromJSON);
                response.sendError(500, "Bad input, id=" + group.getId() + " object=" + objectFromJSON);
            }

            // The Path on the PAP side is not carried on the RESTful interface with the AC
            // (because it is local to the PAP)
            // so we need to fill that in before submitting the group for update
            ((StdPDPGroup) objectFromJSON).setDirectory(((StdPDPGroup) group).getDirectory());

            papEngine.updateGroup((StdPDPGroup) objectFromJSON);

            response.setStatus(HttpServletResponse.SC_NO_CONTENT);
            if (logger.isDebugEnabled()) {
                logger.debug("Group '" + group.getId() + "' updated");
            }
            // tell the Admin Consoles there is a change
            notifyAC();
            // Group changed, which might include changing the policies
            groupChanged(group);
            return;
        }
    } catch (PAPException e) {
        logger.error("AC PUT exception: " + e, e);
        response.sendError(500, e.getMessage());
        return;
    }
}
From source file: org.yccheok.jstock.gui.Utils.java

public static String downloadAsString(String location) {
    final Utils.InputStreamAndMethod inputStreamAndMethod = Utils
            .getResponseBodyAsStreamBasedOnProxyAuthOption(location);
    if (inputStreamAndMethod.inputStream == null) {
        inputStreamAndMethod.method.releaseConnection();
        return null;
    }
    try {
        java.util.Scanner s = new java.util.Scanner(inputStreamAndMethod.inputStream, "UTF-8")
                .useDelimiter("\\A");
        return s.hasNext() ? s.next() : null;
    } finally {
        org.yccheok.jstock.file.Utils.close(inputStreamAndMethod.inputStream);
        inputStreamAndMethod.method.releaseConnection();
    }
}
From source file: com.groupon.odo.HttpUtilities.java

/**
 * Sets up the given {@link org.apache.commons.httpclient.methods.PostMethod} to send the same standard POST data
 * as was sent in the given {@link HttpServletRequest}
 *
 * @param methodProxyRequest The {@link org.apache.commons.httpclient.methods.PostMethod} that we are configuring to send a
 *                           standard POST request
 * @param httpServletRequest The {@link HttpServletRequest} that contains the POST data to
 *                           be sent via the {@link org.apache.commons.httpclient.methods.PostMethod}
 * @param history            The {@link com.groupon.odo.proxylib.models.History} log for this request
 */
@SuppressWarnings("unchecked")
public static void handleStandardPost(EntityEnclosingMethod methodProxyRequest,
        HttpServletRequest httpServletRequest, History history) throws Exception {
    String deserialisedMessages = "";
    byte[] requestByteArray = null;

    // Create a new StringBuffer with the data to be passed
    StringBuilder requestBody = new StringBuilder();
    InputStream body = httpServletRequest.getInputStream();
    java.util.Scanner s = new java.util.Scanner(body).useDelimiter("\\A");

    if (httpServletRequest.getContentType() != null
            && httpServletRequest.getContentType().contains(STRING_CONTENT_TYPE_FORM_URLENCODED)) {
        // Get the client POST data as a Map if content type is: application/x-www-form-urlencoded
        // We do this manually since some data is not properly parseable by the servlet request
        Map<String, String[]> mapPostParameters = HttpUtilities.mapUrlEncodedParameters(httpServletRequest);

        // Iterate the parameter names
        for (String stringParameterName : mapPostParameters.keySet()) {
            // Iterate the values for each parameter name
            String[] stringArrayParameterValues = mapPostParameters.get(stringParameterName);
            for (String stringParameterValue : stringArrayParameterValues) {
                // Create a NameValuePair and store in list

                // add an & if there is already data
                if (requestBody.length() > 0) {
                    requestBody.append("&");
                }

                requestBody.append(stringParameterName);

                // not everything has a value so lets check
                if (stringParameterValue.length() > 0) {
                    requestBody.append("=");
                    requestBody.append(stringParameterValue);
                }
            }
        }
    } else if (httpServletRequest.getContentType() != null
            && httpServletRequest.getContentType().contains(STRING_CONTENT_TYPE_MESSAGEPACK)) {
        /**
         * Convert input stream to bytes for it to be read by the deserializer
         * Unpack and iterate the list to see the contents
         */
        MessagePack msgpack = new MessagePack();
        requestByteArray = IOUtils.toByteArray(body);
        ByteArrayInputStream byteArrayIS = new ByteArrayInputStream(requestByteArray);
        Unpacker unpacker = msgpack.createUnpacker(byteArrayIS);

        for (Value message : unpacker) {
            deserialisedMessages += message;
            deserialisedMessages += "\n";
        }
    } else {
        // just set the request body to the POST body
        if (s.hasNext()) {
            requestBody.append(s.next());
        }
    }

    // Set the proxy request data
    StringRequestEntity stringEntity = new StringRequestEntity(requestBody.toString(), null, null);

    // set post body in history object
    history.setRequestPostData(requestBody.toString());

    // set post body in proxy request object
    methodProxyRequest.setRequestEntity(stringEntity);

    /**
     * Set the history to have decoded messagepack. Pass the byte data back to request
     */
    if (httpServletRequest.getContentType() != null
            && httpServletRequest.getContentType().contains(STRING_CONTENT_TYPE_MESSAGEPACK)) {
        history.setRequestPostData(deserialisedMessages);
        ByteArrayRequestEntity byteRequestEntity = new ByteArrayRequestEntity(requestByteArray);
        methodProxyRequest.setRequestEntity(byteRequestEntity);
    }
}
From source file: com.marklogic.client.functionaltest.BasicJavaClientREST.java

/**
 * Convert file to string. Used on FileHandle
 * @param fileRead
 * @return
 * @throws FileNotFoundException
 */
public String convertFileToString(File fileRead) throws FileNotFoundException {
    Scanner scanner = new Scanner(fileRead).useDelimiter("\\Z");
    String readContent = scanner.next();
    scanner.close();
    return readContent;
}
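A note on the delimiter choice here: unlike the \A idiom in the earlier examples, \Z matches at the end of input but before a final line terminator, so next() returns the file content without a trailing newline. There is also no hasNext() guard, so next() will throw NoSuchElementException on an empty file.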
From source file: com.marklogic.client.functionaltest.BasicJavaClientREST.java

/**
 * Write document using StringHandle with metadata
 * @param client
 * @param filename
 * @param uri
 * @param metadataHandle
 * @param type
 * @throws IOException
 */
public void writeDocumentUsingStringHandle(DatabaseClient client, String filename, String uri,
        DocumentMetadataHandle metadataHandle, String type) throws IOException {
    // acquire the content
    File file = new File("src/test/java/com/marklogic/client/functionaltest/data/" + filename);
    FileInputStream fis = new FileInputStream(file);
    Scanner scanner = new Scanner(fis).useDelimiter("\\Z");
    String readContent = scanner.next();
    fis.close();
    scanner.close();

    // create doc manager
    DocumentManager docMgr = null;
    docMgr = documentManagerSelector(client, docMgr, type);

    String docId = uri + filename;

    // create handle
    StringHandle contentHandle = new StringHandle();
    contentHandle.set(readContent);

    // write the doc
    docMgr.write(docId, metadataHandle, contentHandle);

    System.out.println("Write " + docId + " to the database");
}