List of usage examples for java.io.IOException getClass()
@HotSpotIntrinsicCandidate public final native Class<?> getClass();
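getClass() is inherited from java.lang.Object, so calling it on a caught IOException returns the runtime class of the concrete exception that was thrown (for example java.io.FileNotFoundException). Below is a minimal, self-contained sketch of the pattern shared by the examples on this page, logging the concrete exception class name next to its message; the file name and class name are illustrative only and not taken from any of the listed projects.

import java.io.FileInputStream;
import java.io.IOException;

public class GetClassExample {
    public static void main(String[] args) {
        // "no-such-file.txt" is a placeholder path used only to provoke an IOException
        try (FileInputStream in = new FileInputStream("no-such-file.txt")) {
            in.read();
        } catch (IOException ex) {
            // getClass() reports the concrete subtype, e.g. java.io.FileNotFoundException
            System.err.println("I/O error (" + ex.getClass().getName() + "): " + ex.getMessage());
        }
    }
}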
From source file:pltag.corpus.TagCorpus.java
public void singleSentExtractor(String inputFilename, PennTree tree, int treeNo) {
    try {
        if (verbose) {
            printOutput(tree.getString());
        }
        List<LexEntry> lexEntries = calculateLexEntries(tree);
        reconstructAndBuildLexicon(inputFilename, treeNo, tree,
                convertToElementaryStringTrees(lexEntries, inputFilename, treeNo, tree.getRandomGenerator()));
    } catch (IOException ex) {
        LogInfo.error("during conversion: " + ex.getClass().toString());
    } catch (StackOverflowError e) {
        LogInfo.error("during conversion: " + e.getClass().toString());
    }
}
From source file:org.jwebsocket.plugins.mail.MailPlugInService.java
private Token archiveAttachments(WebSocketConnector aConnector, Token aToken, Token aMail,
        String aTargetFolder, TokenServer aServer, String aPlugInNS) {
    Token lRes = TokenFactory.createToken();
    Integer lVolumeSize = aToken.getInteger("volumeSize", -1);
    String lArchiveName = aToken.getString("archiveName", "PackagedAttachments{partNo}.zip");
    List<Object> lAttachments = aMail.getList("attachments");
    int MIN_VOLUME_SIZE = 16348;
    int BUFFER_SIZE = 65536;

    if (lVolumeSize >= MIN_VOLUME_SIZE && null != lArchiveName
            && null != lAttachments && lAttachments.size() > 0) {
        // start to create auto splitted package archive
        try {
            // rar process
            List<String> lCmdLine = new FastList<String>();
            // lCmdLine.add(System.getenv("windir") + "\\system32\\" + "tree.com");
            // lCmdLine.add("/A");
            lCmdLine.add(mSettings.getRarPath());     // path to rar
            lCmdLine.add("-y");                       // Assume Yes on all queries.
            lCmdLine.add("-m5");                      // compression level 0-5.
            lCmdLine.add("-ep1");                     // disable path names in archive.
            lCmdLine.add("-v" + lVolumeSize + "b");   // split into volumes of lVolumeSize bytes.
            lCmdLine.add("a");                        // add command
            lCmdLine.add(aTargetFolder + "test.rar"); // target archive
            for (Object lAttachment : lAttachments) {
                lCmdLine.add((String) lAttachment);   // add files to compress
            }
            if (mLog.isDebugEnabled()) {
                mLog.debug("Executing: " + lCmdLine.toString());
            }
            ProcessBuilder lProcessBuilder = new ProcessBuilder(lCmdLine);
            /*
             * Map<String, String> lEnvVars = lProcessBuilder.environment();
             * lProcessBuilder.directory(new File(System.getenv("temp")));
             * if (mLog.isDebugEnabled()) {
             *     mLog.debug("Directory : " + System.getenv("temp"));
             * }
             */
            final Process lProcess = lProcessBuilder.start();
            InputStream is = lProcess.getInputStream();
            InputStreamReader isr = new InputStreamReader(is);
            BufferedReader br = new BufferedReader(isr);
            String line;
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
            if (mLog.isDebugEnabled()) {
                mLog.debug("Rar process finished.");
            }

            List<String> lVolumeIds = new FastList<String>();
            String lMask = "test*.*";
            File lDir = new File(aTargetFolder);
            String[] lFiles = lDir.list(new WildcardFileFilter(lMask));
            if (null != lFiles) {
                Token lEvent = TokenFactory.createToken(aPlugInNS, BaseToken.TT_EVENT);
                lEvent.setString("name", "volumeCreated");
                for (int lIdx = 0; lIdx < lFiles.length; lIdx++) {
                    if (mLog.isDebugEnabled()) {
                        mLog.debug("Creating volume mail for " + lFiles[lIdx]);
                    }
                    Token lVolMail = TokenFactory.createToken();
                    String lVolId = "volume" + (lIdx + 1);
                    lVolMail.setString("id", lVolId);
                    lVolMail.setString("from", aMail.getString("from"));
                    lVolMail.setString("to", aMail.getString("to"));
                    lVolMail.setString("cc", aMail.getString("cc"));
                    lVolMail.setString("bcc", aMail.getString("bcc"));
                    lVolMail.setString("subject", aMail.getString("subject") + " (part " + (lIdx + 1) + ")");
                    lVolMail.setString("body", aMail.getString("body"));
                    lVolMail.setBoolean("html", aMail.getBoolean("html"));
                    List<String> lVolAtt = new FastList<String>();
                    lVolAtt.add(aTargetFolder + lFiles[lIdx]);
                    lVolMail.setList("attachments", lVolAtt);
                    lVolumeIds.add(lVolId);
                    mMailStore.storeMail(lVolMail);
                    lEvent.setString("id", lVolId);
                    // send response to requester
                    aServer.sendToken(aConnector, lEvent);
                }
                aMail.setList("volumeIds", lVolumeIds);
                mMailStore.storeMail(aMail);
            } else {
                // TODO: process error!
            }

            // zip process
            /*
             * String lArchiveAbsolutePath = aTargetFolder + lArchiveName;
             *
             * // Reference to the file we will be adding to the zipfile
             * BufferedInputStream lSource = null;
             *
             * // Reference to our zip file
             * FileOutputStream lDest = new FileOutputStream(lArchiveAbsolutePath);
             *
             * // Wrap our destination zipfile with a ZipOutputStream
             * ZipOutputStream lZipOut = new ZipOutputStream(new BufferedOutputStream(lDest));
             *
             * // Create a byte[] buffer that we will read data from the source
             * // files into and then transfer it to the zip file
             * byte[] lBuff = new byte[BUFFER_SIZE];
             *
             * // Iterate over all of the files in our list
             * for (String lAttachment : lAttachments) {
             *     String lFilenameInArchive = FilenameUtils.getName(lAttachment);
             *     // Get a BufferedInputStream that we can use to read the source file
             *     if (mLog.isDebugEnabled()) {
             *         mLog.debug("Adding " + lAttachment + " to " + lFilenameInArchive + "...");
             *     }
             *     System.out.println();
             *     FileInputStream lFileIn = new FileInputStream(lAttachment);
             *     lSource = new BufferedInputStream(lFileIn, BUFFER_SIZE);
             *
             *     // Setup the entry in the zip file
             *     // here you can specify the name and folder in the archive
             *     ZipEntry lEntry = new ZipEntry(lFilenameInArchive);
             *     lZipOut.putNextEntry(lEntry);
             *
             *     // Read data from the source file and write it out to the zip file
             *     int lRead;
             *     while ((lRead = lSource.read(lBuff, 0, BUFFER_SIZE)) != -1) {
             *         lZipOut.write(lBuff, 0, lRead);
             *     }
             *
             *     // Close the source file
             *     lSource.close();
             * }
             *
             * // Close the zip file
             * lZipOut.close();
             */
            /*
             * FileInputStream lFIS = new FileInputStream(lArchiveAbsolutePath);
             * // Read data from the source file and write it out to the zip file
             * int lRead;
             * int lPart = 0;
             * lBuff = new byte[lVolumeSize];
             * while ((lRead = lFIS.read(lBuff, 0, lVolumeSize)) != -1) {
             *     lPart++;
             *     FileOutputStream lFOS = new FileOutputStream(lArchiveAbsolutePath + ".part" + lPart);
             *     lFOS.write(lBuff, 0, lRead);
             *     lFOS.close();
             * }
             * lFIS.close();
             */
            lRes.setInteger("code", 0);
        } catch (IOException lEx) {
            lRes.setInteger("code", -1);
            lRes.setString("msg", lEx.getClass().getSimpleName() + ": " + lEx.getMessage());
        }
    }
    return lRes;
}
From source file:index.IncrementIndex.java
/**
 * @param path
 * @param storeIdPath
 * @param rs
 * @param classPath
 * @param keyName
 * @return
 */
public static boolean indexBuilding(String path, String storeIdPath, ResultSet rs, String classPath,
        String keyName) throws SQLException {
    try {
        Analyzer luceneAnalyzer = new StandardAnalyzer();
        // check whether the store-ID file already has content
        boolean isEmpty = true;
        try {
            File file = new File(storeIdPath);
            if (!file.exists()) {
                file.createNewFile();
            }
            FileReader fr = new FileReader(storeIdPath);
            BufferedReader br = new BufferedReader(fr);
            if (br.readLine() != null) {
                isEmpty = false;
            }
            br.close();
            fr.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // create a fresh index when the store-ID file is empty, otherwise append to the existing index
        IndexWriter writer = new IndexWriter(path, luceneAnalyzer, isEmpty);
        String storeId = "";
        boolean indexFlag = false;
        // load the entity class to discover its fields via reflection
        Class c2 = Class.forName(classPath);
        java.lang.reflect.Field[] fields = c2.getDeclaredFields();
        while (rs.next()) {
            // collect the current row's column values
            Map map = new HashMap();
            // iterate over the entity's declared fields
            for (java.lang.reflect.Field field : fields) {
                // remember the key column value of the last indexed record
                if (keyName.equals(field.getName())) {
                    storeId = rs.getString(field.getName().toUpperCase());
                }
                // only index columns that actually exist in the result set
                if (whetherExist(field.getName().toUpperCase(), rs)) {
                    map.put(field.getName(), rs.getString(field.getName().toUpperCase()));
                }
            }
            writer.addDocument(Document(map));
            indexFlag = true;
        }
        writer.optimize();
        writer.close();
        if (indexFlag) {
            // persist the ID of the last indexed record
            writeStoreId(storeIdPath, storeId);
        }
        return true;
    } catch (Exception e) {
        e.printStackTrace();
        System.out.println("" + e.getClass() + "\n message: " + e.getMessage());
        return false;
    } finally {
        if (null != rs) {
            rs.close();
        }
    }
}
From source file:org.lilyproject.avro.AvroConverter.java
public AvroIOException convert(IOException exception) {
    AvroIOException avroIOException = new AvroIOException();
    avroIOException.setMessage$(exception.getMessage());
    avroIOException.setRemoteCauses(buildCauses(exception));
    avroIOException.setExceptionClass(exception.getClass().getName());
    return avroIOException;
}
From source file:org.apache.tez.runtime.library.common.shuffle.orderedgrouped.FetcherOrderedGrouped.java
protected InputAttemptIdentifier[] copyMapOutput(MapHost host, DataInputStream input)
        throws FetcherReadTimeoutException {
    MapOutput mapOutput = null;
    InputAttemptIdentifier srcAttemptId = null;
    long decompressedLength = -1;
    long compressedLength = -1;

    try {
        long startTime = System.currentTimeMillis();
        int forReduce = -1;
        // Read the shuffle header
        try {
            ShuffleHeader header = new ShuffleHeader();
            // TODO Review: Multiple header reads in case of status WAIT ?
            header.readFields(input);
            if (!header.mapId.startsWith(InputAttemptIdentifier.PATH_PREFIX)) {
                if (!stopped) {
                    badIdErrs.increment(1);
                    LOG.warn("Invalid map id: " + header.mapId + ", expected to start with "
                            + InputAttemptIdentifier.PATH_PREFIX + ", partition: " + header.forReduce);
                    return new InputAttemptIdentifier[] { getNextRemainingAttempt() };
                } else {
                    LOG.info("Already shutdown. Ignoring invalid map id error");
                    return EMPTY_ATTEMPT_ID_ARRAY;
                }
            }
            srcAttemptId = scheduler.getIdentifierForFetchedOutput(header.mapId, header.forReduce);
            compressedLength = header.compressedLength;
            decompressedLength = header.uncompressedLength;
            forReduce = header.forReduce;
        } catch (IllegalArgumentException e) {
            if (!stopped) {
                badIdErrs.increment(1);
                LOG.warn("Invalid map id ", e);
                // Don't know which one was bad, so consider this one bad and dont read
                // the remaining because we dont know where to start reading from. YARN-1773
                return new InputAttemptIdentifier[] { getNextRemainingAttempt() };
            } else {
                LOG.info("Already shutdown. Ignoring invalid map id error. Exception: "
                        + e.getClass().getName() + ", Message: " + e.getMessage());
                return EMPTY_ATTEMPT_ID_ARRAY;
            }
        }

        // Do some basic sanity verification
        if (!verifySanity(compressedLength, decompressedLength, forReduce, remaining, srcAttemptId)) {
            if (!stopped) {
                if (srcAttemptId == null) {
                    LOG.warn("Was expecting " + getNextRemainingAttempt() + " but got null");
                    srcAttemptId = getNextRemainingAttempt();
                }
                assert (srcAttemptId != null);
                return new InputAttemptIdentifier[] { srcAttemptId };
            } else {
                LOG.info("Already stopped. Ignoring verification failure.");
                return EMPTY_ATTEMPT_ID_ARRAY;
            }
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("header: " + srcAttemptId + ", len: " + compressedLength
                    + ", decomp len: " + decompressedLength);
        }

        // Get the location for the map output - either in-memory or on-disk
        try {
            mapOutput = merger.reserve(srcAttemptId, decompressedLength, compressedLength, id);
        } catch (IOException e) {
            if (!stopped) {
                // Kill the reduce attempt
                ioErrs.increment(1);
                scheduler.reportLocalError(e);
            } else {
                LOG.info("Already stopped. Ignoring error from merger.reserve");
            }
            return EMPTY_ATTEMPT_ID_ARRAY;
        }

        // Check if we can shuffle *now* ...
        if (mapOutput.getType() == Type.WAIT) {
            // TODO Review: Does this cause a tight loop ?
            LOG.info("fetcher#" + id + " - MergerManager returned Status.WAIT ...");
            // Not an error but wait to process data.
            return EMPTY_ATTEMPT_ID_ARRAY;
        }

        // Go!
        LOG.info("fetcher#" + id + " about to shuffle output of map " + mapOutput.getAttemptIdentifier()
                + " decomp: " + decompressedLength + " len: " + compressedLength + " to " + mapOutput.getType());
        if (mapOutput.getType() == Type.MEMORY) {
            ShuffleUtils.shuffleToMemory(mapOutput.getMemory(), input, (int) decompressedLength,
                    (int) compressedLength, codec, ifileReadAhead, ifileReadAheadLength, LOG,
                    mapOutput.getAttemptIdentifier().toString());
        } else if (mapOutput.getType() == Type.DISK) {
            ShuffleUtils.shuffleToDisk(mapOutput.getDisk(), host.getHostIdentifier(), input, compressedLength,
                    LOG, mapOutput.getAttemptIdentifier().toString());
        } else {
            throw new IOException("Unknown mapOutput type while fetching shuffle data:" + mapOutput.getType());
        }

        // Inform the shuffle scheduler
        long endTime = System.currentTimeMillis();
        // Reset retryStartTime as map task make progress if retried before.
        retryStartTime = 0;
        scheduler.copySucceeded(srcAttemptId, host, compressedLength, decompressedLength,
                endTime - startTime, mapOutput);
        // Note successful shuffle
        remaining.remove(srcAttemptId);
        metrics.successFetch();
        return null;
    } catch (IOException ioe) {
        if (stopped) {
            LOG.info("Not reporting fetch failure for exception during data copy: ["
                    + ioe.getClass().getName() + ", " + ioe.getMessage() + "]");
            cleanupCurrentConnection(true);
            if (mapOutput != null) {
                mapOutput.abort(); // Release resources
            }
            // Don't need to put back - since that's handled by the invoker
            return EMPTY_ATTEMPT_ID_ARRAY;
        }
        if (shouldRetry(host, ioe)) {
            // release mem/file handles
            if (mapOutput != null) {
                mapOutput.abort();
            }
            throw new FetcherReadTimeoutException(ioe);
        }
        ioErrs.increment(1);
        if (srcAttemptId == null || mapOutput == null) {
            LOG.info("fetcher#" + id + " failed to read map header" + srcAttemptId + " decomp: "
                    + decompressedLength + ", " + compressedLength, ioe);
            if (srcAttemptId == null) {
                return remaining.toArray(new InputAttemptIdentifier[remaining.size()]);
            } else {
                return new InputAttemptIdentifier[] { srcAttemptId };
            }
        }
        LOG.warn("Failed to shuffle output of " + srcAttemptId + " from " + host.getHostIdentifier(), ioe);
        // Inform the shuffle-scheduler
        mapOutput.abort();
        metrics.failedFetch();
        return new InputAttemptIdentifier[] { srcAttemptId };
    }
}
From source file:com.maverick.ssl.SSLHandshakeProtocol.java
void processMessage(byte[] fragment, int off, int len) throws SSLException {
    ByteArrayInputStream reader = new ByteArrayInputStream(fragment, off, len);

    // Update the handshake hashes
    updateHandshakeHashes(fragment);

    while (reader.available() > 0 && !isComplete()) {
        int type = reader.read();
        int length = (reader.read() & 0xFF) << 16 | (reader.read() & 0xFF) << 8 | (reader.read() & 0xFF);

        // #ifdef DEBUG
        log.debug(MessageFormat.format(Messages.getString("SSLHandshakeProtocol.processingType"), //$NON-NLS-1$
                new Object[] { new Integer(type), new Long(length) }));
        // #endif

        byte[] msg = new byte[length];
        try {
            reader.read(msg);
        } catch (IOException ex) {
            throw new SSLException(SSLException.INTERNAL_ERROR,
                    ex.getMessage() == null ? ex.getClass().getName() : ex.getMessage());
        }

        switch (type) {
        case HELLO_REQUEST_MSG:
            // #ifdef DEBUG
            log.debug(Messages.getString("SSLHandshakeProtocol.receivedHELLO")); //$NON-NLS-1$
            // #endif
            /**
             * If we receive a hello request then a handshake must be
             * re-negotiated. But ignore it if were already performing a
             * handshake operation
             */
            if (currentHandshakeStep == HANDSHAKE_PENDING_OR_COMPLETE) {
                startHandshake();
            }
            break;
        case SERVER_HELLO_MSG:
            // #ifdef DEBUG
            log.debug(Messages.getString("SSLHandshakeProtocol.receivedServerHELLO")); //$NON-NLS-1$
            // #endif
            if (currentHandshakeStep != CLIENT_HELLO_MSG) {
                throw new SSLException(SSLException.PROTOCOL_VIOLATION, MessageFormat.format(
                        Messages.getString("SSLHandshakeProtocol.receivedUnexpectedServerHello"), //$NON-NLS-1$
                        new Object[] { new Integer(currentHandshakeStep) }));
            }
            onServerHelloMsg(msg);
            break;
        case CERTIFICATE_MSG:
            // #ifdef DEBUG
            log.debug(Messages.getString("SSLHandshakeProtocol.receivedServerCertificate")); //$NON-NLS-1$
            // #endif
            if (currentHandshakeStep != SERVER_HELLO_MSG) {
                throw new SSLException(SSLException.PROTOCOL_VIOLATION, MessageFormat.format(
                        Messages.getString("SSLHandshakeProtocol.unexpectedCertificateMessageReceived"), //$NON-NLS-1$
                        new Object[] { new Integer(currentHandshakeStep) }));
            }
            onCertificateMsg(msg);
            break;
        case KEY_EXCHANGE_MSG:
            // #ifdef DEBUG
            log.debug(Messages.getString("SSLHandshakeProtocol.receivedUnsupportedServerKEX")); //$NON-NLS-1$
            // #endif
            throw new SSLException(SSLException.UNSUPPORTED_OPERATION,
                    Messages.getString("SSLHandshakeProtocol.kexNotSupported")); //$NON-NLS-1$
        case CERTIFICATE_REQUEST_MSG:
            // #ifdef DEBUG
            log.debug(Messages.getString("SSLHandshakeProtocol.receivedUnsupportedClientCert")); //$NON-NLS-1$
            // #endif
            wantsClientAuth = true;
            break;
        case SERVER_HELLO_DONE_MSG:
            // #ifdef DEBUG
            log.debug(Messages.getString("SSLHandshakeProtocol.helloDone")); //$NON-NLS-1$
            // #endif
            if (currentHandshakeStep != CERTIFICATE_MSG) {
                throw new SSLException(SSLException.PROTOCOL_VIOLATION, MessageFormat.format(
                        Messages.getString("SSLHandshakeProtocol.unexpectedServerHelloDone"), //$NON-NLS-1$
                        new Object[] { new Integer(currentHandshakeStep) }));
            }
            if (wantsClientAuth) {
                // #ifdef DEBUG
                log.debug(Messages.getString("SSLHandshakeProtocol.sendingNoCert")); //$NON-NLS-1$
                // #endif
                socket.sendMessage(SSLTransportImpl.ALERT_PROTOCOL, new byte[] {
                        (byte) SSLTransportImpl.WARNING_ALERT, (byte) SSLException.NO_CERTIFICATE });
            }
            onServerHelloDoneMsg();
            break;
        case FINISHED_MSG:
            // #ifdef DEBUG
            log.debug(Messages.getString("SSLHandshakeProtocol.receivedServerFinished")); //$NON-NLS-1$
            // #endif
            if (currentHandshakeStep != FINISHED_MSG) {
                throw new SSLException(SSLException.PROTOCOL_VIOLATION);
            }
            currentHandshakeStep = HANDSHAKE_PENDING_OR_COMPLETE;
            break;
        default:
        }
    }
}
From source file:org.paxle.crawler.fs.impl.FsCrawler.java
public ICrawlerDocument request(URI location) {
    final ICrawlerContext ctx = this.contextLocal.getCurrentContext();
    ICrawlerDocument cdoc = null;
    try {
        // creating an empty crawler-document
        cdoc = ctx.createDocument();

        final ICommandProfile cmdProfile = ctx.getCommandProfile();
        boolean omitHidden = true;
        boolean inclParent = false;
        int readMode = VAL_READ_MODE_STD;
        if (cmdProfile != null) {
            Serializable val;
            if ((val = cmdProfile.getProperty(PROP_VALIDATE_NOT_HIDDEN)) != null)
                omitHidden = ((Boolean) val).booleanValue();
            if ((val = cmdProfile.getProperty(PROP_READ_MODE)) != null)
                readMode = ((Integer) val).intValue();
            if ((val = cmdProfile.getProperty(PROP_INCLUDE_PARENT_DIR)) != null)
                inclParent = ((Boolean) val).booleanValue();
        }

        ICrawlerDocument.Status status = ICrawlerDocument.Status.OK;
        String err = null;

        final File file = new File(location);
        if (!file.exists()) {
            err = "File not found";
            status = ICrawlerDocument.Status.NOT_FOUND;
        } else if (!file.canRead()) {
            err = "Read permission denied";
            status = ICrawlerDocument.Status.UNKNOWN_FAILURE;
        /* java 1.6
        } else if (file.isDirectory() && !file.canExecute()) {
            err = "Permission to enter directory denied";
            status = ICrawlerDocument.Status.UNKNOWN_FAILURE;
        */
        } else if (omitHidden && file.isHidden()) {
            err = "Hidden";
            status = ICrawlerDocument.Status.UNKNOWN_FAILURE;
        }

        cdoc.setStatus(status);
        if (err != null) {
            logger.warn(String.format("Error crawling %s: %s", location, err));
            cdoc.setStatusText(err);
            return cdoc;
        }

        cdoc.setCrawlerDate(new Date());
        cdoc.setLastModDate(new Date(file.lastModified()));
        cdoc.setLocation(location);

        final ICrawlerTools crawlerTools = ctx.getCrawlerTools();
        if (file.isDirectory()) {
            final File[] list = file.listFiles();
            final Iterator<DirlistEntry> dirlistIt = new DirlistIterator(list, omitHidden);
            try {
                crawlerTools.saveListing(cdoc, dirlistIt, inclParent, list.length > 0);
            } catch (IOException e) {
                final String msg = String.format("Error saving dir-listing for '%s': %s", location, e.getMessage());
                logger.error(msg, e);
                cdoc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, msg);
                return cdoc;
            }
        } else {
            final File contentFile = generateContentFile(readMode, file, cdoc);
            cdoc.setContent(contentFile);
        }
    } catch (Exception e) {
        final String msg = String.format("Unexpected %s while crawling '%s'", e.getClass().getName(), location);
        logger.error(msg, e);
        if (cdoc != null) {
            cdoc.setStatus(ICrawlerDocument.Status.UNKNOWN_FAILURE, msg);
        }
    }
    return cdoc;
}
From source file:org.apache.hadoop.hive.cli.CliDriver.java
int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
    int tryCount = 0;
    boolean needRetry;
    int ret = 0;

    do {
        try {
            needRetry = false;
            if (proc != null) {
                if (proc instanceof Driver) {
                    Driver qp = (Driver) proc;
                    PrintStream out = ss.out;
                    long start = System.currentTimeMillis();
                    if (ss.getIsVerbose()) {
                        out.println(cmd);
                    }

                    qp.setTryCount(tryCount);
                    ret = qp.run(cmd).getResponseCode();
                    if (ret != 0) {
                        qp.close();
                        return ret;
                    }

                    // query has run capture the time
                    long end = System.currentTimeMillis();
                    double timeTaken = (end - start) / 1000.0;

                    ArrayList<String> res = new ArrayList<String>();

                    printHeader(qp, out);

                    // print the results
                    int counter = 0;
                    try {
                        if (out instanceof FetchConverter) {
                            ((FetchConverter) out).fetchStarted();
                        }
                        while (qp.getResults(res)) {
                            for (String r : res) {
                                out.println(r);
                            }
                            counter += res.size();
                            res.clear();
                            if (out.checkError()) {
                                break;
                            }
                        }
                    } catch (IOException e) {
                        console.printError(
                                "Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
                        ret = 1;
                    }

                    int cret = qp.close();
                    if (ret == 0) {
                        ret = cret;
                    }

                    if (out instanceof FetchConverter) {
                        ((FetchConverter) out).fetchFinished();
                    }

                    console.printInfo("Time taken: " + timeTaken + " seconds"
                            + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
                } else {
                    String firstToken = tokenizeCmd(cmd.trim())[0];
                    String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());

                    if (ss.getIsVerbose()) {
                        ss.out.println(firstToken + " " + cmd_1);
                    }
                    CommandProcessorResponse res = proc.run(cmd_1);
                    if (res.getResponseCode() != 0) {
                        ss.out.println("Query returned non-zero code: " + res.getResponseCode()
                                + ", cause: " + res.getErrorMessage());
                    }
                    if (res.getConsoleMessages() != null) {
                        for (String consoleMsg : res.getConsoleMessages()) {
                            console.printInfo(consoleMsg);
                        }
                    }
                    ret = res.getResponseCode();
                }
            }
        } catch (CommandNeedRetryException e) {
            console.printInfo("Retry query with a different approach...");
            tryCount++;
            needRetry = true;
        }
    } while (needRetry);

    return ret;
}
From source file:photosharing.api.conx.FileDefinition.java
/**
 * manages the thumbnail access
 *
 * @param bearer
 * @param request
 * @param response
 */
public void getThumbnail(String bearer, HttpServletRequest request, HttpServletResponse response) {
    String pid = request.getParameter("pid");
    String lid = request.getParameter("lid");

    if (pid == null || lid == null || pid.isEmpty() || lid.isEmpty()) {
        logger.warning("bad parameters");
        response.setStatus(HttpStatus.SC_BAD_REQUEST);
    } else {
        String apiUrl = getThumbnailApiUrl(pid, lid);
        Request get = Request.Get(apiUrl);
        get.addHeader("Authorization", "Bearer " + bearer);

        try {
            Executor exec = ExecutorUtil.getExecutor();
            Response apiResponse = exec.execute(get);
            HttpResponse hr = apiResponse.returnResponse();

            /**
             * Check the status codes
             */
            int code = hr.getStatusLine().getStatusCode();

            // Session is no longer valid or access token is expired
            if (code == HttpStatus.SC_FORBIDDEN) {
                response.sendRedirect("./api/logout");
            }
            // User is not authorized
            else if (code == HttpStatus.SC_UNAUTHORIZED) {
                response.setStatus(HttpStatus.SC_UNAUTHORIZED);
            }
            // Default to SC_OK (200)
            else if (code == HttpStatus.SC_OK) {
                response.setContentType(hr.getFirstHeader("Content-Type").getValue());
                response.setHeader("content-length", hr.getFirstHeader("content-length").getValue());
                response.setStatus(HttpStatus.SC_OK);

                // Streams
                InputStream in = hr.getEntity().getContent();
                IOUtils.copy(in, response.getOutputStream());
                IOUtils.closeQuietly(in);
                IOUtils.closeQuietly(response.getOutputStream());
            }
        } catch (IOException e) {
            response.setHeader("X-Application-Error", e.getClass().getName());
            response.setStatus(HttpStatus.SC_INTERNAL_SERVER_ERROR);
            logger.severe("Issue with read file " + e.toString());
        }
    }
}
From source file:ezbake.services.provenance.thrift.ProvenanceServiceImpl.java
private ResultsAndContinuation fetchUDorDU(ezbake.base.thrift.EzSecurityToken securityToken, String userPrincipal,
        ezbake.base.thrift.DateTime startDateTime, ezbake.base.thrift.DateTime stopDateTime, int numToFetch,
        AccumuloContinuationPoint continuationPoint)
        throws TException, ezbake.base.thrift.EzSecurityTokenException {
    AuditEvent evt = event(AuditEventType.FileObjectAccess.getName(), securityToken)
            .arg("event", "fetchUDorDU")
            .arg("userPrincipal", userPrincipal);

    Connector connector;
    Key startKey;
    Key stopKey;

    String startDateTimeString = String.format("%019d", convertDateTime2Millis(startDateTime));
    String stopDateTimeString = String.format("%019d", convertDateTime2Millis(stopDateTime));

    ResultsAndContinuation rtn = new ResultsAndContinuation();
    rtn.continuationPoint = new AccumuloContinuationPoint();
    rtn.continuationPoint.startAtBeginning = false;
    rtn.results = new ArrayList<String>();

    String[] authlist = securityToken.authorizations.formalAuthorizations
            .toArray(new String[securityToken.authorizations.formalAuthorizations.size()]);

    if (numToFetch > 10000) {
        numToFetch = 10000;
    }

    try {
        connector = getAccumuloConnector();

        // do a lookup in the lookup table first
        Scanner scanner = connector.createScanner(TABLE, new Authorizations(authlist));

        if (continuationPoint.startAtBeginning) {
            startKey = new Key(new Text(userPrincipal), new Text(startDateTimeString));
        } else {
            // move on to the following key
            startKey = new Key(new Text(continuationPoint.rowId), new Text(continuationPoint.colFam),
                    new Text(continuationPoint.colQual)).followingKey(PartialKey.ROW_COLFAM_COLQUAL);
        }
        stopKey = new Key(new Text(userPrincipal), new Text(stopDateTimeString));
        scanner.setRange(new Range(startKey, stopKey));

        Iterator<Map.Entry<Key, Value>> lookupIter = scanner.iterator();

        int i = 0;
        Map.Entry<Key, Value> current;
        while (lookupIter.hasNext() && (i < numToFetch)) {
            current = lookupIter.next();
            rtn.results.add(current.getValue().toString());
            rtn.continuationPoint.rowId = current.getKey().getRow().toString();
            rtn.continuationPoint.colFam = current.getKey().getColumnFamily().toString();
            rtn.continuationPoint.colQual = current.getKey().getColumnQualifier().toString();
            i++;
        }
    } catch (IOException e) {
        evt.failed();
        evt.arg(e.getClass().getName(), e);
        logger.error("Unexpected IOException thrown getting accumulo connector", e);
        throw new RegistrationException(e.getMessage());
    } catch (TableNotFoundException e) {
        evt.failed();
        evt.arg(e.getClass().getName(), e);
        logger.error("Registrations table does not exist", e);
        throw new RegistrationException(e.getMessage());
    } catch (Exception e) {
        evt.failed();
        evt.arg(e.getClass().getName(), e);
        throw e;
    } finally {
        auditLogger.logEvent(evt);
    }
    return rtn;
}