Usage examples for java.util.Deque#peek()
E peek();
From source file:org.alfresco.repo.content.transform.TransformerDebug.java
private void push(String transformerName, String fromUrl, String sourceMimetype, String targetMimetype, long sourceSize, TransformationOptions options, Call callType) { Deque<Frame> ourStack = ThreadInfo.getStack(); Frame frame = ourStack.peek(); if (callType == Call.TRANSFORM && frame != null && frame.callType == Call.AVAILABLE) { frame.setTransformerName(transformerName); frame.setSourceSize(sourceSize); frame.callType = Call.AVAILABLE_AND_TRANSFORM; }//from w w w. ja v a 2 s . com // Create a new frame. Logging level is set to trace if the file size is 0 boolean origDebugOutput = ThreadInfo.setDebugOutput(ThreadInfo.getDebugOutput() && sourceSize != 0); frame = new Frame(frame, transformerName, fromUrl, sourceMimetype, targetMimetype, sourceSize, options, callType, origDebugOutput); ourStack.push(frame); if (callType == Call.TRANSFORM) { // Log the basic info about this transformation logBasicDetails(frame, sourceSize, options.getUse(), transformerName, (ourStack.size() == 1)); } }
From source file:org.alfresco.repo.content.transform.TransformerDebug.java
/**
 * Called to identify a transformer that cannot be used during working out
 * available transformers. Records the rejection on the current debug frame so
 * it can be reported later.
 *
 * @param transformer         the transformer that was rejected
 * @param sourceMimetype      mimetype being transformed from
 * @param targetMimetype      mimetype being transformed to
 * @param maxSourceSizeKBytes the size limit that caused the rejection
 */
public void unavailableTransformer(ContentTransformer transformer, String sourceMimetype, String targetMimetype,
        long maxSourceSizeKBytes) {
    if (!isEnabled()) {
        return;
    }
    Frame frame = ThreadInfo.getStack().peek();
    if (frame == null) {
        return;
    }
    Deque<String> isTransformableStack = ThreadInfo.getIsTransformableStack();
    // Prefer the name pushed by the caller (if any) over the transformer's own name.
    String name = isTransformableStack.isEmpty() ? getName(transformer) : isTransformableStack.getFirst();
    boolean debug = maxSourceSizeKBytes != 0;
    if (frame.unavailableTransformers == null) {
        frame.unavailableTransformers = new TreeSet<UnavailableTransformer>();
    }
    String priority = gePriority(transformer, sourceMimetype, targetMimetype);
    frame.unavailableTransformers.add(new UnavailableTransformer(name, priority, maxSourceSizeKBytes, debug));
}
From source file:org.alfresco.repo.content.transform.TransformerDebug.java
/** * Called once all available transformers have been identified. */// w w w .j a v a2s. c om public void availableTransformers(List<ContentTransformer> transformers, long sourceSize, TransformationOptions options, String calledFrom) { if (isEnabled()) { Deque<Frame> ourStack = ThreadInfo.getStack(); Frame frame = ourStack.peek(); boolean firstLevel = ourStack.size() == 1; // Override setDebugOutput(false) to allow debug when there are transformers but they are all unavailable // Note once turned on we don't turn it off again. if (transformers.size() == 0) { frame.setFailureReason(NO_TRANSFORMERS); if (frame.unavailableTransformers != null && frame.unavailableTransformers.size() != 0) { ThreadInfo.setDebugOutput(true); } } frame.setSourceSize(sourceSize); // Log the basic info about this transformation logBasicDetails(frame, sourceSize, options.getUse(), calledFrom + ((transformers.size() == 0) ? " NO transformers" : ""), firstLevel); // Report available and unavailable transformers char c = 'a'; int longestNameLength = getLongestTransformerNameLength(transformers, frame); for (ContentTransformer trans : transformers) { String name = getName(trans); int padName = longestNameLength - name.length() + 1; long maxSourceSizeKBytes = trans.getMaxSourceSizeKBytes(frame.sourceMimetype, frame.targetMimetype, frame.options); String size = maxSourceSizeKBytes > 0 ? "< " + fileSize(maxSourceSizeKBytes * 1024) : ""; int padSize = 10 - size.length(); String priority = gePriority(trans, frame.sourceMimetype, frame.targetMimetype); log((c == 'a' ? 
"**" : " ") + (c++) + ") " + priority + ' ' + name + spaces(padName) + size + spaces(padSize) + ms(trans.getTransformationTime(frame.sourceMimetype, frame.targetMimetype))); } if (frame.unavailableTransformers != null) { for (UnavailableTransformer unavailable : frame.unavailableTransformers) { int pad = longestNameLength - unavailable.name.length(); String reason = "> " + fileSize(unavailable.maxSourceSizeKBytes * 1024); if (unavailable.debug || logger.isTraceEnabled()) { log("--" + (c++) + ") " + unavailable.priority + ' ' + unavailable.name + spaces(pad + 1) + reason, unavailable.debug); } } } } }
From source file:org.alfresco.repo.content.transform.TransformerDebug.java
/**
 * Removes the top debug frame and logs how long the call took, provided the
 * frame matches the supplied call type (an AVAILABLE_AND_TRANSFORM frame also
 * matches an AVAILABLE pop).
 *
 * @param callType       the call type expected on top of the stack
 * @param suppressFinish when true, the "Finished in" line is not logged
 */
private void pop(Call callType, boolean suppressFinish) {
    Deque<Frame> ourStack = ThreadInfo.getStack();
    if (ourStack.isEmpty()) {
        return;
    }
    Frame frame = ourStack.peek();
    boolean matches = frame.callType == callType
            || (frame.callType == Call.AVAILABLE_AND_TRANSFORM && callType == Call.AVAILABLE);
    if (!matches) {
        return;
    }
    int size = ourStack.size();
    String ms = ms(System.currentTimeMillis() - frame.start);
    logInfo(frame, size, ms);
    boolean firstLevel = size == 1;
    if (!suppressFinish && (firstLevel || logger.isTraceEnabled())) {
        log(FINISHED_IN + ms + (frame.callType == Call.AVAILABLE ? " Transformer NOT called" : "")
                + (firstLevel ? "\n" : ""), firstLevel);
    }
    // Restore the debug-output flag captured when the frame was pushed.
    setDebugOutput(frame.origDebugOutput);
    ourStack.pop();
}
From source file:org.alfresco.repo.content.transform.TransformerDebug.java
/** * Log a message prefixed with the current transformation reference * and include a exception, suppressing the stack trace if repeated * as we return up the stack of transformers. * @param message/*from w ww . jav a 2 s. c o m*/ */ public void debug(String message, Throwable t) { if (isEnabled()) { // Trim messages of the form: "Failed... : \n reader:...\n writer:..." String msg = t.getMessage(); if (msg != null) { int i = msg.indexOf(": \n"); if (i != -1) { msg = msg.substring(0, i); } log(message + ' ' + msg); } else { log(message); } Deque<Frame> ourStack = ThreadInfo.getStack(); if (!ourStack.isEmpty()) { Frame frame = ourStack.peek(); frame.setFailureReason(message + ' ' + getRootCauseMessage(t)); } } }
From source file:org.apache.hadoop.hbase.tool.LoadIncrementalHFiles.java
/**
 * Bulk loads the queued HFiles into the table, retrying (with re-grouping and
 * re-splitting) when region splits occur mid-load.
 *
 * @return a map from each loaded item to the start key of the region it was
 *         loaded into
 * @throws IOException if the retry limit is exceeded or too many HFiles target
 *         one family of one region
 */
private Map<LoadQueueItem, ByteBuffer> performBulkLoad(Admin admin, Table table, RegionLocator regionLocator,
        Deque<LoadQueueItem> queue, ExecutorService pool, SecureBulkLoadClient secureClient, boolean copyFile)
        throws IOException {
    int count = 0;
    // NOTE(review): assumes queue is non-empty here — peek() would NPE
    // otherwise; verify against callers.
    fsDelegationToken.acquireDelegationToken(queue.peek().getFilePath().getFileSystem(getConf()));
    bulkToken = secureClient.prepareBulkLoad(admin.getConnection());
    Pair<Multimap<ByteBuffer, LoadQueueItem>, Set<String>> pair = null;
    Map<LoadQueueItem, ByteBuffer> item2RegionMap = new HashMap<>();
    // Assumes that region splits can happen while this occurs.
    while (!queue.isEmpty()) {
        // need to reload split keys each iteration.
        final Pair<byte[][], byte[][]> startEndKeys = regionLocator.getStartEndKeys();
        if (count != 0) {
            LOG.info("Split occurred while grouping HFiles, retry attempt " + count + " with " + queue.size()
                    + " files remaining to group or split");
        }
        // Retry budget: configured value, but never fewer than regions + 1.
        int maxRetries = getConf().getInt(HConstants.BULKLOAD_MAX_RETRIES_NUMBER, 10);
        maxRetries = Math.max(maxRetries, startEndKeys.getFirst().length + 1);
        if (maxRetries != 0 && count >= maxRetries) {
            throw new IOException("Retry attempted " + count + " times without completing, bailing out");
        }
        count++;
        // Using ByteBuffer for byte[] equality semantics
        pair = groupOrSplitPhase(table, pool, queue, startEndKeys);
        Multimap<ByteBuffer, LoadQueueItem> regionGroups = pair.getFirst();
        if (!checkHFilesCountPerRegionPerFamily(regionGroups)) {
            // Error is logged inside checkHFilesCountPerRegionPerFamily.
            throw new IOException("Trying to load more than " + maxFilesPerRegionPerFamily
                    + " hfiles to one family of one region");
        }
        bulkLoadPhase(table, admin.getConnection(), pool, queue, regionGroups, copyFile, item2RegionMap);
        // NOTE: The next iteration's split / group could happen in parallel to
        // atomic bulkloads assuming that there are splits and no merges, and
        // that we can atomically pull out the groups we want to retry.
    }
    if (!queue.isEmpty()) {
        throw new RuntimeException(
                "Bulk load aborted with some files not yet loaded." + "Please check log for more details.");
    }
    return item2RegionMap;
}
From source file:org.apache.hadoop.hive.ql.parse.ASTNode.java
private StringBuilder dump(StringBuilder sb) { Deque<ASTNode> stack = new ArrayDeque<ASTNode>(); stack.push(this); int tabLength = 0; while (!stack.isEmpty()) { ASTNode next = stack.peek(); if (!next.visited) { sb.append(StringUtils.repeat(" ", tabLength * 3)); sb.append(next.toString());//from ww w . j av a 2 s.com sb.append("\n"); if (next.children != null) { for (int i = next.children.size() - 1; i >= 0; i--) { stack.push((ASTNode) next.children.get(i)); } } tabLength++; next.visited = true; } else { tabLength--; next.visited = false; stack.pop(); } } return sb; }
From source file:org.apache.hadoop.hive.ql.parse.ASTNode.java
/**
 * Builds the LISP-style string form of this subtree iteratively, memoizing the
 * rendered text on {@code rootNode} and recording each node's start/end index
 * into that memoized string. Order of the memoized appends is significant.
 *
 * @param rootNode the root that owns the memoized string buffer
 * @return the memoized substring covering this node
 */
private String toStringTree(ASTNode rootNode) {
    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(this);
    while (!stack.isEmpty()) {
        ASTNode next = stack.peek();
        if (!next.visited) {
            // A space separates siblings (every child after the first).
            if (next.parent != null && next.parent.getChildCount() > 1 && next != next.parent.getChild(0)) {
                rootNode.addtoMemoizedString(" ");
            }
            next.rootNode = rootNode;
            next.startIndx = rootNode.getMemoizedStringLen();
            // Leaf
            if (next.children == null || next.children.size() == 0) {
                String str = next.toString();
                // String literals keep their case; everything else is lowered.
                rootNode.addtoMemoizedString(
                        next.getType() != HiveParser.StringLiteral ? str.toLowerCase() : str);
                next.endIndx = rootNode.getMemoizedStringLen();
                stack.pop();
                continue;
            }
            // Non-nil interior nodes open a parenthesized group.
            if (!next.isNil()) {
                rootNode.addtoMemoizedString("(");
                String str = next.toString();
                rootNode.addtoMemoizedString(
                        (next.getType() == HiveParser.StringLiteral || null == str) ? str : str.toLowerCase());
                rootNode.addtoMemoizedString(" ");
            }
            // Push children in reverse so the leftmost is rendered first.
            if (next.children != null) {
                for (int i = next.children.size() - 1; i >= 0; i--) {
                    stack.push((ASTNode) next.children.get(i));
                }
            }
            next.visited = true;
        } else {
            // Children done: close the group and reset the visited flag.
            if (!next.isNil()) {
                rootNode.addtoMemoizedString(")");
            }
            next.endIndx = rootNode.getMemoizedStringLen();
            next.visited = false;
            stack.pop();
        }
    }
    return rootNode.getMemoizedSubString(startIndx, endIndx);
}
From source file:org.apache.nifi.processors.standard.util.FTPUtils.java
/** * Handles the logic required to change to the given directory RELATIVE TO THE CURRENT DIRECTORY which can include creating new directories needed. * * This will first attempt to change to the full path of the given directory outright. If that fails, then it will attempt to change from the top of the tree of the given directory all the way * down to the final leaf node of the given directory. * * @param client - the ftp client with an already active connection * @param dirPath - the path to change or create directories to * @param createDirs - if true will attempt to create any missing directories * @param processor - used solely for targeting logging output. * @throws IOException if any access problem occurs *///from ww w .j av a 2 s .c om public static void changeWorkingDirectory(final FTPClient client, final String dirPath, final boolean createDirs, final Processor processor) throws IOException { final String currentWorkingDirectory = client.printWorkingDirectory(); final File dir = new File(dirPath); logger.debug(processor + " attempting to change directory from " + currentWorkingDirectory + " to " + dir.getPath()); boolean dirExists = false; final String forwardPaths = dir.getPath().replaceAll(Matcher.quoteReplacement("\\"), Matcher.quoteReplacement("/")); //always use forward paths for long string attempt try { dirExists = client.changeWorkingDirectory(forwardPaths); if (dirExists) { logger.debug(processor + " changed working directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "'"); } else { logger.debug(processor + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way."); } } catch (final IOException ioe) { logger.debug(processor + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way."); } if (!dirExists) { //couldn't navigate directly...begin hard work final Deque<String> stack = new LinkedList<>(); File 
fakeFile = new File(dir.getPath()); do { stack.push(fakeFile.getName()); } while ((fakeFile = fakeFile.getParentFile()) != null); String dirName = null; while ((dirName = stack.peek()) != null) { stack.pop(); //find out if exists, if not make it if configured to do so or throw exception dirName = ("".equals(dirName.trim())) ? "/" : dirName; boolean exists = false; try { exists = client.changeWorkingDirectory(dirName); } catch (final IOException ioe) { exists = false; } if (!exists && createDirs) { logger.debug(processor + " creating new directory and changing to it " + dirName); client.makeDirectory(dirName); if (!(client.makeDirectory(dirName) || client.changeWorkingDirectory(dirName))) { throw new IOException( processor + " could not create and change to newly created directory " + dirName); } else { logger.debug(processor + " successfully changed working directory to " + dirName); } } else if (!exists) { throw new IOException(processor + " could not change directory to '" + dirName + "' from '" + currentWorkingDirectory + "'"); } } } }
From source file:org.apache.nifi.processors.standard.util.SFTPUtils.java
public static void changeWorkingDirectory(final ChannelSftp sftp, final String dirPath, final boolean createDirs, final Processor proc) throws IOException { final Deque<String> stack = new LinkedList<>(); File dir = new File(dirPath); String currentWorkingDirectory = null; boolean dirExists = false; final String forwardPaths = dir.getPath().replaceAll(Matcher.quoteReplacement("\\"), Matcher.quoteReplacement("/")); try {/* www. j a v a 2 s .co m*/ currentWorkingDirectory = sftp.pwd(); logger.debug(proc + " attempting to change directory from " + currentWorkingDirectory + " to " + dir.getPath()); //always use forward paths for long string attempt sftp.cd(forwardPaths); dirExists = true; logger.debug(proc + " changed working directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "'"); } catch (final SftpException sftpe) { logger.debug(proc + " could not change directory to '" + forwardPaths + "' from '" + currentWorkingDirectory + "' so trying the hard way."); } if (dirExists) { return; } if (!createDirs) { throw new IOException("Unable to change to requested working directory \'" + forwardPaths + "\' but not configured to create dirs."); } do { stack.push(dir.getName()); } while ((dir = dir.getParentFile()) != null); String dirName = null; while ((dirName = stack.peek()) != null) { stack.pop(); //find out if exists, if not make it if configured to do so or throw exception dirName = ("".equals(dirName.trim())) ? "/" : dirName; try { sftp.cd(dirName); } catch (final SftpException sftpe) { logger.debug(proc + " creating new directory and changing to it " + dirName); try { sftp.mkdir(dirName); sftp.cd(dirName); } catch (final SftpException e) { throw new IOException(proc + " could not make/change directory to [" + dirName + "] [" + e.getLocalizedMessage() + "]", e); } } } }