List of usage examples for java.util Queue offer
boolean offer(E e);
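offer inserts the specified element into the queue if it is possible to do so immediately without violating capacity restrictions, returning true on success and false if no space is currently available. Unlike add, it reports failure through its return value instead of throwing an IllegalStateException, which is why it is generally preferred on capacity-constrained queues. Before the per-project examples below, here is a minimal, self-contained sketch of that contract; the class name OfferDemo and the queue choices and values are illustrative only, not taken from any of the listed projects.

import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.ArrayBlockingQueue;

public class OfferDemo {
    public static void main(String[] args) {
        // Unbounded queue: offer always succeeds and behaves like add.
        Queue<String> tasks = new ArrayDeque<>();
        System.out.println(tasks.offer("first"));   // true
        System.out.println(tasks.offer("second"));  // true

        // Bounded queue: offer returns false instead of throwing when full.
        Queue<Integer> bounded = new ArrayBlockingQueue<>(1);
        System.out.println(bounded.offer(1));       // true
        System.out.println(bounded.offer(2));       // false, queue is at capacity
    }
}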
From source file:it.scoppelletti.mobilepower.app.FragmentLayoutController.java
/**
 * Restores the instance state.
 *
 * <P>The activity restores the state of the {@code FragmentLayoutController}
 * instance within its own {@code onRestoreInstanceState} method.</P>
 *
 * @param savedInstanceState Instance state.
 * @param fragmentCollector  Collector of the detail fragments.
 */
public void onRestoreInstanceState(Bundle savedInstanceState,
        FragmentLayoutController.FragmentCollector fragmentCollector) {
    int n, oldPanelCount, tnId;
    String tag;
    ActivitySupport activitySupport;
    FragmentSupport fragment;
    FragmentManager fragmentMgr;
    FragmentLayoutController.BackStackChangedListener backStackListener;
    Queue<FragmentSupport> fragmentQueue;
    Queue<FragmentLayoutController.FragmentEntry> clonedQueue;

    if (savedInstanceState == null) {
        throw new NullPointerException("Argument savedInstanceState is null.");
    }
    if (fragmentCollector == null) {
        throw new NullPointerException("Argument fragmentCollector is null.");
    }
    if (!(myActivity instanceof ActivitySupport)) {
        myLogger.warn("Activity not implement interface ActivitySupport.");
        return;
    }

    oldPanelCount = savedInstanceState.getInt(FragmentLayoutController.STATE_PANELCOUNT, 0);
    if (oldPanelCount < 1) {
        myLogger.warn("Unexpected {}={} in saved instance state.",
                FragmentLayoutController.STATE_PANELCOUNT, oldPanelCount);
        return;
    }

    myLogger.debug("{}: current={}, saved instance state={}.",
            new Object[] { FragmentLayoutController.STATE_PANELCOUNT, myFrameCount, oldPanelCount });
    if (oldPanelCount == myFrameCount) {
        // The number of panels has not changed:
        // the system has already restored the fragments correctly.
        return;
    }

    fragmentQueue = new ArrayDeque<FragmentSupport>();
    fragmentCollector.collectFragments(fragmentQueue);

    // Associate each fragment with the tag it was inserted with
    clonedQueue = new ArrayDeque<FragmentLayoutController.FragmentEntry>();
    while (!fragmentQueue.isEmpty()) {
        fragment = fragmentQueue.remove();
        if (fragment == null) {
            myLogger.warn("Ignoring null.");
            continue;
        }

        tag = fragment.asFragment().getTag();
        if (StringUtils.isBlank(tag)) {
            myLogger.warn("Ignoring fragment with empty tag.");
            continue;
        }

        clonedQueue.offer(new FragmentLayoutController.FragmentEntry(fragment.cloneFragment(), tag));
    }

    fragmentQueue = null; // free memory
    activitySupport = (ActivitySupport) myActivity;
    fragmentMgr = activitySupport.getSupportFragmentManager();

    // Restore the initial fragment configuration
    for (n = fragmentMgr.getBackStackEntryCount(); n > 0; n--) {
        fragmentMgr.popBackStack();
    }

    if (myFrameCount > 1) {
        tnId = arrangeFragments(fragmentMgr, clonedQueue);
    } else {
        tnId = arrangePanel(fragmentMgr, clonedQueue);
    }

    if (Build.VERSION.SDK_INT < BuildCompat.VERSION_CODES.HONEYCOMB) {
        return;
    }

    // - Android 4.1.2
    // The action bar is not updated correctly, perhaps because it is assumed
    // there is no need for it with transactions scheduled while the activity
    // is being restored (or maybe because scheduling transactions during the
    // activity restore is simply not expected):
    // Since transaction execution is asynchronous, I have to use a back stack
    // change listener that handles the last transaction I scheduled.
    backStackListener = new FragmentLayoutController.BackStackChangedListener(myActivity, fragmentMgr, tnId);
    fragmentMgr.addOnBackStackChangedListener(backStackListener);
}
From source file:org.neo4j.io.pagecache.PageCacheTest.java
@SafeVarargs
private static <E> Queue<E> queue(E... items) {
    Queue<E> queue = new ConcurrentLinkedQueue<>();
    for (E item : items) {
        queue.offer(item);
    }
    return queue;
}
From source file:org.zkoss.ganttz.data.GanttDiagramGraph.java
List<Recalculation> getRecalculationsNeededFrom(V task) {
    List<Recalculation> result = new ArrayList<>();
    Set<Recalculation> parentRecalculationsAlreadyDone = new HashSet<>();

    Recalculation first = recalculationFor(allPointsPotentiallyModified(task));
    first.couldHaveBeenModifiedBeforehand();
    result.addAll(getParentsRecalculations(parentRecalculationsAlreadyDone, first.taskPoint));
    result.add(first);

    Queue<Recalculation> pendingOfVisit = new LinkedList<>();
    pendingOfVisit.offer(first);

    Map<Recalculation, Recalculation> alreadyVisited = new HashMap<>();
    alreadyVisited.put(first, first);

    while (!pendingOfVisit.isEmpty()) {
        Recalculation current = pendingOfVisit.poll();

        for (TaskPoint each : current.taskPoint.getImmediateSuccessors()) {
            if (each.isImmediatelyDerivedFrom(current.taskPoint)) {
                continue;
            }

            Recalculation recalculationToAdd = getRecalcualtionToAdd(each, alreadyVisited);
            recalculationToAdd.comesFromPredecessor(current);

            if (!alreadyVisited.containsKey(recalculationToAdd)) {
                result.addAll(getParentsRecalculations(parentRecalculationsAlreadyDone, each));
                result.add(recalculationToAdd);
                pendingOfVisit.offer(recalculationToAdd);
                alreadyVisited.put(recalculationToAdd, recalculationToAdd);
            }
        }
    }
    return topologicalSorter.sort(result);
}
From source file:hudson.plugins.emailext.plugins.content.BuildLogRegexContent.java
String getContent(BufferedReader reader) throws IOException {
    final boolean asHtml = matchedLineHtmlStyle != null;
    escapeHtml = asHtml || escapeHtml;

    final Pattern pattern = Pattern.compile(regex);
    final StringBuffer buffer = new StringBuffer();
    int numLinesTruncated = 0;
    int numMatches = 0;
    int numLinesStillNeeded = 0;
    boolean insidePre = false;
    Queue<String> linesBeforeList = new LinkedList<String>();
    String line = null;

    while ((line = reader.readLine()) != null) {
        // Remove console notes (JENKINS-7402)
        line = ConsoleNote.removeNotes(line);

        // Remove any lines before that are no longer needed.
        while (linesBeforeList.size() > linesBefore) {
            linesBeforeList.remove();
            ++numLinesTruncated;
        }

        final Matcher matcher = pattern.matcher(line);
        final StringBuffer sb = new StringBuffer();
        boolean matched = false;
        while (matcher.find()) {
            matched = true;
            if (substText != null) {
                matcher.appendReplacement(sb, substText);
            } else {
                break;
            }
        }

        if (matched) {
            // The current line matches.
            if (showTruncatedLines == true && numLinesTruncated > 0) {
                // Append information about truncated lines.
                insidePre = stopPre(buffer, insidePre);
                appendLinesTruncated(buffer, numLinesTruncated, asHtml);
                numLinesTruncated = 0;
            }
            if (asHtml) {
                insidePre = startPre(buffer, insidePre);
            }
            while (!linesBeforeList.isEmpty()) {
                appendContextLine(buffer, linesBeforeList.remove(), escapeHtml);
            }
            // Append the (possibly transformed) current line.
            if (substText != null) {
                matcher.appendTail(sb);
                line = sb.toString();
            }
            appendMatchedLine(buffer, line, escapeHtml, matchedLineHtmlStyle, addNewline);
            ++numMatches;
            // Set up to add numLinesStillNeeded
            numLinesStillNeeded = linesAfter;
        } else {
            // The current line did not match.
            if (numLinesStillNeeded > 0) {
                // Append this line as a line after.
                appendContextLine(buffer, line, escapeHtml);
                --numLinesStillNeeded;
            } else {
                // Store this line as a possible line before.
                linesBeforeList.offer(line);
            }
        }
        if (maxMatches != 0 && numMatches >= maxMatches && numLinesStillNeeded == 0) {
            break;
        }
    }

    if (showTruncatedLines == true) {
        // Count the rest of the lines.
        // Include any lines in linesBefore.
        while (linesBeforeList.size() > 0) {
            linesBeforeList.remove();
            ++numLinesTruncated;
        }
        if (line != null) {
            // Include the rest of the lines that haven't been read in.
            while ((line = reader.readLine()) != null) {
                ++numLinesTruncated;
            }
        }
        if (numLinesTruncated > 0) {
            insidePre = stopPre(buffer, insidePre);
            appendLinesTruncated(buffer, numLinesTruncated, asHtml);
        }
    }

    insidePre = stopPre(buffer, insidePre);

    if (buffer.length() == 0) {
        return defaultValue;
    }
    return buffer.toString();
}
From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java
@Override
public void importDataDirectory(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    //Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    //Setup reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    //Convert directory to URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        //Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes, options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        //See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        //Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        //Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                //Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                //Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            //Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n"
                        + "\tPer entity exception logs and a full report can be found in " + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException(
                    "The following PortalDataKeys are not listed in the dataTypeImportOrder List: " + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}
From source file:org.jasig.portal.io.xml.JaxbPortalDataHandlerService.java
@Override
public void importData(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    //Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    //Setup reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    //Convert directory to URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        //Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes, options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        //See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        //Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        //Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                //Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                //Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            //Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n"
                        + "\tPer entity exception logs and a full report can be found in " + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException(
                    "The following PortalDataKeys are not listed in the dataTypeImportOrder List: " + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}
From source file:tigase.xmpp.impl.JabberIqRegister.java
/**
 * {@inheritDoc}
 * <br><br>
 * TODO: Implement registration form configurable and loading
 * all the fields from the registration form TODO: rewrite the
 * plugin using the XMPPProcessorAbstract API
 */
@Override
public void process(Packet packet, XMPPResourceConnection session, NonAuthUserRepository repo,
        Queue<Packet> results, Map<String, Object> settings) throws XMPPException {
    if (log.isLoggable(Level.FINEST)) {
        log.finest("Processing packet: " + packet.toString());
    }
    if (session == null) {
        if (log.isLoggable(Level.FINEST)) {
            log.finest("Session is null, ignoring");
        }
        return;
    } // end of if (session == null)

    BareJID id = session.getDomainAsJID().getBareJID();
    if (packet.getStanzaTo() != null) {
        id = packet.getStanzaTo().getBareJID();
    }
    try {
        // I think it does not make sense to check the 'to', just the connection ID
        // if ((id.equals(session.getDomain()) ||
        // id.equals(session.getUserId().toString()))
        // && packet.getFrom().equals(session.getConnectionId())) {
        // Wrong thinking. The user may send a request from his own account
        // to register with a transport or any other service, then the connection
        // ID matches the session id but this is still not a request to the local
        // server. The TO address must be checked too.....
        // if (packet.getPacketFrom().equals(session.getConnectionId())) {
        if ((packet.getPacketFrom() != null) && packet.getPacketFrom().equals(session.getConnectionId())
                && (!session.isAuthorized()
                        || (session.isUserId(id) || session.isLocalDomain(id.toString(), false)))) {

            // We want to allow password change but not user registration if
            // registration is disabled. The only way to tell apart registration
            // from password change is to check whether the user is authenticated.
            // For authenticated user the request means password change, otherwise
            // registration attempt.
            // Account deregistration is also called under authenticated session, so
            // it should be blocked here if registration for domain is disabled.
            // Assuming if user cannot register account he cannot also deregister account
            Element request = packet.getElement();
            boolean remove = request.findChildStaticStr(IQ_QUERY_REMOVE_PATH) != null;

            if (!session.isAuthorized() || remove) {
                if (!isRegistrationAllowedForConnection(packet.getFrom())) {
                    results.offer(Authorization.NOT_ALLOWED.getResponseMessage(packet,
                            "Registration is not allowed for this connection.", true));
                    ++statsInvalidRegistrations;
                    return;
                }
                if (!session.getDomain().isRegisterEnabled()) {
                    results.offer(Authorization.NOT_ALLOWED.getResponseMessage(packet,
                            "Registration is not allowed for this domain.", true));
                    ++statsInvalidRegistrations;
                    return;
                }
            }

            Authorization result = Authorization.NOT_AUTHORIZED;
            StanzaType type = packet.getType();

            switch (type) {
            case set:
                // Is it registration cancel request?
                Element elem = request.findChildStaticStr(IQ_QUERY_REMOVE_PATH);
                if (elem != null) {
                    // Yes this is registration cancel request
                    // According to JEP-0077 there must not be any
                    // more subelements apart from <remove/>
                    elem = request.findChildStaticStr(Iq.IQ_QUERY_PATH);
                    if (elem.getChildren().size() > 1) {
                        result = Authorization.BAD_REQUEST;
                    } else {
                        try {
                            result = session.unregister(packet.getStanzaFrom().toString());

                            Packet ok_result = packet.okResult((String) null, 0);

                            // We have to set SYSTEM priority for the packet here,
                            // otherwise the network connection is closed before the
                            // client received a response
                            ok_result.setPriority(Priority.SYSTEM);
                            results.offer(ok_result);

                            Packet close_cmd = Command.CLOSE.getPacket(session.getSMComponentId(),
                                    session.getConnectionId(), StanzaType.set, session.nextStanzaId());
                            close_cmd.setPacketTo(session.getConnectionId());
                            close_cmd.setPriority(Priority.LOWEST);
                            results.offer(close_cmd);
                        } catch (NotAuthorizedException e) {
                            results.offer(Authorization.NOT_AUTHORIZED.getResponseMessage(packet,
                                    "You must authorize session first.", true));
                        } // end of try-catch
                    }
                } else {
                    String user_name;
                    String password;
                    String email;

                    if (signedFormRequired) {
                        final String expectedToken = UUID
                                .nameUUIDFromBytes(
                                        (session.getConnectionId() + "|" + session.getSessionId()).getBytes())
                                .toString();

                        FormSignatureVerifier verifier = new FormSignatureVerifier(oauthConsumerKey,
                                oauthConsumerSecret);
                        Element queryEl = request.getChild("query", "jabber:iq:register");
                        Element formEl = queryEl == null ? null : queryEl.getChild("x", "jabber:x:data");
                        if (formEl == null) {
                            results.offer(Authorization.BAD_REQUEST.getResponseMessage(packet,
                                    "Use Signed Registration Form", true));
                            ++statsInvalidRegistrations;
                            return;
                        }
                        Form form = new Form(formEl);
                        if (!expectedToken.equals(form.getAsString("oauth_token"))) {
                            log.finest("Received oauth_token is different that sent one.");
                            results.offer(Authorization.BAD_REQUEST.getResponseMessage(packet,
                                    "Unknown oauth_token", true));
                            ++statsInvalidRegistrations;
                            return;
                        }
                        if (!oauthConsumerKey.equals(form.getAsString("oauth_consumer_key"))) {
                            log.finest("Unknown oauth_consumer_key");
                            results.offer(Authorization.BAD_REQUEST.getResponseMessage(packet,
                                    "Unknown oauth_consumer_key", true));
                            ++statsInvalidRegistrations;
                            return;
                        }
                        try {
                            long timestamp = verifier.verify(packet.getStanzaTo(), form);
                            user_name = form.getAsString("username");
                            password = form.getAsString("password");
                            email = form.getAsString("email");
                        } catch (FormSignerException e) {
                            log.fine("Form Signature Validation Problem: " + e.getMessage());
                            results.offer(Authorization.BAD_REQUEST.getResponseMessage(packet,
                                    "Invalid form signature", true));
                            ++statsInvalidRegistrations;
                            return;
                        }
                    } else {
                        // No, so assuming this is registration of a new
                        // user or change registration details for existing user
                        user_name = request.getChildCDataStaticStr(IQ_QUERY_USERNAME_PATH);
                        password = request.getChildCDataStaticStr(IQ_QUERY_PASSWORD_PATH);
                        email = request.getChildCDataStaticStr(IQ_QUERY_EMAIL_PATH);
                    }

                    String pass_enc = null;
                    if (null != password) {
                        pass_enc = XMLUtils.unescape(password);
                    }

                    Map<String, String> reg_params = null;
                    if ((email != null) && !email.trim().isEmpty()) {
                        reg_params = new LinkedHashMap<String, String>();
                        reg_params.put("email", email);
                    }

                    result = session.register(user_name, pass_enc, reg_params);
                    if (result == Authorization.AUTHORIZED) {
                        results.offer(result.getResponseMessage(packet, null, false));
                    } else {
                        ++statsInvalidRegistrations;
                        results.offer(
                                result.getResponseMessage(packet, "Unsuccessful registration attempt", true));
                    }
                }
                break;

            case get: {
                if (signedFormRequired) {
                    results.offer(packet.okResult(prepareRegistrationForm(session), 0));
                } else
                    results.offer(packet.okResult("<instructions>"
                            + "Choose a user name and password for use with this service."
                            + "Please provide also your e-mail address." + "</instructions>" + "<username/>"
                            + "<password/>" + "<email/>", 1));
                break;
            }

            case result:
                // It might be a registration request from transport for example...
                Packet pack_res = packet.copyElementOnly();
                pack_res.setPacketTo(session.getConnectionId());
                results.offer(pack_res);
                break;

            default:
                results.offer(Authorization.BAD_REQUEST.getResponseMessage(packet, "Message type is incorrect",
                        true));
                break;
            } // end of switch (type)
        } else {
            if (session.isUserId(id)) {
                // It might be a registration request from transport for example...
                Packet pack_res = packet.copyElementOnly();
                pack_res.setPacketTo(session.getConnectionId());
                results.offer(pack_res);
            } else {
                results.offer(packet.copyElementOnly());
            }
        }
    } catch (TigaseStringprepException ex) {
        results.offer(Authorization.JID_MALFORMED.getResponseMessage(packet,
                "Incorrect user name, stringprep processing failed.", true));
    } catch (NotAuthorizedException e) {
        results.offer(Authorization.NOT_AUTHORIZED.getResponseMessage(packet,
                "You are not authorized to change registration settings.\n" + e.getMessage(), true));
    } catch (TigaseDBException e) {
        log.warning("Database problem: " + e);
        results.offer(Authorization.INTERNAL_SERVER_ERROR.getResponseMessage(packet,
                "Database access problem, please contact administrator.", true));
    } // end of try-catch
}
From source file:org.neo4j.io.pagecache.PageCacheTest.java
@Test(timeout = SEMI_LONG_TIMEOUT_MILLIS)
public void backgroundThreadsMustGracefullyShutDown() throws Exception {
    assumeTrue("For some reason, this test is very flaky on Windows", !SystemUtils.IS_OS_WINDOWS);

    int iterations = 1000;
    List<WeakReference<PageCache>> refs = new LinkedList<>();
    final Queue<Throwable> caughtExceptions = new ConcurrentLinkedQueue<>();
    final Thread.UncaughtExceptionHandler exceptionHandler = (t, e) -> {
        e.printStackTrace();
        caughtExceptions.offer(e);
    };
    Thread.UncaughtExceptionHandler defaultUncaughtExceptionHandler = Thread.getDefaultUncaughtExceptionHandler();
    Thread.setDefaultUncaughtExceptionHandler(exceptionHandler);

    try {
        generateFileWithRecords(file("a"), recordCount, recordSize);
        int filePagesInTotal = recordCount / recordsPerFilePage;

        for (int i = 0; i < iterations; i++) {
            PageCache cache = createPageCache(fs, maxPages, pageCachePageSize, PageCacheTracer.NULL);

            // Touch all the pages
            PagedFile pagedFile = cache.map(file("a"), filePageSize);
            try (PageCursor cursor = pagedFile.io(0, PF_SHARED_READ_LOCK)) {
                for (int j = 0; j < filePagesInTotal; j++) {
                    assertTrue(cursor.next());
                }
            }

            // We're now likely racing with the eviction thread
            pagedFile.close();
            cache.close();
            refs.add(new WeakReference<>(cache));

            assertTrue(caughtExceptions.isEmpty());
        }
    } finally {
        Thread.setDefaultUncaughtExceptionHandler(defaultUncaughtExceptionHandler);
    }

    // Once the page caches has been closed and all references presumably set to null, then the only thing that
    // could possibly strongly reference the cache is any lingering background thread. If we do a couple of
    // GCs, then we should observe that the WeakReference has been cleared by the garbage collector. If it
    // hasn't, then something must be keeping it alive, even though it has been closed.
    int maxChecks = 100;
    boolean passed;
    do {
        System.gc();
        Thread.sleep(100);
        passed = true;

        for (WeakReference<PageCache> ref : refs) {
            if (ref.get() != null) {
                passed = false;
            }
        }
    } while (!passed && maxChecks-- > 0);

    if (!passed) {
        List<PageCache> nonNullPageCaches = new LinkedList<>();
        for (WeakReference<PageCache> ref : refs) {
            PageCache pageCache = ref.get();
            if (pageCache != null) {
                nonNullPageCaches.add(pageCache);
            }
        }

        if (!nonNullPageCaches.isEmpty()) {
            fail("PageCaches should not be held live after close: " + nonNullPageCaches);
        }
    }
}
From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java
@Override
public void exportAllDataOfType(Set<String> typeIds, File directory, BatchExportOptions options) {
    final Queue<ExportFuture<?>> exportFutures = new ConcurrentLinkedQueue<ExportFuture<?>>();
    final boolean failOnError = options != null ? options.isFailOnError() : true;

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "export");

    //Setup reporting file
    final File exportReport = new File(logDirectory, "data-export.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new BufferedWriter(new FileWriter(exportReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + exportReport, e);
    }

    try {
        for (final String typeId : typeIds) {
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final File typeDir = new File(directory, typeId);
            logger.info("Adding all data of type {} to export queue: {}", typeId, typeDir);

            reportWriter.println(typeId + "," + typeDir);

            final Iterable<? extends IPortalData> dataForType = this.getPortalData(typeId);
            for (final IPortalData data : dataForType) {
                final String dataId = data.getDataId();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(exportFutures, reportWriter, logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong exportTime = new AtomicLong(-1);

                //Create export task
                Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        exportTime.set(System.nanoTime());
                        try {
                            exportData(typeId, dataId, typeDir);
                        } finally {
                            exportTime.set(System.nanoTime() - exportTime.get());
                        }
                    }
                };

                //Submit the export task
                final Future<?> exportFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                final ExportFuture futureHolder = new ExportFuture(exportFuture, typeId, dataId, exportTime);
                exportFutures.offer(futureHolder);
            }

            final List<FutureHolder<?>> newFailed = waitForFutures(exportFutures, reportWriter, logDirectory, true);
            failedFutures.addAll(newFailed);

            reportWriter.flush();

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(failedFutures.size() + " " + typeId + " entities failed to export.\n"
                        + "\tPer entity exception logs and a full report can be found in " + logDirectory);
            }
        }
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to export", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
    }
}
From source file:it.geosolutions.geobatch.flow.event.consumer.file.FileBasedEventConsumer.java
/***************************************************************************
 * Main Thread cycle.
 *
 * <LI>Create needed dirs</LI>
 * <LI>Optionally backup files</LI>
 * <LI>Move files into a job-specific working dir</LI>
 * <LI>Run the actions</LI>
 */
public Queue<FileSystemEvent> call() throws Exception {
    this.canceled = false;

    boolean jobResultSuccessful = false;
    Throwable exceptionOccurred = null;

    getListenerForwarder().setTask("Configuring");
    getListenerForwarder().started();

    try {
        // create live working dir
        getListenerForwarder().progressing(10, "Managing events");

        //
        // Management of current working directory
        //
        // if we work on the input directory, we do not move around
        // anything, unless we want to perform a backup
        if (configuration.isPerformBackup() || !configuration.isPreserveInput()) {
            if (!flowInstanceTempDir.exists() && !flowInstanceTempDir.mkdirs()) {
                throw new IllegalStateException("Could not create consumer backup directory!");
            }
        }

        // set the consumer running context
        // don't know how this running context will be used in a FileBased* hierarchy,
        // anyway let's force the use of proper methods.
        setRunningContext("DONT_USE_AS_FILEPATH_" + flowInstanceTempDir.getAbsolutePath());

        // create backup dir. Creation is deferred until first usage
        getListenerForwarder().progressing(20, "Creating backup dir");

        final File backupDirectory = new File(flowInstanceTempDir, "backup");
        if (configuration.isPerformBackup()) {
            if (!backupDirectory.exists() && !backupDirectory.mkdirs()) {
                throw new IllegalStateException("Could not create consumer backup directory!");
            }
        }

        //
        // Cycling on all the input events
        //
        Queue<FileSystemEvent> fileEventList = new LinkedList<FileSystemEvent>();
        int numProcessedFiles = 0;
        for (FileSystemEvent event : this.eventsQueue) {
            if (LOGGER.isInfoEnabled()) {
                LOGGER.info("[" + Thread.currentThread().getName()
                        + "]: new element retrieved from the MailBox.");
            }

            // get info for the input file event
            final File sourceDataFile = event.getSource();
            final String fileBareName;
            if ((sourceDataFile != null) && sourceDataFile.exists()) {
                fileBareName = FilenameUtils.getName(sourceDataFile.toString());
                getListenerForwarder().progressing(30 + (10f / this.eventsQueue.size() * numProcessedFiles++),
                        "Preprocessing event " + fileBareName);

                //
                // copy input file/dir to current working directory
                //
                if (IOUtils.acquireLock(this, sourceDataFile)) {

                    //
                    // Backing up inputs?
                    //
                    if (this.configuration.isPerformBackup()) {
                        // Backing up files and delete sources.
                        getListenerForwarder().progressing(
                                30 + (10f / this.eventsQueue.size() * numProcessedFiles++),
                                "Creating backup files");

                        // In case we do not work on the input as is, we
                        // move it to our current working directory
                        final File destDataFile = new File(backupDirectory, fileBareName);
                        if (sourceDataFile.isDirectory()) {
                            FileUtils.copyDirectory(sourceDataFile, destDataFile);
                        } else {
                            FileUtils.copyFile(sourceDataFile, destDataFile);
                        }
                    }

                    //
                    // Working on input events directly without moving to working dir?
                    //
                    if (!configuration.isPreserveInput()) {
                        // In case we do not work on the input as is, we
                        // move it to our current working directory
                        final File destDataFile = new File(flowInstanceTempDir, fileBareName);
                        if (sourceDataFile.isDirectory()) {
                            FileUtils.moveDirectory(sourceDataFile, destDataFile);
                        } else {
                            FileUtils.moveFile(sourceDataFile, destDataFile);
                        }

                        // adjust event sources since we moved the files locally
                        fileEventList.offer(new FileSystemEvent(destDataFile, event.getEventType()));
                    } else {
                        // we are going to work directly on the input files
                        fileEventList.offer(event);
                    }

                    if (LOGGER.isInfoEnabled()) {
                        LOGGER.info("[" + Thread.currentThread().getName() + "]: accepted file " + sourceDataFile);
                    }
                } else {
                    if (LOGGER.isErrorEnabled()) {
                        LOGGER.error(new StringBuilder("[").append(Thread.currentThread().getName())
                                .append("]: could not lock file ").append(sourceDataFile).toString());
                    }

                    /*
                     * TODO: lock not acquired: what else?
                     */
                }
            } // event.getSource()!=null && sourceDataFile.exists()
            else {
                /*
                 * event.getSource()==null || !sourceDataFile.exists() this
                 * could be an empty file representing a POLLING event
                 */
                fileEventList.offer(event);
            }
        }

        //
        // TODO if no further processing is necessary or can be
        // done due to some error, set eventConsumerStatus to Finished or
        // Failure. (etj: ???)
        //
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("[" + Thread.currentThread().getName() + "]: new element processed.");
        }

        // Finally, run the Actions on the files
        getListenerForwarder().progressing(50, "Running actions");

        try {
            // apply actions into the actual context (currentRunDirectory)
            fileEventList = this.applyActions(fileEventList);
            this.setStatus(EventConsumerStatus.COMPLETED);
            jobResultSuccessful = true;
        } catch (ActionException ae) {
            this.setStatus(EventConsumerStatus.FAILED);
            throw ae;
        }

        return fileEventList;
    } catch (ActionException e) {
        String msg = "[" + Thread.currentThread().getName() + "] Error during " + e.getType().getSimpleName()
                + " execution: " + e.getLocalizedMessage();
        if (LOGGER.isDebugEnabled()) {
            LOGGER.error(msg, e);
        } else {
            LOGGER.error(msg);
        }
        this.setStatus(EventConsumerStatus.FAILED);
        exceptionOccurred = e;
    } catch (IOException e) {
        if (LOGGER.isErrorEnabled()) {
            LOGGER.error("[" + Thread.currentThread().getName() + "] could not move file "
                    + " due to the following IO error: " + e.getLocalizedMessage(), e);
        }
        this.setStatus(EventConsumerStatus.FAILED);
        exceptionOccurred = e;
    } catch (InterruptedException e) {
        if (LOGGER.isErrorEnabled()) {
            LOGGER.error("[" + Thread.currentThread().getName() + "] could not move file "
                    + " due to an InterruptedException: " + e.getLocalizedMessage(), e);
        }
        this.setStatus(EventConsumerStatus.FAILED);
        exceptionOccurred = e;
    } catch (RuntimeException e) {
        exceptionOccurred = e;
        throw e;
    } finally {
        getListenerForwarder().progressing(100, "Completed");
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info(Thread.currentThread().getName() + " DONE!");
        }
        // this.dispose();
        if (jobResultSuccessful && (exceptionOccurred == null)) {
            getListenerForwarder().completed();
        } else {
            getListenerForwarder().failed(exceptionOccurred);
        }
    }

    return null;
}