List of usage examples for java.lang InterruptedException getCause
public synchronized Throwable getCause()
From source file:mondrian.olap.Util.java
/**
 * Calls {@link java.util.concurrent.Future#get()} and converts any
 * throwable into a non-checked exception.
 *
 * @param future Future to wait on
 * @param message Message to qualify the wrapped exception
 * @param <T> Result type
 * @return Result of the future
 */
public static <T> T safeGet(Future<T> future, String message) {
    try {
        return future.get();
    } catch (InterruptedException e) {
        // Restore the interrupt flag before wrapping: swallowing it would
        // hide the interruption from callers further up the stack.
        Thread.currentThread().interrupt();
        throw newError(e, message);
    } catch (ExecutionException e) {
        // Unwrap the task's failure: rethrow unchecked throwables as-is,
        // wrap checked causes in the project's error type.
        final Throwable cause = e.getCause();
        if (cause instanceof RuntimeException) {
            throw (RuntimeException) cause;
        } else if (cause instanceof Error) {
            throw (Error) cause;
        } else {
            throw newError(cause, message);
        }
    }
}
From source file:com.sun.faban.harness.webclient.CLIServlet.java
private void sendLogs(String[] reqC, HttpServletResponse response) throws ServletException, IOException { if (reqC.length < 2) { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Missing RunId."); return;//from w w w. j a v a 2s . co m } RunId runId = new RunId(reqC[1]); boolean[] options = new boolean[2]; options[TAIL] = false; options[FOLLOW] = false; for (int i = 2; i < reqC.length; i++) { if ("tail".equals(reqC[i])) { options[TAIL] = true; } else if ("follow".equals(reqC[i])) { options[FOLLOW] = true; } else { response.sendError(HttpServletResponse.SC_BAD_REQUEST, "Invalid option \"" + reqC[i] + "\"."); ; return; } } File logFile = new File(Config.OUT_DIR + runId, "log.xml"); String status = null; response.setContentType("text/plain"); PrintWriter out = response.getWriter(); while (!logFile.exists()) { String[] pending = RunQ.listPending(); if (pending == null) { response.sendError(HttpServletResponse.SC_NOT_FOUND, "RunId " + runId + " not found"); return; } boolean queued = false; for (String run : pending) { if (run.equals(runId.toString())) { if (status == null) { status = "QUEUED"; out.println(status); response.flushBuffer(); } queued = true; try { Thread.sleep(1000); // Check back in one sec. } catch (InterruptedException e) { //Noop, just look it up again. } break; } } if (!queued) { // Either never queued or deleted from queue. // Check for 10x, 100ms each to allow for start time. for (int i = 0; i < 10; i++) { if (logFile.exists()) { status = "STARTED"; break; } try { Thread.sleep(100); } catch (InterruptedException e) { logger.log(Level.WARNING, "Interrupted checking existence of log file."); } } if (!"STARTED".equals(status)) { if ("QUEUED".equals(status)) { // was queued before status = "DELETED"; out.println(status); out.flush(); out.close(); return; } else { // Never queued or just removed. 
response.sendError(HttpServletResponse.SC_NOT_FOUND, "RunId " + runId + " not found"); return; } } } } LogOutputHandler handler = new LogOutputHandler(response, options); InputStream logInput; if (options[FOLLOW]) { // The XMLInputStream reads streaming XML and does not EOF. XMLInputStream input = new XMLInputStream(logFile); input.addEOFListener(handler); logInput = input; } else { logInput = new FileInputStream(logFile); } try { SAXParserFactory sFact = SAXParserFactory.newInstance(); sFact.setFeature("http://xml.org/sax/features/validation", false); sFact.setFeature("http://apache.org/xml/features/" + "allow-java-encodings", true); sFact.setFeature("http://apache.org/xml/features/nonvalidating/" + "load-dtd-grammar", false); sFact.setFeature("http://apache.org/xml/features/nonvalidating/" + "load-external-dtd", false); SAXParser parser = sFact.newSAXParser(); parser.parse(logInput, handler); handler.xmlComplete = true; // If we get here, the XML is good. } catch (ParserConfigurationException e) { throw new ServletException(e); } catch (SAXParseException e) { Throwable t = e.getCause(); // If it is caused by an IOException, we'll just throw it. if (t != null) { if (t instanceof IOException) throw (IOException) t; else if (options[FOLLOW]) throw new ServletException(t); } else if (options[FOLLOW]) { throw new ServletException(e); } } catch (SAXException e) { throw new ServletException(e); } finally { if (options[TAIL] && !options[FOLLOW]) // tail not yet printed handler.eof(); } }
From source file:org.kuali.test.runner.execution.HttpRequestOperationExecution.java
/** * /*from w w w . j a v a 2s . c om*/ * @param configuration * @param platform * @param testWrapper * @throws TestException */ @Override public void execute(KualiTestConfigurationDocument.KualiTestConfiguration configuration, Platform platform, KualiTestWrapper testWrapper) throws TestException { HtmlRequestOperation reqop = getOperation().getHtmlRequestOperation(); try { try { int delay = configuration.getDefaultTestWaitInterval(); if (testWrapper.getUseTestEntryTimes()) { delay = reqop.getDelay(); } Thread.sleep(delay); } catch (InterruptedException ex) { } ; TestExecutionContext tec = getTestExecutionContext(); tec.setCurrentOperationIndex(Integer.valueOf(getOperation().getIndex())); tec.setCurrentTest(testWrapper); WebRequest request = new WebRequest(new URL(reqop.getUrl()), HttpMethod.valueOf(reqop.getMethod())); request.setAdditionalHeader(Constants.TEST_OPERATION_INDEX, "" + getOperation().getIndex()); boolean multiPart = Utils.isMultipart(reqop); boolean urlFormEncoded = Utils.isUrlFormEncoded(reqop); if (reqop.getRequestHeaders() != null) { for (RequestHeader hdr : reqop.getRequestHeaders().getHeaderArray()) { if (HttpHeaders.CONTENT_TYPE.equals(hdr.getName())) { if (!multiPart) { request.setAdditionalHeader(hdr.getName(), hdr.getValue()); } } else { request.setAdditionalHeader(hdr.getName(), hdr.getValue()); } } } boolean requestSubmitted = false; if (request.getHttpMethod().equals(HttpMethod.POST)) { String params = Utils.getContentParameterFromRequestOperation(reqop); List<NameValuePair> nvplist = new ArrayList<NameValuePair>(); if (StringUtils.isNotBlank(params)) { if (urlFormEncoded) { nvplist = tec.getWebClient() .getUpdatedParameterList(Utils.getNameValuePairsFromUrlEncodedParams(params)); } else if (multiPart) { nvplist = tec.getWebClient() .getUpdatedParameterList(Utils.getNameValuePairsFromMultipartParams(params)); } } tec.setLastHttpSubmitElementName(null); HtmlElement submit = null; // this is a hack to handle KC backdoor login - no submit 
is available // so the logic attempts to wait for a specified time to find the submit // which really slows down the test if (!isBackdoorLogin(request)) { submit = tec.getWebClient().findFormSubmitElement(nvplist); } // see if we can find a submit element, if we can then // use click() call to submit if (submit != null) { tec.getWebClient().populateFormElements(tec, nvplist); tec.setLastHttpSubmitElementName(getSubmitElementName(submit)); submit.click(); requestSubmitted = true; } else { request.setRequestParameters(nvplist); } } // if we have not loaded web request to this point - load now if (!requestSubmitted) { tec.getWebClient().getPage(request); } } catch (IOException ex) { Throwable t = ex.getCause(); LOG.error(ex.toString(), ex); if ((t != null) && (t instanceof TestException)) { throw (TestException) t; } else { String uri = Constants.UNKNOWN; if (reqop != null) { uri = reqop.getUrl(); } throw new TestException("An IOException occured while processing http request: " + uri + ", error: " + ex.toString(), getOperation(), ex); } } }
From source file:org.apache.hadoop.hbase.regionserver.HRegion.java
/**
 * Opens all column-family stores for this region in parallel and records,
 * per family, the maximum sequence id seen, returning the overall maximum
 * after replaying any recovered edits.
 *
 * @param reporter progress reporter passed to edit replay (may be used to
 *                 signal cancellation)
 * @param status   monitored task used for status reporting
 * @return the largest sequence id across all stores and recovered edits
 * @throws IOException if a store fails to open or edit replay fails;
 *         InterruptedIOException (a subtype) if opening is interrupted
 */
private long initializeRegionStores(final CancelableProgressable reporter, MonitoredTask status)
        throws IOException, UnsupportedEncodingException {
    // Load in all the HStores.
    long maxSeqId = -1;
    // initialized to -1 so that we pick up MemstoreTS from column families
    long maxMemstoreTS = -1;
    if (!htableDescriptor.getFamilies().isEmpty()) {
        // initialize the thread pool for opening stores in parallel.
        ThreadPoolExecutor storeOpenerThreadPool = getStoreOpenAndCloseThreadPool(
                "StoreOpener-" + this.getRegionInfo().getShortNameToLog());
        CompletionService<HStore> completionService = new ExecutorCompletionService<HStore>(
                storeOpenerThreadPool);
        // initialize each store in parallel
        for (final HColumnDescriptor family : htableDescriptor.getFamilies()) {
            status.setStatus("Instantiating store for column family " + family);
            completionService.submit(new Callable<HStore>() {
                @Override
                public HStore call() throws IOException {
                    return instantiateHStore(family);
                }
            });
        }
        boolean allStoresOpened = false;
        try {
            // Collect each opened store as it completes, tracking the
            // per-family and overall maximum sequence ids / memstore TS.
            for (int i = 0; i < htableDescriptor.getFamilies().size(); i++) {
                Future<HStore> future = completionService.take();
                HStore store = future.get();
                this.stores.put(store.getColumnFamilyName().getBytes(), store);
                long storeMaxSequenceId = store.getMaxSequenceId();
                maxSeqIdInStores.put(store.getColumnFamilyName().getBytes(), storeMaxSequenceId);
                if (maxSeqId == -1 || storeMaxSequenceId > maxSeqId) {
                    maxSeqId = storeMaxSequenceId;
                }
                long maxStoreMemstoreTS = store.getMaxMemstoreTS();
                if (maxStoreMemstoreTS > maxMemstoreTS) {
                    maxMemstoreTS = maxStoreMemstoreTS;
                }
            }
            allStoresOpened = true;
        } catch (InterruptedException e) {
            // Translate to an IOException subtype so the declared contract
            // holds; the original interrupt is preserved as the cause.
            throw (InterruptedIOException) new InterruptedIOException().initCause(e);
        } catch (ExecutionException e) {
            throw new IOException(e.getCause());
        } finally {
            storeOpenerThreadPool.shutdownNow();
            if (!allStoresOpened) {
                // something went wrong, close all opened stores
                LOG.error("Could not initialize all stores for the region=" + this);
                for (Store store : this.stores.values()) {
                    try {
                        store.close();
                    } catch (IOException e) {
                        LOG.warn(e.getMessage());
                    }
                }
            }
        }
    }
    mvcc.initialize(maxMemstoreTS + 1);
    // Recover any edits if available.
    maxSeqId = Math.max(maxSeqId,
            replayRecoveredEditsIfAny(this.fs.getRegionDir(), maxSeqIdInStores, reporter, status));
    return maxSeqId;
}
From source file:org.apache.hadoop.hbase.regionserver.HRegion.java
/**
 * Closes the region: disables compactions/flushes, optionally pre-flushes,
 * drains the memstore (unless aborting), then closes every store in
 * parallel and collects their store files per column family.
 *
 * @param abort  true if the server is aborting; skips memstore flushes
 * @param status monitored task used for progress/status reporting
 * @return map of family name to its store files, or null if the region was
 *         already closed (SplitTransaction handles the null)
 * @throws IOException if a flush or store close fails;
 *         DroppedSnapshotException if repeated flushes cannot drain memory
 */
private Map<byte[], List<StoreFile>> doClose(final boolean abort, MonitoredTask status) throws IOException {
    if (isClosed()) {
        LOG.warn("Region " + this + " already closed");
        return null;
    }
    if (coprocessorHost != null) {
        status.setStatus("Running coprocessor pre-close hooks");
        this.coprocessorHost.preClose(abort);
    }
    status.setStatus("Disabling compacts and flushes for region");
    synchronized (writestate) {
        // Disable compacting and flushing by background threads for this
        // region.
        writestate.writesEnabled = false;
        LOG.debug("Closing " + this + ": disabling compactions & flushes");
        waitForFlushesAndCompactions();
    }
    // If we were not just flushing, is it worth doing a preflush...one
    // that will clear out of the bulk of the memstore before we put up
    // the close flag?
    if (!abort && worthPreFlushing()) {
        status.setStatus("Pre-flushing region before close");
        LOG.info("Running close preflush of " + this.getRegionNameAsString());
        try {
            internalFlushcache(status);
        } catch (IOException ioe) {
            // Failed to flush the region. Keep going.
            status.setStatus("Failed pre-flush " + this + "; " + ioe.getMessage());
        }
    }
    this.closing.set(true);
    status.setStatus("Disabling writes for close");
    // block waiting for the lock for closing
    lock.writeLock().lock();
    try {
        if (this.isClosed()) {
            status.abort("Already got closed by another process");
            // SplitTransaction handles the null
            return null;
        }
        LOG.debug("Updates disabled for region " + this);
        // Don't flush the cache if we are aborting
        if (!abort) {
            int flushCount = 0;
            // Keep flushing until the memstore is empty; give up (and
            // throw) after 5 extra attempts rather than risk losing data.
            while (this.getMemstoreSize().get() > 0) {
                try {
                    if (flushCount++ > 0) {
                        int actualFlushes = flushCount - 1;
                        if (actualFlushes > 5) {
                            // If we tried 5 times and are unable to clear memory, abort
                            // so we do not lose data
                            throw new DroppedSnapshotException("Failed clearing memory after " + actualFlushes
                                    + " attempts on region: " + Bytes.toStringBinary(getRegionName()));
                        }
                        LOG.info("Running extra flush, " + actualFlushes + " (carrying snapshot?) " + this);
                    }
                    internalFlushcache(status);
                } catch (IOException ioe) {
                    status.setStatus("Failed flush " + this + ", putting online again");
                    synchronized (writestate) {
                        writestate.writesEnabled = true;
                    }
                    // Have to throw to upper layers. I can't abort server from here.
                    throw ioe;
                }
            }
        }
        Map<byte[], List<StoreFile>> result = new TreeMap<byte[], List<StoreFile>>(Bytes.BYTES_COMPARATOR);
        if (!stores.isEmpty()) {
            // initialize the thread pool for closing stores in parallel.
            ThreadPoolExecutor storeCloserThreadPool = getStoreOpenAndCloseThreadPool(
                    "StoreCloserThread-" + this.getRegionNameAsString());
            CompletionService<Pair<byte[], Collection<StoreFile>>> completionService =
                    new ExecutorCompletionService<Pair<byte[], Collection<StoreFile>>>(storeCloserThreadPool);
            // close each store in parallel
            for (final Store store : stores.values()) {
                assert abort || store.getFlushableSize() == 0;
                completionService.submit(new Callable<Pair<byte[], Collection<StoreFile>>>() {
                    @Override
                    public Pair<byte[], Collection<StoreFile>> call() throws IOException {
                        return new Pair<byte[], Collection<StoreFile>>(store.getFamily().getName(),
                                store.close());
                    }
                });
            }
            try {
                // Gather each store's files, grouping by column family.
                for (int i = 0; i < stores.size(); i++) {
                    Future<Pair<byte[], Collection<StoreFile>>> future = completionService.take();
                    Pair<byte[], Collection<StoreFile>> storeFiles = future.get();
                    List<StoreFile> familyFiles = result.get(storeFiles.getFirst());
                    if (familyFiles == null) {
                        familyFiles = new ArrayList<StoreFile>();
                        result.put(storeFiles.getFirst(), familyFiles);
                    }
                    familyFiles.addAll(storeFiles.getSecond());
                }
            } catch (InterruptedException e) {
                // Translate to an IOException subtype, preserving the
                // interrupt as the cause.
                throw (InterruptedIOException) new InterruptedIOException().initCause(e);
            } catch (ExecutionException e) {
                throw new IOException(e.getCause());
            } finally {
                storeCloserThreadPool.shutdownNow();
            }
        }
        this.closed.set(true);
        if (memstoreSize.get() != 0)
            LOG.error("Memstore size is " + memstoreSize.get());
        if (coprocessorHost != null) {
            status.setStatus("Running coprocessor post-close hooks");
            this.coprocessorHost.postClose(abort);
        }
        if (this.metricsRegion != null) {
            this.metricsRegion.close();
        }
        if (this.metricsRegionWrapper != null) {
            Closeables.closeQuietly(this.metricsRegionWrapper);
        }
        status.markComplete("Closed");
        LOG.info("Closed " + this);
        return result;
    } finally {
        lock.writeLock().unlock();
    }
}
From source file:org.rhq.plugins.storage.StorageNodeComponent.java
/**
 * Moves the storage node's data, commit-log, and saved-caches directories
 * to new locations: stops the node, copies each directory whose target
 * changed, rewrites cassandra.yaml, restarts the node, and purges either
 * the old directories (on success) or the new copies (on rollback).
 *
 * @param params operation parameters: "CommitLogLocation",
 *               "SavedCachesLocation", and list "AllDataFileLocations"
 *               (only a single data directory is supported)
 * @return the operation result, with an error message set on any failure
 */
private OperationResult moveDataFiles(Configuration params) {
    ResourceContext context = getResourceContext();
    OperationResult result = new OperationResult();
    log.info("Preparing to move " + this + "'s datafiles to new locations");
    String newCommitLogDirectory = params.getSimpleValue("CommitLogLocation");
    String newSavedCachesDirectory = params.getSimpleValue("SavedCachesLocation");
    PropertyList allDataFileLocations = params.getList("AllDataFileLocations");
    String newDataFileDirectory = null;
    if (allDataFileLocations != null) {
        List<String> dataDirectories = new LinkedList<String>();
        for (Property property : allDataFileLocations.getList()) {
            PropertySimple dataFileLocation = (PropertySimple) property;
            dataDirectories.add(dataFileLocation.getStringValue());
        }
        // Only a single active data directory is supported by this tool.
        if (dataDirectories.size() > 1) {
            result.setErrorMessage(
                    "This process does not support more than one active directory for StorageNode data locations. ");
            return result;
        }
        newDataFileDirectory = dataDirectories.get(0);
    }
    if (newCommitLogDirectory == null && newSavedCachesDirectory == null && newDataFileDirectory == null) {
        return new OperationResult("No new directories were specified");
    }
    log.info("Stopping storage node");
    OperationResult shutdownResult = super.shutdownNode();
    // CassandraNodeComponent.shutDownNode() does draining before shutting down
    try {
        waitForNodeToGoDown();
    } catch (InterruptedException e) {
        log.error("Received " + e.getLocalizedMessage() + " while waiting for storage node "
                + getResourceContext().getResourceKey() + " to shutdown", e);
        result.setErrorMessage("Failed to stop the storage node. The storage node must be shut down in order "
                + "for the changes made by this operation to take effect. The attempt to stop shut down the storage "
                + "node failed with this error: " + shutdownResult.getErrorMessage());
        return result;
    }
    if (shutdownResult.getErrorMessage() != null) {
        log.error("Failed to stop storage node " + getResourceContext().getResourceKey() + ". The storage node "
                + "must be shut down in order for the changes made by this operation to take effect.");
        result.setErrorMessage("Failed to stop the storage node. The storage node must be shut down in order "
                + "for the changes made by this operation to take effect. The attempt to stop shut down the storage "
                + "node failed with this error: " + shutdownResult.getErrorMessage());
        return result;
    }
    log.info("Storage node shutdown, preparing to move datafiles");
    // Track the source dirs (purged on success) and the newly created
    // copies (purged on rollback).
    List<String> originalDataDirectories = new LinkedList<String>();
    List<String> createdDataDirectories = new LinkedList<String>();
    ConfigEditor configEditor = getYamlConfigEditor();
    try {
        configEditor.load();
        // Moving the data directory
        List<String> dataFileDirectories = configEditor.getDataFileDirectories();
        if (dataFileDirectories.size() > 1) {
            // We do not support this scenario
            log.error(
                    "More than one datadirectory configured for the StorageNode. This operation mode is not supported by this tool");
            StringBuilder pathListBuilder = new StringBuilder();
            for (String dataFileDir : dataFileDirectories) {
                pathListBuilder.append(dataFileDir).append(", ");
            }
            result.setErrorMessage("Could not proceed with moving datafiles from " + pathListBuilder.toString()
                    + "this tool does not support" + " multiple datafile paths.");
            return result;
        } else if (dataFileDirectories.size() == 1) {
            String currentDataFileLocation = dataFileDirectories.get(0);
            boolean dataFilesMoved = copyDataDirectoryIfChanged(currentDataFileLocation, newDataFileDirectory);
            if (dataFilesMoved) {
                originalDataDirectories.add(currentDataFileLocation);
                createdDataDirectories.add(newDataFileDirectory);
                List<String> newDataFileDirectories = new LinkedList<String>();
                newDataFileDirectories.add(newDataFileDirectory);
                configEditor.setDataFileDirectories(newDataFileDirectories);
            }
        }
        // In theory we wouldn't need to copy these, as draining should empty these
        String currentCommitLogDirectory = configEditor.getCommitLogDirectory();
        boolean commitLogCopied = copyDataDirectoryIfChanged(currentCommitLogDirectory, newCommitLogDirectory);
        if (commitLogCopied) {
            originalDataDirectories.add(currentCommitLogDirectory);
            createdDataDirectories.add(newCommitLogDirectory);
            configEditor.setCommitLogDirectory(newCommitLogDirectory);
        }
        // Not so dangerous if we lose these, but lets try to keep them
        String currentSavedCachesDirectory = configEditor.getSavedCachesDirectory();
        boolean savedCachesCopied = copyDataDirectoryIfChanged(currentSavedCachesDirectory,
                newSavedCachesDirectory);
        if (savedCachesCopied) {
            originalDataDirectories.add(currentSavedCachesDirectory);
            createdDataDirectories.add(newSavedCachesDirectory);
            configEditor.setSavedCachesDirectory(newSavedCachesDirectory);
        }
        log.info(this + " datafiles have been moved. Restarting storage node...");
        OperationResult startResult = startNode();
        if (startResult.getErrorMessage() != null) {
            log.error("Failed to restart storage node:\n" + startResult.getErrorMessage());
            result.setErrorMessage("Failed to restart storage node:\n" + startResult.getErrorMessage());
            // rollback here
            configEditor.restore();
            purgeDirectories(createdDataDirectories);
        } else {
            result.setSimpleResult("The storage node was succesfully updated.");
            // Commit changes, remove old directories
            configEditor.save(); // This can still throw an exception, in which case we need to rollback
            purgeDirectories(originalDataDirectories);
        }
        return result;
    } catch (ConfigEditorException e) {
        log.error("There was an error while trying to update " + configEditor.getConfigFile(), e);
        if (e.getCause() instanceof YAMLException) {
            log.info("Attempting to restore " + configEditor.getConfigFile());
            try {
                configEditor.restore();
                purgeDirectories(createdDataDirectories);
                result.setErrorMessage("Failed to update configuration file [" + configEditor.getConfigFile()
                        + "]: " + ThrowableUtil.getAllMessages(e.getCause()));
            } catch (ConfigEditorException e1) {
                log.error("Failed to restore " + configEditor.getConfigFile()
                        + ". A copy of the file prior to any modifications " + "can be found at "
                        + configEditor.getBackupFile());
                result.setErrorMessage("There was an error updating [" + configEditor.getConfigFile()
                        + "] and undoing the changes " + "Failed. A copy of the file can be found at "
                        + configEditor.getBackupFile() + ". See the " + "agent logs for more details");
            }
        }
        // Report the node's StorageService operation mode so the operator
        // can tell whether the node came back in a usable state.
        EmsConnection emsConnection = getEmsConnection();
        EmsBean storageService = emsConnection.getBean("org.apache.cassandra.db:type=StorageService");
        EmsAttribute attribute = storageService.getAttribute("OperationMode");
        String operationMode = (String) attribute.refresh();
        if (!operationMode.equals("NORMAL")) {
            result.setErrorMessage(
                    "Bootstrapping " + getHost() + " failed. The StorageService is reporting " + operationMode
                            + " for its operation mode but it should be reporting NORMAL. The StorageService "
                            + "operation mode is not to be confused with the Storage Node operation mode.");
        }
        return result;
    } catch (IOException e) {
        log.error("Moving datafiles failed", e);
        purgeDirectories(createdDataDirectories);
        configEditor.restore();
        result.setErrorMessage("Failed to move all the files to new destinations, " + e.getLocalizedMessage()
                + ". StorageService was left offline" + ", investigate before restarting the node");
        // OperationResult startResult = startNode();
        // return the StorageNode online, but what if IOException was out of diskspace?
        return result;
    }
}
From source file:de.fiz.ddb.aas.auxiliaryoperations.ThreadOrganisationUpdate.java
/**
 * Updates an existing organisation entry in the LDAP directory: resolves
 * pending geocoding results, validates the RDN against the stored entry,
 * applies privilege additions/removals, converts the organisation to LDAP
 * attributes, and writes the changes (REMOVE then REPLACE) to the
 * appropriate directory subtree.
 *
 * @throws NameNotFoundException          if the organisation does not exist
 *                                        or its RDN does not match
 * @throws AASUnauthorizedException       if the performer lacks permission
 * @throws AttributeModificationException if nothing was modified or the
 *                                        attribute update fails
 * @throws ExecutionException             wrapping LDAP/connection failures
 */
private void updateOrg() throws NameNotFoundException, AASUnauthorizedException, AttributeModificationException,
        ExecutionException {
    boolean vChange = false;
    InitialLdapContext vCtx = null;
    try {
        if (this._oldOrganisation == null) {
            LOG.log(Level.WARNING, "No such organization ''{0}'' with oid: ''{1}''.",
                    new Object[] { this._organisation.getDisplayName(), this._organisation.getOIDs() });
            throw new NameNotFoundException("No such organization '" + this._organisation.getDisplayName()
                    + "' with oid: '" + this._organisation.getOIDs() + "'.");
        }
        GeoAdresse vGeoAdresse;
        String vLocalDispalyName = null;
        if (_submit != null) {
            // At this point the "GeoLocationDisplayName" lookup has already
            // been started; wait for the full geocoding result.
            try {
                vGeoAdresse = _submit.get(10, TimeUnit.SECONDS);
                if (vGeoAdresse.getRequestStatus() == GeoRequestStatus.OK) {
                    this._organisation.getAddress().setLatitude(vGeoAdresse.getLatitude());
                    this._organisation.getAddress().setLongitude(vGeoAdresse.getLongitude());
                    this._organisation.getAddress()
                            .setLocationDisplayName(vGeoAdresse.getLocationDisplayName());
                } else {
                    LOG.log(Level.WARNING, "GeoRequestStatus: {0}, (organization id: {1})",
                            new Object[] { vGeoAdresse.getRequestStatus(), this._organisation.getOIDs() });
                }
            } catch (InterruptedException ex) {
                LOG.log(Level.WARNING,
                        "Geocoding request exeption for organization id: " + this._organisation.getOIDs(), ex);
            } catch (TimeoutException ex) {
                LOG.log(Level.WARNING,
                        "Geocoding request exeption for organization id: " + this._organisation.getOIDs(), ex);
            }
        } else if (_submitGeoLocDisplayName != null) {
            // Only the display-name lookup is pending; geocoding failures
            // here are non-fatal and merely logged.
            try {
                vLocalDispalyName = _submitGeoLocDisplayName.get(5, TimeUnit.SECONDS);
                this._organisation.getAddress().setLocationDisplayName(vLocalDispalyName);
                //LOG.info("LocalDisplayName='" + vLocalDispalyName + "'" + vLocalDispalyName + "'");
            } catch (InterruptedException ex) {
                LOG.log(Level.WARNING,
                        this._organisation.getOIDs() + " without location display name: " + ex.getMessage());
            } catch (ExecutionException ex) {
                LOG.log(Level.WARNING,
                        this._organisation.getOIDs() + " without location display name: " + ex.getMessage());
            } catch (TimeoutException ex) {
                LOG.log(Level.WARNING,
                        this._organisation.getOIDs() + " without location display name: " + ex.getMessage());
            }
        }
        LOG.info("newOIDs: '" + this._organisation.getOIDs() + "'");
        LOG.info("oldOIDs: '" + this._oldOrganisation.getOIDs() + "'");
        if (this._organisation.getOrgRDN() == null) {
            // Otherwise an unset RDN can cause a crash later on...
            this._organisation.setOrgRDN(this._oldOrganisation.getOrgRDN());
        } else if (!this._organisation.getOrgRDN().equals(this._oldOrganisation.getOrgRDN())) {
            // Something is wrong here: the RDNs don't match...
            LOG.log(Level.WARNING,
                    "The organization ''{0}'' has RDN: ''{1}'', but there exist an organization ''{0}'' with RDN: ''{2}''!",
                    new Object[] { this._organisation.getId(), this._organisation.getOrgRDN(),
                            this._oldOrganisation.getOrgRDN() });
            throw new NameNotFoundException("No such organization '" + this._organisation.getDisplayName()
                    + "' with oid: '" + this._organisation.getOIDs() + "'.");
        }
        if (this.isPrivilegesUpdate()) {
            // Compute the privilege delta and apply removals/additions.
            Set<PrivilegeEnum> removePrivileges = this.privilegeDiff(this._organisation.getPrivilegesSet(),
                    this._oldOrganisation.getPrivilegesSet());
            Set<PrivilegeEnum> addPrivileges = this.privilegeDiff(this._oldOrganisation.getPrivilegesSet(),
                    this._organisation.getPrivilegesSet());
            if (!removePrivileges.isEmpty() || !addPrivileges.isEmpty()) {
                vChange = true;
                for (PrivilegeEnum p : removePrivileges) {
                    ThreadSinglePrivilegeDelete threadSinglePrivilegeDelete = new ThreadSinglePrivilegeDelete(p,
                            this._organisation, this._performer);
                    threadSinglePrivilegeDelete.call();
                }
                for (PrivilegeEnum p : addPrivileges) {
                    ThreadSinglePrivilegeCreate threadSinglePrivilegeCreate = new ThreadSinglePrivilegeCreate(p,
                            this._organisation, this._performer);
                    threadSinglePrivilegeCreate.call();
                }
            }
        }
        Attributes orgAttributes = new BasicAttributes(true);
        Attributes orgRemoveAttributes = new BasicAttributes(true);
        if (vChange = this.convertOrganizationToLdapOrgAttrsForUpdate(this._organisation, this._oldOrganisation,
                orgAttributes, orgRemoveAttributes, getPerformer())) {
            // -- If any changes, the status is set to 'revised'
            // but not if status will be explicitly changed or by a update operation on Licenses directory
            if (!isChangeOfStatus() && !isUpdatingOfLicensedOrgs()) {
                if ((ConstEnumOrgStatus.approved.equals(this._organisation.getStatus()))) {
                    // -- ...then go retrospectively into "revised" status:
                    this._organisation.setStatus(ConstEnumOrgStatus.revised);
                    orgAttributes.put(Constants.ldap_ddbOrg_Status,
                            String.valueOf(this._organisation.getStatus().name()));
                }
            }
        }
        // ---------------------------------------------------------------------
        if (vChange) {
            // -- Save changes to the corresponding directory:
            StringBuilder vOrgEntryDN = (isUpdatingOfLicensedOrgs()
                    ? this.getLicensedOrgsDN(this._organisation.getOIDs())
                    : this.getOrgDN(this._organisation.getOIDs()));
            LOG.log(Level.INFO, "DEBUG-Info: destination OrgEntryDN = '" + vOrgEntryDN + "'");
            vCtx = LDAPConnector.getSingletonInstance().takeCtx();
            // Removals must be applied before replacements.
            if (orgRemoveAttributes.size() > 0) {
                vCtx.modifyAttributes(vOrgEntryDN.toString(), DirContext.REMOVE_ATTRIBUTE, orgRemoveAttributes);
            }
            vCtx.modifyAttributes(vOrgEntryDN.toString(), DirContext.REPLACE_ATTRIBUTE, orgAttributes);
        } else {
            throw new AttributeModificationException(
                    "Not modified: oid = '" + this._organisation.getOIDs() + "'");
        }
    } catch (RejectedExecutionException ex) {
        LOG.log(Level.SEVERE, "RejectedExecutionException\n{0}", ex);
        throw new ExecutionException(ex.getMessage(), ex.getCause());
    } catch (IllegalAccessException ex) {
        LOG.log(Level.SEVERE, "Connection-Error\n{0}", ex);
        throw new ExecutionException(ex.getMessage(), ex.getCause());
    } catch (NameNotFoundException ex) {
        LOG.log(Level.WARNING, null, ex);
        throw ex;
    } catch (AttributeModificationException ex) {
        LOG.log(Level.WARNING, "AttributeModificationException\n{0}", ex.getMessage());
        // !!!!AttributeModificationException extends NamingExeption:
        //throw ex;
        throw new AttributeModificationException(ex.getMessage());
    } catch (NamingException ne) {
        LOG.log(Level.SEVERE, "NamingException\n{0}", ne);
        throw new ExecutionException(ne.getMessage(), ne.getCause());
    } finally {
        // Always return the LDAP context to the connector pool.
        if (vCtx != null) {
            try {
                LDAPConnector.getSingletonInstance().putCtx(vCtx);
            } catch (Exception ex) {
                LOG.log(Level.SEVERE, "Exception", ex);
            }
        }
    }
}
From source file:net.pms.dlna.DLNAMediaInfo.java
/**
 * Probes the given media and populates this object's metadata fields
 * (size, duration, bitrate, codecs, audio/subtitle tracks, thumbnail).
 *
 * Dispatches by {@code type}: audio files are read with jaudiotagger,
 * images with Sanselan, and everything else is probed by parsing the
 * console output of an FFmpeg thumbnail run. Marks this object as
 * parsed when done (or after waiting out a concurrent parse).
 *
 * @param inputFile the media to inspect; may wrap a File or a stream-like source
 * @param ext       the detected format; {@code AudioAsVideo} forces the audio path
 * @param type      one of the {@code Format} type constants (AUDIO/IMAGE/VIDEO/...)
 * @param thumbOnly when true, only thumbnail-related work is done and most
 *                  metadata extraction is skipped
 */
public void parse(InputFile inputFile, Format ext, int type, boolean thumbOnly) {
    // If another thread is already parsing this media, wait up to ~5 seconds
    // for it to finish; after 5 polls give up and mark as parsed anyway.
    int i = 0;
    while (isParsing()) {
        if (i == 5) {
            setMediaparsed(true);
            break;
        }
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // NOTE(review): interrupt is swallowed without re-interrupting
            // the thread (Thread.currentThread().interrupt()) — the polling
            // loop just continues.
        }
        i++;
    }
    if (isMediaparsed()) {
        return;
    }
    if (inputFile != null) {
        // Record the media size from the backing file when available,
        // otherwise from the InputFile wrapper itself.
        if (inputFile.getFile() != null) {
            setSize(inputFile.getFile().length());
        } else {
            setSize(inputFile.getSize());
        }
        ProcessWrapperImpl pw = null;
        boolean ffmpeg_parsing = true;

        // ---- Audio path: read tags/headers with jaudiotagger; on success
        // ---- FFmpeg probing is skipped entirely.
        if (type == Format.AUDIO || ext instanceof AudioAsVideo) {
            ffmpeg_parsing = false;
            DLNAMediaAudio audio = new DLNAMediaAudio();
            if (inputFile.getFile() != null) {
                try {
                    AudioFile af = AudioFileIO.read(inputFile.getFile());
                    AudioHeader ah = af.getAudioHeader();
                    if (ah != null && !thumbOnly) {
                        int length = ah.getTrackLength();
                        int rate = ah.getSampleRateAsNumber();
                        // jaudiotagger reports 24-bit FLAC via the encoding-type string.
                        if (ah.getEncodingType().toLowerCase().contains("flac 24")) {
                            audio.setBitsperSample(24);
                        }
                        audio.setSampleFrequency("" + rate);
                        setDuration((double) length);
                        setBitrate((int) ah.getBitRateAsNumber());
                        // Default to stereo, then refine from the channel string.
                        audio.getAudioProperties().setNumberOfChannels(2);
                        if (ah.getChannels() != null && ah.getChannels().toLowerCase().contains("mono")) {
                            audio.getAudioProperties().setNumberOfChannels(1);
                        } else if (ah.getChannels() != null && ah.getChannels().toLowerCase().contains("stereo")) {
                            audio.getAudioProperties().setNumberOfChannels(2);
                        } else if (ah.getChannels() != null) {
                            audio.getAudioProperties().setNumberOfChannels(Integer.parseInt(ah.getChannels()));
                        }
                        audio.setCodecA(ah.getEncodingType().toLowerCase());
                        // Strip trailing "(windows media ...)" qualifier from the codec name.
                        if (audio.getCodecA().contains("(windows media")) {
                            audio.setCodecA(audio.getCodecA()
                                    .substring(0, audio.getCodecA().indexOf("(windows media")).trim());
                        }
                    }
                    Tag t = af.getTag();
                    if (t != null) {
                        // Prefer embedded artwork; otherwise optionally fetch cover
                        // art online (method 1 = Amazon, otherwise Discogs).
                        if (t.getArtworkList().size() > 0) {
                            setThumb(t.getArtworkList().get(0).getBinaryData());
                        } else {
                            if (configuration.getAudioThumbnailMethod() > 0) {
                                setThumb(CoverUtil.get().getThumbnailFromArtistAlbum(
                                        configuration.getAudioThumbnailMethod() == 1 ? CoverUtil.AUDIO_AMAZON
                                                : CoverUtil.AUDIO_DISCOGS,
                                        audio.getArtist(), audio.getAlbum()));
                            }
                        }
                        if (!thumbOnly) {
                            audio.setAlbum(t.getFirst(FieldKey.ALBUM));
                            audio.setArtist(t.getFirst(FieldKey.ARTIST));
                            audio.setSongname(t.getFirst(FieldKey.TITLE));
                            // Year/track/genre are "unimportant": any parse failure
                            // is logged at debug and otherwise ignored.
                            String y = t.getFirst(FieldKey.YEAR);
                            try {
                                if (y.length() > 4) {
                                    y = y.substring(0, 4);
                                }
                                audio.setYear(Integer.parseInt(((y != null && y.length() > 0) ? y : "0")));
                                y = t.getFirst(FieldKey.TRACK);
                                audio.setTrack(Integer.parseInt(((y != null && y.length() > 0) ? y : "1")));
                                audio.setGenre(t.getFirst(FieldKey.GENRE));
                            } catch (Throwable e) {
                                logger.debug("Error parsing unimportant metadata: " + e.getMessage());
                            }
                        }
                    }
                } catch (Throwable e) {
                    logger.debug("Error parsing audio file: {} - {}", e.getMessage(),
                            e.getCause() != null ? e.getCause().getMessage() : "");
                    ffmpeg_parsing = false;
                }
                // Fall back to the file name when no title tag was found.
                if (audio.getSongname() == null || audio.getSongname().length() == 0) {
                    audio.setSongname(inputFile.getFile().getName());
                }
                if (!ffmpeg_parsing) {
                    getAudioTracksList().add(audio);
                }
            }
        }

        // ---- Image path: dimensions and EXIF via Sanselan; FFmpeg skipped
        // ---- unless Sanselan throws.
        if (type == Format.IMAGE && inputFile.getFile() != null) {
            try {
                ffmpeg_parsing = false;
                ImageInfo info = Sanselan.getImageInfo(inputFile.getFile());
                setWidth(info.getWidth());
                setHeight(info.getHeight());
                setBitsPerPixel(info.getBitsPerPixel());
                String formatName = info.getFormatName();
                if (formatName.startsWith("JPEG")) {
                    setCodecV("jpg");
                    IImageMetadata meta = Sanselan.getMetadata(inputFile.getFile());
                    if (meta != null && meta instanceof JpegImageMetadata) {
                        JpegImageMetadata jpegmeta = (JpegImageMetadata) meta;
                        TiffField tf = jpegmeta.findEXIFValue(TiffConstants.EXIF_TAG_MODEL);
                        if (tf != null) {
                            setModel(tf.getStringValue().trim());
                        }
                        tf = jpegmeta.findEXIFValue(TiffConstants.EXIF_TAG_EXPOSURE_TIME);
                        if (tf != null) {
                            // Exposure stored in milliseconds.
                            setExposure((int) (1000 * tf.getDoubleValue()));
                        }
                        tf = jpegmeta.findEXIFValue(TiffConstants.EXIF_TAG_ORIENTATION);
                        if (tf != null) {
                            setOrientation(tf.getIntValue());
                        }
                        tf = jpegmeta.findEXIFValue(TiffConstants.EXIF_TAG_ISO);
                        if (tf != null) {
                            // Galaxy Nexus jpg pictures may contain multiple values, take the first
                            int[] isoValues = tf.getIntArrayValue();
                            setIso(isoValues[0]);
                        }
                    }
                } else if (formatName.startsWith("PNG")) {
                    setCodecV("png");
                } else if (formatName.startsWith("GIF")) {
                    setCodecV("gif");
                } else if (formatName.startsWith("TIF")) {
                    setCodecV("tiff");
                }
                setContainer(getCodecV());
            } catch (Throwable e) {
                // Sanselan failed; ffmpeg_parsing stays false here — the log
                // message says "switching to FFmpeg" but the flag was already
                // cleared above. NOTE(review): verify this is the intent.
                logger.info("Error parsing image ({}) with Sanselan, switching to FFmpeg.",
                        inputFile.getFile().getAbsolutePath());
            }
        }

        // ---- Thumbnail generation for non-audio/non-video media via
        // ---- the Thumbnailator library (temporary jpg on disk).
        if (configuration.getImageThumbnailsEnabled() && type != Format.VIDEO && type != Format.AUDIO) {
            try {
                File thumbDir = new File(configuration.getTempFolder(), THUMBNAIL_DIRECTORY_NAME);
                logger.trace("Generating thumbnail for: {}", inputFile.getFile().getAbsolutePath());
                if (!thumbDir.exists() && !thumbDir.mkdirs()) {
                    logger.warn("Could not create thumbnail directory: {}", thumbDir.getAbsolutePath());
                } else {
                    File thumbFile = new File(thumbDir, inputFile.getFile().getName() + ".jpg");
                    String thumbFilename = thumbFile.getAbsolutePath();
                    logger.trace("Creating (temporary) thumbnail: {}", thumbFilename);
                    // Create the thumbnail image using the Thumbnailator library
                    final Builder<File> thumbnail = Thumbnails.of(inputFile.getFile());
                    thumbnail.size(320, 180);
                    thumbnail.outputFormat("jpg");
                    thumbnail.outputQuality(1.0f);
                    try {
                        thumbnail.toFile(thumbFilename);
                    } catch (IIOException e) {
                        logger.debug("Error generating thumbnail for: " + inputFile.getFile().getName());
                        logger.debug("The full error was: " + e);
                    }
                    File jpg = new File(thumbFilename);
                    if (jpg.exists()) {
                        // NOTE(review): available() is not a reliable file size and a
                        // single read() is not guaranteed to fill the buffer; also the
                        // stream is not closed in a finally block. Same pattern recurs
                        // twice more below.
                        InputStream is = new FileInputStream(jpg);
                        int sz = is.available();
                        if (sz > 0) {
                            setThumb(new byte[sz]);
                            is.read(getThumb());
                        }
                        is.close();
                        if (!jpg.delete()) {
                            jpg.deleteOnExit();
                        }
                    }
                }
            } catch (UnsupportedFormatException ufe) {
                logger.debug("Thumbnailator does not support the format of {}: {}",
                        inputFile.getFile().getAbsolutePath(), ufe.getMessage());
            } catch (Exception e) {
                logger.debug("Thumbnailator could not generate a thumbnail for: {}",
                        inputFile.getFile().getAbsolutePath(), e);
            }
        }

        // ---- FFmpeg path: run an FFmpeg thumbnail process and parse its
        // ---- console output for container/duration/bitrate and stream info.
        if (ffmpeg_parsing) {
            if (!thumbOnly || !configuration.isUseMplayerForVideoThumbs()) {
                pw = getFFmpegThumbnail(inputFile);
            }
            String input = "-";
            boolean dvrms = false;
            if (inputFile.getFile() != null) {
                input = ProcessUtil.getShortFileNameIfWideChars(inputFile.getFile().getAbsolutePath());
                dvrms = inputFile.getFile().getAbsolutePath().toLowerCase().endsWith("dvr-ms");
            }
            if (!ffmpeg_failure && !thumbOnly) {
                if (input.equals("-")) {
                    input = "pipe:";
                }
                // matchs == true while we are inside the "Input #..." section
                // that corresponds to our input (as opposed to "Output").
                boolean matchs = false;
                ArrayList<String> lines = (ArrayList<String>) pw.getResults();
                int langId = 0;
                int subId = 0;
                // Parallel iterator used to peek at the lines following a
                // stream line (for its "Metadata:" block).
                ListIterator<String> FFmpegMetaData = lines.listIterator();
                for (String line : lines) {
                    FFmpegMetaData.next();
                    line = line.trim();
                    if (line.startsWith("Output")) {
                        matchs = false;
                    } else if (line.startsWith("Input")) {
                        if (line.indexOf(input) > -1) {
                            matchs = true;
                            // Container name sits between "Input #0, " and the next comma.
                            setContainer(line.substring(10, line.indexOf(",", 11)).trim());
                        } else {
                            matchs = false;
                        }
                    } else if (matchs) {
                        if (line.indexOf("Duration") > -1) {
                            StringTokenizer st = new StringTokenizer(line, ",");
                            while (st.hasMoreTokens()) {
                                String token = st.nextToken().trim();
                                if (token.startsWith("Duration: ")) {
                                    String durationStr = token.substring(10);
                                    // Pad the fractional part to 3 digits before parsing.
                                    int l = durationStr.substring(durationStr.indexOf(".") + 1).length();
                                    if (l < 4) {
                                        durationStr = durationStr + "00".substring(0, 3 - l);
                                    }
                                    if (durationStr.indexOf("N/A") > -1) {
                                        setDuration(null);
                                    } else {
                                        setDuration(parseDurationString(durationStr));
                                    }
                                } else if (token.startsWith("bitrate: ")) {
                                    String bitr = token.substring(9);
                                    int spacepos = bitr.indexOf(" ");
                                    if (spacepos > -1) {
                                        String value = bitr.substring(0, spacepos);
                                        String unit = bitr.substring(spacepos + 1);
                                        setBitrate(Integer.parseInt(value));
                                        // Normalize kb/s and mb/s to bits per second.
                                        if (unit.equals("kb/s")) {
                                            setBitrate(1024 * getBitrate());
                                        }
                                        if (unit.equals("mb/s")) {
                                            setBitrate(1048576 * getBitrate());
                                        }
                                    }
                                }
                            }
                        } else if (line.indexOf("Audio:") > -1) {
                            // --- Audio stream line: language, TS id, codec,
                            // --- sample rate, channels, bit depth.
                            StringTokenizer st = new StringTokenizer(line, ",");
                            int a = line.indexOf("(");
                            int b = line.indexOf("):", a);
                            DLNAMediaAudio audio = new DLNAMediaAudio();
                            audio.setId(langId++);
                            if (a > -1 && b > a) {
                                audio.setLang(line.substring(a + 1, b));
                            } else {
                                audio.setLang(DLNAMediaLang.UND);
                            }
                            // Get TS IDs
                            a = line.indexOf("[0x");
                            b = line.indexOf("]", a);
                            if (a > -1 && b > a + 3) {
                                String idString = line.substring(a + 3, b);
                                try {
                                    audio.setId(Integer.parseInt(idString, 16));
                                } catch (NumberFormatException nfe) {
                                    logger.debug("Error parsing Stream ID: " + idString);
                                }
                            }
                            while (st.hasMoreTokens()) {
                                String token = st.nextToken().trim();
                                Integer nChannels;
                                if (token.startsWith("Stream")) {
                                    audio.setCodecA(token.substring(token.indexOf("Audio: ") + 7));
                                } else if (token.endsWith("Hz")) {
                                    audio.setSampleFrequency(token.substring(0, token.indexOf("Hz")).trim());
                                } else if ((nChannels = audioChannelLayout.get(token)) != null) {
                                    audio.getAudioProperties().setNumberOfChannels(nChannels);
                                } else if (token.matches("\\d+(?:\\s+channels?)")) {
                                    // implicitly anchored at both ends e.g. ^ ... $
                                    // setNumberOfChannels(String) parses the number out of the string
                                    audio.getAudioProperties().setNumberOfChannels(token);
                                } else if (token.equals("s32")) {
                                    audio.setBitsperSample(32);
                                } else if (token.equals("s24")) {
                                    audio.setBitsperSample(24);
                                } else if (token.equals("s16")) {
                                    audio.setBitsperSample(16);
                                }
                            }
                            // Peek at the following lines for a "Metadata:" block
                            // holding the track title (used as flavor).
                            int FFmpegMetaDataNr = FFmpegMetaData.nextIndex();
                            if (FFmpegMetaDataNr > -1) {
                                line = lines.get(FFmpegMetaDataNr);
                            }
                            if (line.indexOf("Metadata:") > -1) {
                                FFmpegMetaDataNr = FFmpegMetaDataNr + 1;
                                line = lines.get(FFmpegMetaDataNr);
                                // Metadata entries are indented; stop at the first
                                // non-indented line. NOTE(review): in the original
                                // (uncollapsed) source this literal may contain more
                                // than one space — confirm against upstream.
                                while (line.indexOf(" ") == 0) {
                                    if (line.toLowerCase().indexOf("title :") > -1) {
                                        int aa = line.indexOf(": ");
                                        int bb = line.length();
                                        if (aa > -1 && bb > aa) {
                                            audio.setFlavor(line.substring(aa + 2, bb));
                                            break;
                                        }
                                    } else {
                                        FFmpegMetaDataNr = FFmpegMetaDataNr + 1;
                                        line = lines.get(FFmpegMetaDataNr);
                                    }
                                }
                            }
                            getAudioTracksList().add(audio);
                        } else if (line.indexOf("Video:") > -1) {
                            // --- Video stream line: codec, frame rate, resolution.
                            StringTokenizer st = new StringTokenizer(line, ",");
                            while (st.hasMoreTokens()) {
                                String token = st.nextToken().trim();
                                if (token.startsWith("Stream")) {
                                    setCodecV(token.substring(token.indexOf("Video: ") + 7));
                                } else if ((token.indexOf("tbc") > -1 || token.indexOf("tb(c)") > -1)) {
                                    // A/V sync issues with newest FFmpeg, due to the new tbr/tbn/tbc outputs
                                    // Priority to tb(c)
                                    String frameRateDoubleString = token.substring(0, token.indexOf("tb"))
                                            .trim();
                                    try {
                                        // tbc taken into account only if different than tbr
                                        if (!frameRateDoubleString.equals(getFrameRate())) {
                                            Double frameRateDouble = Double.parseDouble(frameRateDoubleString);
                                            setFrameRate(
                                                    String.format(Locale.ENGLISH, "%.2f", frameRateDouble / 2));
                                        }
                                    } catch (NumberFormatException nfe) {
                                        // Could happen if tbc is "1k" or something like that, no big deal
                                        logger.debug(
                                                "Could not parse frame rate \"" + frameRateDoubleString + "\"");
                                    }
                                } else if ((token.indexOf("tbr") > -1 || token.indexOf("tb(r)") > -1)
                                        && getFrameRate() == null) {
                                    setFrameRate(token.substring(0, token.indexOf("tb")).trim());
                                } else if ((token.indexOf("fps") > -1 || token.indexOf("fps(r)") > -1)
                                        && getFrameRate() == null) {
                                    // dvr-ms ?
                                    setFrameRate(token.substring(0, token.indexOf("fps")).trim());
                                } else if (token.indexOf("x") > -1) {
                                    // "WIDTHxHEIGHT [extra]" resolution token.
                                    String resolution = token.trim();
                                    if (resolution.indexOf(" [") > -1) {
                                        resolution = resolution.substring(0, resolution.indexOf(" ["));
                                    }
                                    try {
                                        setWidth(Integer
                                                .parseInt(resolution.substring(0, resolution.indexOf("x"))));
                                    } catch (NumberFormatException nfe) {
                                        logger.debug("Could not parse width from \""
                                                + resolution.substring(0, resolution.indexOf("x")) + "\"");
                                    }
                                    try {
                                        setHeight(Integer
                                                .parseInt(resolution.substring(resolution.indexOf("x") + 1)));
                                    } catch (NumberFormatException nfe) {
                                        logger.debug("Could not parse height from \""
                                                + resolution.substring(resolution.indexOf("x") + 1) + "\"");
                                    }
                                }
                            }
                        } else if (line.indexOf("Subtitle:") > -1 && !line.contains("tx3g")) {
                            // --- Subtitle stream line: type, language, title.
                            DLNAMediaSubtitle lang = new DLNAMediaSubtitle();
                            lang.setType((line.contains("dvdsub") && Platform.isWindows() ? SubtitleType.VOBSUB
                                    : SubtitleType.UNKNOWN));
                            int a = line.indexOf("(");
                            int b = line.indexOf("):", a);
                            if (a > -1 && b > a) {
                                lang.setLang(line.substring(a + 1, b));
                            } else {
                                lang.setLang(DLNAMediaLang.UND);
                            }
                            lang.setId(subId++);
                            // Same look-ahead for a "Metadata:" title block as the
                            // audio branch above.
                            int FFmpegMetaDataNr = FFmpegMetaData.nextIndex();
                            if (FFmpegMetaDataNr > -1) {
                                line = lines.get(FFmpegMetaDataNr);
                            }
                            if (line.indexOf("Metadata:") > -1) {
                                FFmpegMetaDataNr = FFmpegMetaDataNr + 1;
                                line = lines.get(FFmpegMetaDataNr);
                                while (line.indexOf(" ") == 0) {
                                    if (line.toLowerCase().indexOf("title :") > -1) {
                                        int aa = line.indexOf(": ");
                                        int bb = line.length();
                                        if (aa > -1 && bb > aa) {
                                            lang.setFlavor(line.substring(aa + 2, bb));
                                            break;
                                        }
                                    } else {
                                        FFmpegMetaDataNr = FFmpegMetaDataNr + 1;
                                        line = lines.get(FFmpegMetaDataNr);
                                    }
                                }
                            }
                            getSubtitleTracksList().add(lang);
                        }
                    }
                }
            }
            // H.264-in-mpegts streams often report no duration via FFmpeg;
            // fall back to scanning the MPEG file directly.
            if (!thumbOnly && getContainer() != null && inputFile.getFile() != null
                    && getContainer().equals("mpegts") && isH264() && getDurationInSeconds() == 0) {
                // Parse the duration
                try {
                    int length = MpegUtil.getDurationFromMpeg(inputFile.getFile());
                    if (length > 0) {
                        setDuration((double) length);
                    }
                } catch (IOException e) {
                    logger.trace("Error retrieving length: " + e.getMessage());
                }
            }
            // Optionally grab a video thumbnail with MPlayer instead
            // (not for dvr-ms files) and clean up its temp output.
            if (configuration.isUseMplayerForVideoThumbs() && type == Format.VIDEO && !dvrms) {
                try {
                    getMplayerThumbnail(inputFile);
                    String frameName = "" + inputFile.hashCode();
                    frameName = configuration.getTempFolder() + "/mplayer_thumbs/" + frameName
                            + "00000001/00000001.jpg";
                    // MPlayer is said to mangle commas in paths; normalize to match.
                    frameName = frameName.replace(',', '_');
                    File jpg = new File(frameName);
                    if (jpg.exists()) {
                        InputStream is = new FileInputStream(jpg);
                        int sz = is.available();
                        if (sz > 0) {
                            setThumb(new byte[sz]);
                            is.read(getThumb());
                        }
                        is.close();
                        if (!jpg.delete()) {
                            jpg.deleteOnExit();
                        }
                        // Try and retry
                        if (!jpg.getParentFile().delete() && !jpg.getParentFile().delete()) {
                            logger.debug("Failed to delete \"" + jpg.getParentFile().getAbsolutePath() + "\"");
                        }
                    }
                } catch (IOException e) {
                    logger.debug("Caught exception", e);
                }
            }
            // Last resort for video: take the FFmpeg process's image output
            // as the thumbnail and overlay a "1080p"/"720p" label on it.
            if (type == Format.VIDEO && pw != null && getThumb() == null) {
                InputStream is;
                try {
                    is = pw.getInputStream(0);
                    int sz = is.available();
                    if (sz > 0) {
                        setThumb(new byte[sz]);
                        is.read(getThumb());
                    }
                    is.close();
                    if (sz > 0 && !net.pms.PMS.isHeadless()) {
                        BufferedImage image = ImageIO.read(new ByteArrayInputStream(getThumb()));
                        if (image != null) {
                            Graphics g = image.getGraphics();
                            g.setColor(Color.WHITE);
                            g.setFont(new Font("Arial", Font.PLAIN, 14));
                            int low = 0;
                            if (getWidth() > 0) {
                                // Width-based heuristic for the resolution label.
                                if (getWidth() == 1920 || getWidth() == 1440) {
                                    g.drawString("1080p", 0, low += 18);
                                } else if (getWidth() == 1280) {
                                    g.drawString("720p", 0, low += 18);
                                }
                            }
                            ByteArrayOutputStream out = new ByteArrayOutputStream();
                            ImageIO.write(image, "jpeg", out);
                            setThumb(out.toByteArray());
                        }
                    }
                } catch (IOException e) {
                    logger.debug("Error while decoding thumbnail: " + e.getMessage());
                }
            }
        }
        // NOTE(review): this is an overload taking arguments, but the name
        // shadows Object.finalize() — consider renaming upstream.
        finalize(type, inputFile);
        setMediaparsed(true);
    }
}