List of usage examples for java.lang.IllegalArgumentException: getClass()

getClass() is declared in java.lang.Object and returns the runtime class of the object it is called on. In the examples below it is typically invoked as e.getClass().getName() on a caught exception, in order to log or report the exception's concrete type.

Method signature (inherited from java.lang.Object):

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();
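Before the project code, here is a minimal, self-contained sketch of that pattern. The class and method names in it are illustrative only and are not taken from any of the projects listed below.

public class GetClassOnExceptionExample {

  // Illustrative helper: rejects a negative argument with an IllegalArgumentException.
  static double checkedSqrt(double value) {
    if (value < 0) {
      throw new IllegalArgumentException("value must be non-negative: " + value);
    }
    return Math.sqrt(value);
  }

  public static void main(String[] args) {
    try {
      checkedSqrt(-1.0);
    } catch (Exception e) {
      // getClass() returns the runtime class of the caught exception
      // (java.lang.IllegalArgumentException here), even though the static type is Exception.
      System.out.println("Caught " + e.getClass().getName() + ": " + e.getMessage());
    }
  }
}

Running the sketch prints "Caught java.lang.IllegalArgumentException: value must be non-negative: -1.0"; this e.getClass().getName() reporting idiom is exactly what appears in the error handling of each example below.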
From source file:org.opencastproject.util.ZipUtilTest.java
/** Check the behavior before bad arguments for the zip signature File[], String */
@Test
public void badInputZipFileStr() throws Exception {
  File destFile = new File(destDir.getCanonicalPath(), "badInputFileStr.zip");

  try {
    // Null input File array, correct destination filename
    try {
      ZipUtil.zip((File[]) null, destFile.getCanonicalPath(), true, ZipUtil.DEFAULT_COMPRESSION);
      logger.error("Zip should fail when input File array is null");
      Assert.fail("Zip should fail when input File array is null");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting null input File array (File, String): OK");
    }

    // Null some of the input files, correct destination filename
    try {
      ZipUtil.zip(new File[] { srcFile, null, nestedSrcFile }, destFile.getCanonicalPath(), true,
          ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when any input file is null");
      Assert.fail("Zip should fail when any input file is null");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting null input filename (File, String): OK");
    }

    // Non-existing some of the input files, correct destination filename
    try {
      ZipUtil.zip(new File[] { srcFile, dummieFile, nestedSrcFile }, destFile.getCanonicalPath(), true,
          ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when any input file does not exist");
      Assert.fail("Zip should fail when any input file does not exist");
    } catch (FileNotFoundException e) {
      logger.debug("Detecting non-existing input filename (File, String): OK");
    }

    // Correct input Files, null destination filename
    try {
      ZipUtil.zip(new File[] { srcFile, nestedSrcFile }, (String) null, true, ZipUtil.DEFAULT_COMPRESSION);
      logger.error("Zip should fail when destination filename is null");
      Assert.fail("Zip should fail when destination filename is null");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting null destination filename (File, String): OK");
    }

    // Correct input Files, empty destination filename
    try {
      ZipUtil.zip(new File[] { srcFile, nestedSrcFile }, "", true, ZipUtil.DEFAULT_COMPRESSION);
      logger.error("Zip should fail when destination filename is empty");
      Assert.fail("Zip should fail when destination filename is empty");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting empty destination filename (File, String): OK");
    }

    // Correct input filenames, existing destination filename
    try {
      ZipUtil.zip(new File[] { srcFile, nestedSrcFile }, sampleZip.getCanonicalPath(), true,
          ZipUtil.DEFAULT_COMPRESSION);
      logger.error("Zip should fail when destination filename already exists");
      Assert.fail("Zip should fail when destination filename already exists");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting existing destination File (File, String): OK");
    }

    // Correct input Files, invalid name for the zip file
    try {
      ZipUtil.zip(new File[] { srcFile, nestedSrcFile }, dummieFile.getCanonicalPath(), true,
          ZipUtil.DEFAULT_COMPRESSION);
      logger.error("Zip should fail when the destination filename does not represent a zip file");
      Assert.fail("Zip should fail when the destination filename does not represent a zip file");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting destination filename not representing a valid zip file (File, String): OK");
    }
  } catch (Exception e) {
    logger.error("Another exception was expected, but got {} instead: {}", e.getClass().getName(), e.getMessage());
    Assert.fail("Another exception was expected, but got " + e.getClass().getName() + " instead: " + e.getMessage());
  }
}
From source file:org.opencastproject.util.ZipUtilTest.java
/** Check the behavior with bad arguments for the zip signature String[], String */
@Test
public void badInputZipStrStr() throws Exception {
  File destFile = new File(destDir, "badInputStrStr.zip");

  try {
    // Null input filenames array, correct destination filename
    try {
      ZipUtil.zip((String[]) null, destFile.getCanonicalPath(), true, ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when input String array is null");
      Assert.fail("Zip should fail when input String array is null");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting null input String array (String, String): OK");
    }

    // Null some of the input filenames, correct destination filename
    try {
      ZipUtil.zip(new String[] { srcFile.getCanonicalPath(), null, nestedSrcFile.getCanonicalPath() },
          destFile.getCanonicalPath(), true, ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when any input filename is null");
      Assert.fail("Zip should fail when any input filename is null");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting null input filename (String, String): OK");
    }

    // Non-existing some of the input filenames, correct destination filename
    try {
      ZipUtil.zip(new String[] { srcFile.getCanonicalPath(), dummieFile.getCanonicalPath(),
          nestedSrcFile.getCanonicalPath() }, destFile.getCanonicalPath(), true, ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when any input filename does not exist");
      Assert.fail("Zip should fail when any input filename does not exist");
    } catch (FileNotFoundException e) {
      logger.debug("Detecting non-existing input filename (String, String): OK");
    }

    // Correct input filenames array, null destination filename
    try {
      ZipUtil.zip(new String[] { srcFile.getCanonicalPath(), nestedSrcFile.getCanonicalPath() }, (String) null,
          true, ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when destination filename is null");
      Assert.fail("Zip should fail when destination filename is null");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting null destination filename (String, String): OK");
    }

    // Correct input filenames array, empty destination filename
    try {
      ZipUtil.zip(new String[] { srcFile.getCanonicalPath(), nestedSrcFile.getCanonicalPath() }, "", true,
          ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when destination filename is empty");
      Assert.fail("Zip should fail when destination filename is empty");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting empty destination filename (String, String): OK");
    }

    // Correct input filenames, existing destination filename
    try {
      ZipUtil.zip(new String[] { srcFile.getCanonicalPath(), nestedSrcFile.getCanonicalPath() },
          sampleZip.getCanonicalPath(), true, ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when destination filename already exists");
      Assert.fail("Zip should fail when destination filename already exists");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting existing destination filename (String, String): OK");
    }

    // Correct input filenames, invalid name for the zip file
    try {
      ZipUtil.zip(new String[] { srcFile.getCanonicalPath(), nestedSrcFile.getCanonicalPath() },
          dummieFile.getCanonicalPath(), true, ZipUtil.NO_COMPRESSION);
      logger.error("Zip should fail when the destination filename does not represent a zip file");
      Assert.fail("Zip should fail when the destination filename does not represent a zip file");
    } catch (IllegalArgumentException e) {
      logger.debug("Detecting destination filename not representing a valid zip file (String, String): OK");
    }
  } catch (Exception e) {
    logger.error("Another exception was expected, but got {} instead: {}", e.getClass().getName(), e.getMessage());
    Assert.fail("Another exception was expected, but got " + e.getClass().getName() + " instead: " + e.getMessage());
  }
}
From source file:com.opengamma.masterdb.batch.DbBatchWriter.java
@SuppressWarnings("unchecked") public synchronized void addJobResultsInTransaction(TransactionStatus transactionStatus, ObjectId runId, ViewComputationResultModel resultModel) { ArgumentChecker.notNull(runId, "runId"); ArgumentChecker.notNull(resultModel, "resultModel"); final long riskRunId = extractOid(runId); ArgumentChecker.notNull(riskRunId, "riskRunId"); Map<ComputeFailureKey, ComputeFailure> computeFailureCache = _computeFailureCacheByRunId.get(riskRunId); Map<Pair<Long, Long>, StatusEntry> statusCache = _statusCacheByRunId.get(riskRunId); Map<ValueSpecification, BatchResultWriterFailure> errorCache = populateErrorCache(computeFailureCache, resultModel.getAllResults()); RiskRun run = _riskRunsByIds.get(riskRunId); if (run.getSnapshotMode().equals(SnapshotMode.WRITE_THROUGH)) { addComputedValuesToMarketDataInTransaction(run.getMarketData().getObjectId(), resultModel.getAllMarketData()); }//from w ww .java 2 s.c o m for (String calcConfigName : resultModel.getCalculationConfigurationNames()) { ViewCalculationResultModel viewCalculationResultModel = resultModel .getCalculationResult(calcConfigName); final Set<ComputationTargetSpecification> successfulTargets = newHashSet(); final Set<ComputationTargetSpecification> failedTargets = newHashSet(); List<SqlParameterSource> targetProperties = newArrayList(); List<SqlParameterSource> successes = newArrayList(); List<SqlParameterSource> failures = newArrayList(); List<SqlParameterSource> failureReasons = newArrayList(); Instant evalInstant = Instant.now(); long calcConfId = _calculationConfigurations.get(calcConfigName); for (final ComputationTargetSpecification targetSpec : viewCalculationResultModel.getAllTargets()) { final long computationTargetId = _computationTargets.get(targetSpec); boolean specFailures = false; for (final ComputedValueResult computedValue : viewCalculationResultModel .getAllValues(targetSpec)) { ResultConverter<Object> resultConverter = null; if (!(computedValue.getValue() instanceof MissingValue)) { try { resultConverter = (ResultConverter<Object>) _resultConverterCache .getConverter(computedValue.getValue()); } catch (IllegalArgumentException e) { s_logger.info("No converter for value of type " + computedValue.getValue().getClass() + " for " + computedValue.getSpecification()); } } final ValueSpecification specification = computedValue.getSpecification(); if (!_riskValueSpecifications.containsKey(specification)) { s_logger.error("Unexpected result specification " + specification + ". Result cannot be written. 
Result value was " + computedValue.getValue()); continue; } final long valueSpecificationId = _riskValueSpecifications.get(specification); final long functionUniqueId = getFunctionUniqueIdInTransaction( specification.getFunctionUniqueId()).getId(); final long computeNodeId = getOrCreateComputeNode(computedValue.getComputeNodeId()).getId(); if (resultConverter != null && computedValue.getInvocationResult() == InvocationResult.SUCCESS) { s_logger.debug("Writing value {} for value spec {}", computedValue.getValue(), specification); Map<String, Double> valueAsDoublesMap = resultConverter .convert(computedValue.getSpecification().getValueName(), computedValue.getValue()); for (Map.Entry<String, Double> valueEntry : valueAsDoublesMap.entrySet()) { final String valueName = valueEntry.getKey(); final Double doubleValue = ensureDatabasePrecision(valueEntry.getValue()); final long successId = nextId(RSK_SEQUENCE_NAME); successes.add(getSuccessArgs(successId, riskRunId, evalInstant, calcConfId, computationTargetId, valueSpecificationId, functionUniqueId, computeNodeId, valueName, doubleValue)); } } else { s_logger.info("Writing failure for {} with invocation result {}, {} ", newArray(computedValue.getSpecification(), computedValue.getInvocationResult(), computedValue.getAggregatedExecutionLog())); specFailures = true; final long failureId = nextId(RSK_SEQUENCE_NAME); failures.add(getFailureArgs(failureId, riskRunId, evalInstant, calcConfId, computationTargetId, valueSpecificationId, functionUniqueId, computeNodeId, specification.getValueName())); BatchResultWriterFailure cachedFailure = errorCache.get(specification); if (cachedFailure != null) { for (long computeFailureId : cachedFailure.getComputeFailureIds()) { ArgumentChecker.notNull(computeFailureId, "computeFailureId"); final long failureReasonId = nextId(RSK_SEQUENCE_NAME); failureReasons .add(getFailureReasonArgs(failureReasonId, failureId, computeFailureId)); } } } } StatusEntry.Status status = getStatus(statusCache, calcConfigName, targetSpec); if (specFailures || status == StatusEntry.Status.FAILURE) { successfulTargets.remove(targetSpec); failedTargets.add(targetSpec); } else { successfulTargets.add(targetSpec); } // storing target data ComputationTarget computationTarget = _computationTargetResolver.resolve(targetSpec, VersionCorrection.LATEST); Object targetValue = computationTarget.getValue(); if (targetValue instanceof Bean) { Bean bean = (Bean) targetValue; for (String propertyName : bean.propertyNames()) { Property<Object> property = bean.property(propertyName); final long targetPropertyId = nextId(RSK_SEQUENCE_NAME); targetProperties.add(getTargetPropertyArgs(targetPropertyId, computationTargetId, propertyName, property.get() == null ? "NULL" : property.get().toString())); } } } if (successes.isEmpty() && failures.isEmpty() && failureReasons.isEmpty() && successfulTargets.isEmpty() && failedTargets.isEmpty()) { s_logger.debug("Nothing to write to DB for {}", resultModel); return; } Object preSuccessSavepoint = transactionStatus.createSavepoint(); try { getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskSuccess"), successes.toArray(new DbMapSqlParameterSource[successes.size()])); } catch (Exception e) { s_logger.error("Failed to write successful calculations to batch database. 
Converting to failures.", e); transactionStatus.rollbackToSavepoint(preSuccessSavepoint); if (!successes.isEmpty()) { String exceptionClass = e.getClass().getName(); String exceptionMsg = e.getMessage(); final StringBuilder buffer = new StringBuilder(); for (StackTraceElement element : e.getStackTrace()) { buffer.append(element.toString()).append("\n"); } final String stackTrace = buffer.toString(); for (SqlParameterSource success : successes) { failures.add(convertSuccessToFailure(success)); final long failureId = getId(success); final long functionId = getFunctionId(success); ComputeFailureKey computeFailureKey = new ComputeFailureKey(String.valueOf(functionId), exceptionClass, exceptionMsg, stackTrace); ComputeFailure computeFailure = getComputeFailureFromDb(computeFailureCache, computeFailureKey); final long failureReasonId = nextId(RSK_SEQUENCE_NAME); failureReasons .add(getFailureReasonArgs(failureReasonId, failureId, computeFailure.getId())); } failedTargets.addAll(successfulTargets); successes.clear(); successfulTargets.clear(); targetProperties.clear(); } } Object preTargetPropertiesFailureSavepoint = transactionStatus.createSavepoint(); try { getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertTargetProperties"), targetProperties.toArray(new DbMapSqlParameterSource[targetProperties.size()])); } catch (Exception e) { s_logger.error("Failed to write target properties to batch database", e); transactionStatus.rollbackToSavepoint(preTargetPropertiesFailureSavepoint); } Object preFailureSavepoint = transactionStatus.createSavepoint(); try { getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskFailure"), failures.toArray(new DbMapSqlParameterSource[failures.size()])); getJdbcTemplate().batchUpdate(getElSqlBundle().getSql("InsertRiskFailureReason"), failureReasons.toArray(new DbMapSqlParameterSource[failureReasons.size()])); } catch (Exception e) { s_logger.error("Failed to write failures to batch database", e); transactionStatus.rollbackToSavepoint(preFailureSavepoint); } updateStatusEntries(riskRunId, statusCache, calcConfigName, StatusEntry.Status.SUCCESS, successfulTargets); updateStatusEntries(riskRunId, statusCache, calcConfigName, StatusEntry.Status.FAILURE, failedTargets); } }
From source file:com.evolveum.midpoint.provisioning.ucf.impl.connid.ConnectorInstanceConnIdImpl.java
@Override
public int count(ObjectClassComplexTypeDefinition objectClassDefinition, final ObjectQuery query,
    PagedSearchCapabilityType pagedSearchCapabilityType, StateReporter reporter, OperationResult parentResult)
    throws CommunicationException, GenericFrameworkException, SchemaException, UnsupportedOperationException {

  // Result type for this operation
  final OperationResult result = parentResult.createSubresult(ConnectorInstance.class.getName() + ".count");
  result.addArbitraryObjectAsParam("objectClass", objectClassDefinition);
  result.addContext("connector", connectorType);

  if (objectClassDefinition == null) {
    result.recordFatalError("Object class not defined");
    throw new IllegalArgumentException("objectClass not defined");
  }

  ObjectClass icfObjectClass = connIdNameMapper.objectClassToIcf(objectClassDefinition, getSchemaNamespace(),
      connectorType, legacySchema);
  if (icfObjectClass == null) {
    IllegalArgumentException ex = new IllegalArgumentException("Unable to determine object class from QName "
        + objectClassDefinition + " while attempting to search objects by "
        + ObjectTypeUtil.toShortString(connectorType));
    result.recordFatalError("Unable to determine object class", ex);
    throw ex;
  }

  final boolean useConnectorPaging = pagedSearchCapabilityType != null;
  if (!useConnectorPaging) {
    throw new UnsupportedOperationException(
        "ConnectorInstanceIcfImpl.count operation is supported only in combination with connector-implemented paging");
  }

  OperationOptionsBuilder optionsBuilder = new OperationOptionsBuilder();
  optionsBuilder.setAttributesToGet(Name.NAME);
  optionsBuilder.setPagedResultsOffset(1);
  optionsBuilder.setPageSize(1);
  if (pagedSearchCapabilityType.getDefaultSortField() != null) {
    String orderByIcfName = connIdNameMapper.convertAttributeNameToIcf(
        pagedSearchCapabilityType.getDefaultSortField(), objectClassDefinition, "(default sorting field)");
    boolean isAscending = pagedSearchCapabilityType.getDefaultSortDirection() != OrderDirectionType.DESCENDING;
    optionsBuilder.setSortKeys(new SortKey(orderByIcfName, isAscending));
  }
  OperationOptions options = optionsBuilder.build();

  // Connector operation cannot create result for itself, so we need to
  // create result for it
  OperationResult icfResult = result.createSubresult(ConnectorFacade.class.getName() + ".search");
  icfResult.addArbitraryObjectAsParam("objectClass", icfObjectClass);
  icfResult.addContext("connector", connIdConnectorFacade.getClass());

  int retval;

  try {
    Filter filter = convertFilterToIcf(query, objectClassDefinition);
    final Holder<Integer> fetched = new Holder<>(0);

    ResultsHandler icfHandler = new ResultsHandler() {
      @Override
      public boolean handle(ConnectorObject connectorObject) {
        fetched.setValue(fetched.getValue() + 1); // actually, this should execute at most once
        return false;
      }
    };
    InternalMonitor.recordConnectorOperation("search");
    recordIcfOperationStart(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
    SearchResult searchResult = connIdConnectorFacade.search(icfObjectClass, filter, icfHandler, options);
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);

    if (searchResult == null || searchResult.getRemainingPagedResults() == -1) {
      throw new UnsupportedOperationException(
          "Connector does not seem to support paged searches or does not provide object count information");
    } else {
      retval = fetched.getValue() + searchResult.getRemainingPagedResults();
    }

    icfResult.recordSuccess();
  } catch (IntermediateException inex) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, inex);
    SchemaException ex = (SchemaException) inex.getCause();
    icfResult.recordFatalError(ex);
    result.recordFatalError(ex);
    throw ex;
  } catch (UnsupportedOperationException uoe) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, uoe);
    icfResult.recordFatalError(uoe);
    result.recordFatalError(uoe);
    throw uoe;
  } catch (Throwable ex) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, ex);
    Throwable midpointEx = processConnIdException(ex, this, icfResult);
    result.computeStatus();
    // Do some kind of acrobatics to do proper throwing of checked
    // exception
    if (midpointEx instanceof CommunicationException) {
      throw (CommunicationException) midpointEx;
    } else if (midpointEx instanceof GenericFrameworkException) {
      throw (GenericFrameworkException) midpointEx;
    } else if (midpointEx instanceof SchemaException) {
      throw (SchemaException) midpointEx;
    } else if (midpointEx instanceof RuntimeException) {
      throw (RuntimeException) midpointEx;
    } else if (midpointEx instanceof Error) {
      throw (Error) midpointEx;
    } else {
      throw new SystemException("Got unexpected exception: " + ex.getClass().getName() + ": " + ex.getMessage(), ex);
    }
  }

  if (result.isUnknown()) {
    result.recordSuccess();
  }
  return retval;
}
From source file:com.evolveum.midpoint.provisioning.ucf.impl.ConnectorInstanceIcfImpl.java
@Override
public int count(ObjectClassComplexTypeDefinition objectClassDefinition, final ObjectQuery query,
    PagedSearchCapabilityType pagedSearchCapabilityType, StateReporter reporter, OperationResult parentResult)
    throws CommunicationException, GenericFrameworkException, SchemaException, UnsupportedOperationException {

  // Result type for this operation
  final OperationResult result = parentResult.createSubresult(ConnectorInstance.class.getName() + ".count");
  result.addParam("objectClass", objectClassDefinition);
  result.addContext("connector", connectorType);

  if (objectClassDefinition == null) {
    result.recordFatalError("Object class not defined");
    throw new IllegalArgumentException("objectClass not defined");
  }

  ObjectClass icfObjectClass = icfNameMapper.objectClassToIcf(objectClassDefinition, getSchemaNamespace(),
      connectorType, legacySchema);
  if (icfObjectClass == null) {
    IllegalArgumentException ex = new IllegalArgumentException("Unable to determine object class from QName "
        + objectClassDefinition + " while attempting to search objects by "
        + ObjectTypeUtil.toShortString(connectorType));
    result.recordFatalError("Unable to determine object class", ex);
    throw ex;
  }

  final boolean useConnectorPaging = pagedSearchCapabilityType != null;
  if (!useConnectorPaging) {
    throw new UnsupportedOperationException(
        "ConnectorInstanceIcfImpl.count operation is supported only in combination with connector-implemented paging");
  }

  OperationOptionsBuilder optionsBuilder = new OperationOptionsBuilder();
  optionsBuilder.setAttributesToGet(Name.NAME);
  optionsBuilder.setPagedResultsOffset(1);
  optionsBuilder.setPageSize(1);
  if (pagedSearchCapabilityType.getDefaultSortField() != null) {
    String orderByIcfName = icfNameMapper.convertAttributeNameToIcf(
        pagedSearchCapabilityType.getDefaultSortField(), objectClassDefinition);
    boolean isAscending = pagedSearchCapabilityType.getDefaultSortDirection() != OrderDirectionType.DESCENDING;
    optionsBuilder.setSortKeys(new SortKey(orderByIcfName, isAscending));
  }
  OperationOptions options = optionsBuilder.build();

  // Connector operation cannot create result for itself, so we need to
  // create result for it
  OperationResult icfResult = result.createSubresult(ConnectorFacade.class.getName() + ".search");
  icfResult.addArbitraryObjectAsParam("objectClass", icfObjectClass);
  icfResult.addContext("connector", icfConnectorFacade.getClass());

  int retval;

  try {
    Filter filter = convertFilterToIcf(query, objectClassDefinition);
    final Holder<Integer> fetched = new Holder<>(0);

    ResultsHandler icfHandler = new ResultsHandler() {
      @Override
      public boolean handle(ConnectorObject connectorObject) {
        fetched.setValue(fetched.getValue() + 1); // actually, this should execute at most once
        return false;
      }
    };
    InternalMonitor.recordConnectorOperation("search");
    recordIcfOperationStart(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
    SearchResult searchResult = icfConnectorFacade.search(icfObjectClass, filter, icfHandler, options);
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);

    if (searchResult == null || searchResult.getRemainingPagedResults() == -1) {
      throw new UnsupportedOperationException(
          "Connector does not seem to support paged searches or does not provide object count information");
    } else {
      retval = fetched.getValue() + searchResult.getRemainingPagedResults();
    }

    icfResult.recordSuccess();
  } catch (IntermediateException inex) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, inex);
    SchemaException ex = (SchemaException) inex.getCause();
    icfResult.recordFatalError(ex);
    result.recordFatalError(ex);
    throw ex;
  } catch (UnsupportedOperationException uoe) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, uoe);
    icfResult.recordFatalError(uoe);
    result.recordFatalError(uoe);
    throw uoe;
  } catch (Throwable ex) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, ex);
    Throwable midpointEx = processIcfException(ex, this, icfResult);
    result.computeStatus();
    // Do some kind of acrobatics to do proper throwing of checked
    // exception
    if (midpointEx instanceof CommunicationException) {
      throw (CommunicationException) midpointEx;
    } else if (midpointEx instanceof GenericFrameworkException) {
      throw (GenericFrameworkException) midpointEx;
    } else if (midpointEx instanceof SchemaException) {
      throw (SchemaException) midpointEx;
    } else if (midpointEx instanceof RuntimeException) {
      throw (RuntimeException) midpointEx;
    } else if (midpointEx instanceof Error) {
      throw (Error) midpointEx;
    } else {
      throw new SystemException("Got unexpected exception: " + ex.getClass().getName() + ": " + ex.getMessage(), ex);
    }
  }

  if (result.isUnknown()) {
    result.recordSuccess();
  }
  return retval;
}
From source file:org.kawanfw.sql.servlet.ServerSqlDispatch.java
/**
 * Execute the client sent sql request
 *
 * @param request
 *            the http request
 * @param response
 *            the http response
 * @param servletContextTempDir
 *            The temp dir used by Servlets
 * @param commonsConfigurator
 *            the client commons http configurator specific class
 * @param fileConfigurator
 *            the client file http configurator specific class
 * @param sqlConfigurator
 *            the client sql http configurator specific class
 * @param connection
 *            the Sql Jdbc Connection
 * @throws IOException
 *             if any IOException occurs
 */
public void executeRequest(HttpServletRequest request, HttpServletResponse response, File servletContextTempDir,
    CommonsConfigurator commonsConfigurator, FileConfigurator fileConfigurator, SqlConfigurator sqlConfigurator)
    throws IOException {

  // Immediate catch if we are asking a file upload, because parameters are
  // in unknown sequence. We know it's a upload action if it's mime multipart
  if (ServletFileUpload.isMultipartContent(request)) {
    ServerFileUploadAction serverFileUploadAction = new ServerFileUploadAction();
    serverFileUploadAction.executeAction(request, response, servletContextTempDir, commonsConfigurator,
        fileConfigurator);
    return;
  }

  OutputStream out = null;

  try {
    debug("executeRequest Start");

    // Prepare the response
    response.setContentType("text/html");

    // Get the send string
    debug("ACTION retrieval");

    String action = null;

    // We must trap the IllegalArgumentException to rethrow properly to client
    // This happens if there is an encryption problem
    try {
      action = request.getParameter(Parameter.ACTION);
    } catch (IllegalArgumentException e) {
      debug("IllegalArgumentException : " + e.toString());
      out = response.getOutputStream();
      throw e;
    }

    String username = request.getParameter(Parameter.USERNAME);

    debug("Before if (action.equals(Action.LOGIN_ACTION");

    if (action.equals(Action.LOGIN_ACTION) || action.equals(Action.BEFORE_LOGIN_ACTION)) {
      ServerLoginActionSql serverLoginActionSql = new ServerLoginActionSql();
      serverLoginActionSql.executeAction(request, response, commonsConfigurator, sqlConfigurator, action);
      return;
    }

    debug("ACTION : " + action);

    // Redirect to Awake FILE if it's a File request (Blobs/Clobs)
    if (isActionForAwakeFile(action)) {
      ServerFileDispatch serverFileDispatch = new ServerFileDispatch();
      serverFileDispatch.executeRequest(request, response, servletContextTempDir, commonsConfigurator,
          fileConfigurator);
      return;
    }

    debug("After isActionForAwakeFile");

    out = response.getOutputStream();

    if (action == null || action.equals("")) {
      // out.println(HttpTransferOne.SEND_FAILED + SPACE + ERR_ACTION_NOT_SET);
      // out.println(TransferStatus.SEND_FAILED);
      // out.println(ERR_ACTION_NOT_SET);
      ServerSqlManager.writeLine(out, TransferStatus.SEND_FAILED);
      ServerSqlManager.writeLine(out, ERR_ACTION_NOT_SET);
      return;
    }

    debug("Before if (! ServerFileDispatch.isTokenValid(username, token, commonsConfigurator");

    String token = request.getParameter(Parameter.TOKEN);

    if (!ServerFileDispatch.isTokenValid(username, token, commonsConfigurator)) {
      // out.println(TransferStatus.SEND_OK);
      // out.println(ReturnCode.INVALID_LOGIN_OR_PASSWORD);
      ServerSqlManager.writeLine(out, TransferStatus.SEND_OK);
      ServerSqlManager.writeLine(out, ReturnCode.INVALID_LOGIN_OR_PASSWORD);
      return;
    }

    String connectionId = request.getParameter(ConnectionParms.CONNECTION_ID);

    // If we are in normal mode (not server stateless mode), transaction id is > 0
    // So Extract a Connection from the pool and store it memory
    if (action.equals(SqlActionTransaction.ACTION_SQL_INIT_REMOTE_CONNECTION) && !connectionId.equals("0")) {
      // Create the Connection & store it
      ConnectionStore connectionStore = new ConnectionStore(username, connectionId);
      Connection connection = commonsConfigurator.getConnection();
      connectionStore.put(connection);

      debug("ACTION_SQL_INIT_REMOTE_CONNECTION");
      debug("username :" + username + ":");
      debug("connectionId :" + connectionId + ":");
      debug("connection :" + connection + ":");

      // out.println(TransferStatus.SEND_OK);
      ServerSqlManager.writeLine(out, TransferStatus.SEND_OK);
      return;
    }

    // If we are not in stateless mode, clean old connections with a Thread
    if (!connectionId.equals("0") && !ConnectionStoreCleaner.IS_RUNNING) {
      ConnectionStoreCleaner cleaner = new ConnectionStoreCleaner(sqlConfigurator);
      cleaner.start();
    }

    // Notify to Kawan in async mode using a secured Thread that
    // the user has successfully logged (done once in JVM session per username).
    // No notification is done if user.home/.kawansoft/no_notify.txt exists
    // or web server name is localhost or 127.0.0.1
    if (!KawanNotifier.existsNoNotifyTxt() && !KawanNotifier.usernameAlreadyLogged(username)
        && !KawanNotifier.serverNameIsLocalhost()) {
      KawanNotifier kawanNotifier = new KawanNotifier(username, "AceQL_" + VersionValues.VERSION);
      kawanNotifier.start();
    }

    if (action.equals(SqlAction.ACTION_SQL_STATEMENT)) {
      executeStatement(request, commonsConfigurator, fileConfigurator, sqlConfigurator, out);
    } else if (action.equals(SqlAction.ACTION_SQL_STATEMENT_BATCH)) {
      executeStatementBatch(request, commonsConfigurator, fileConfigurator, sqlConfigurator, out);
    } else if (action.equals(SqlAction.ACTION_SQL_PREP_STATEMENT_BATCH)) {
      executePrepStatementBatch(request, commonsConfigurator, fileConfigurator, sqlConfigurator, out);
    } else if (action.equals(SqlAction.ACTION_SQL_GET_METADATA)) {
      executeGetMetadata(request, commonsConfigurator, sqlConfigurator, out);
    } else if (action.equals(SqlAction.ACTION_SQL_EXECUTE_RAW)) {
      executeRaw(request, commonsConfigurator, fileConfigurator, sqlConfigurator, out);
    } else if (action.equals(SqlActionTransaction.ACTION_SQL_GET_TRANSACTION_ISOLATION)) {
      getTransactionIsolation(request, commonsConfigurator, sqlConfigurator, out);
    } else if (action.equals(SqlActionCallable.ACTION_SQL_CALLABLE_EXECUTE_RAW)) {
      callableExecute(request, commonsConfigurator, fileConfigurator, sqlConfigurator, out);
    } else if (action.equals(SqlActionCallable.ACTION_SQL_CALLABLE_EXECUTE_QUERY)) {
      callableExecuteQuery(request, commonsConfigurator, fileConfigurator, sqlConfigurator, out);
    } else if (action.equals(SqlActionTransaction.ACTION_SQL_COMMIT)
        || action.equals(SqlActionTransaction.ACTION_SQL_ROLLBACK)
        || action.equals(SqlActionTransaction.ACTION_SQL_CON_CLOSE)) {
      setCommitRollbackCloseExecute(request, commonsConfigurator, sqlConfigurator, out, action);
    } else if (action.equals(SqlActionTransaction.ACTION_SQL_SET_AUTOCOMMIT)
        || action.equals(SqlActionTransaction.ACTION_SQL_SET_READ_ONLY)
        || action.equals(SqlActionTransaction.ACTION_SQL_SET_HOLDABILITY)
        || action.equals(SqlActionTransaction.ACTION_SQL_SET_TRANSACTION_ISOLATION)) {
      setAutocommitReadOnlyHoldabilityTransactionInsolationExecute(request, commonsConfigurator, out, action);
    } else if (action.equals(SqlActionTransaction.ACTION_SQL_IS_READ_ONLY)
        || action.equals(SqlActionTransaction.ACTION_SQL_GET_HOLDABILITY)
        || action.equals(SqlActionTransaction.ACTION_SQL_GET_AUTOCOMMIT)) {
      getAutocommitReadOnlyHoldabilityExecute(request, commonsConfigurator, out, action);
    } else if (action.equals(SqlActionTransaction.ACTION_SQL_SET_SAVEPOINT)
        || action.equals(SqlActionTransaction.ACTION_SQL_SET_SAVEPOINT_NAME)
        || action.equals(SqlActionTransaction.ACTION_SQL_SET_ROLLBACK_SAVEPOINT)
        || action.equals(SqlActionTransaction.ACTION_SQL_SET_RELEASE_SAVEPOINT)) {
      setSavepointExecute(request, commonsConfigurator, out, action);
    } else if (action.equals(SqlAction.ACTION_SQL_IS_VALID)
        || action.equals(SqlAction.ACTION_SQL_SET_CLIENT_INFO_NAME)
        || action.equals(SqlAction.ACTION_SQL_SET_CLIENT_INFO_PROP)
        || action.equals(SqlAction.ACTION_SQL_GET_CLIENT_INFO_NAME)
        || action.equals(SqlAction.ACTION_SQL_GET_CLIENT_INFO)
        || action.equals(SqlAction.ACTION_SQL_CREATE_ARRAY_OF)) {
      connectionInfoExecute(request, commonsConfigurator, out, action);
    } else {
      throw new IllegalArgumentException("Invalid Sql Action: " + action);
    }

  } catch (Exception e) {
    // out.println(TransferStatus.SEND_FAILED);
    // out.println(e.getClass().getName());
    // out.println(ServerUserThrowable.getMessage(e));
    // out.println(ExceptionUtils.getStackTrace(e));
    ServerSqlManager.writeLine(out, TransferStatus.SEND_FAILED);
    ServerSqlManager.writeLine(out, e.getClass().getName());
    ServerSqlManager.writeLine(out, ServerUserThrowable.getMessage(e));
    ServerSqlManager.writeLine(out, ExceptionUtils.getStackTrace(e));

    try {
      ServerLogger.getLogger().log(Level.WARNING,
          Tag.PRODUCT_EXCEPTION_RAISED + " " + ServerUserThrowable.getMessage(e));
      ServerLogger.getLogger().log(Level.WARNING,
          Tag.PRODUCT_EXCEPTION_RAISED + " " + ExceptionUtils.getStackTrace(e));
    } catch (Exception e1) {
      e1.printStackTrace();
      e1.printStackTrace(System.out);
    }
  }
}
From source file:com.evolveum.midpoint.provisioning.ucf.impl.ConnectorInstanceIcfImpl.java
@Override
public <T extends ShadowType> SearchResultMetadata search(
    final ObjectClassComplexTypeDefinition objectClassDefinition, final ObjectQuery query,
    final ResultHandler<T> handler, AttributesToReturn attributesToReturn,
    PagedSearchCapabilityType pagedSearchCapabilityType, SearchHierarchyConstraints searchHierarchyConstraints,
    final StateReporter reporter, OperationResult parentResult) throws CommunicationException,
    GenericFrameworkException, SchemaException, SecurityViolationException, ObjectNotFoundException {

  // Result type for this operation
  final OperationResult result = parentResult.createSubresult(ConnectorInstance.class.getName() + ".search");
  result.addParam("objectClass", objectClassDefinition);
  result.addContext("connector", connectorType);

  if (objectClassDefinition == null) {
    result.recordFatalError("Object class not defined");
    throw new IllegalArgumentException("objectClass not defined");
  }

  ObjectClass icfObjectClass = icfNameMapper.objectClassToIcf(objectClassDefinition, getSchemaNamespace(),
      connectorType, legacySchema);
  if (icfObjectClass == null) {
    IllegalArgumentException ex = new IllegalArgumentException("Unable to determine object class from QName "
        + objectClassDefinition + " while attempting to search objects by "
        + ObjectTypeUtil.toShortString(connectorType));
    result.recordFatalError("Unable to determine object class", ex);
    throw ex;
  }

  final PrismObjectDefinition<T> objectDefinition = toShadowDefinition(objectClassDefinition);

  if (pagedSearchCapabilityType == null) {
    pagedSearchCapabilityType = getCapability(PagedSearchCapabilityType.class);
  }
  final boolean useConnectorPaging = pagedSearchCapabilityType != null;
  if (!useConnectorPaging && query != null && query.getPaging() != null
      && (query.getPaging().getOffset() != null || query.getPaging().getMaxSize() != null)) {
    InternalMonitor.recordConnectorSimulatedPagingSearchCount();
  }

  final Holder<Integer> countHolder = new Holder<>(0);

  ResultsHandler icfHandler = new ResultsHandler() {
    @Override
    public boolean handle(ConnectorObject connectorObject) {
      // Convert ICF-specific connector object to a generic ResourceObject
      recordIcfOperationSuspend(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
      int count = countHolder.getValue();
      countHolder.setValue(count + 1);
      if (!useConnectorPaging) {
        if (query != null && query.getPaging() != null && query.getPaging().getOffset() != null
            && query.getPaging().getMaxSize() != null) {
          if (count < query.getPaging().getOffset()) {
            recordResume();
            return true;
          }
          if (count == (query.getPaging().getOffset() + query.getPaging().getMaxSize())) {
            recordResume();
            return false;
          }
        }
      }
      PrismObject<T> resourceObject;
      try {
        resourceObject = icfConvertor.convertToResourceObject(connectorObject, objectDefinition, false,
            caseIgnoreAttributeNames);
      } catch (SchemaException e) {
        recordResume();
        throw new IntermediateException(e);
      }

      // .. and pass it to the handler
      boolean cont = handler.handle(resourceObject);
      if (!cont) {
        result.recordPartialError("Stopped on request from the handler");
      }
      recordResume();
      return cont;
    }

    private void recordResume() {
      recordIcfOperationResume(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
    }
  };

  OperationOptionsBuilder optionsBuilder = new OperationOptionsBuilder();
  convertToIcfAttrsToGet(objectClassDefinition, attributesToReturn, optionsBuilder);
  if (query != null && query.isAllowPartialResults()) {
    optionsBuilder.setAllowPartialResults(query.isAllowPartialResults());
  }
  // preparing paging-related options
  if (useConnectorPaging && query != null && query.getPaging() != null) {
    ObjectPaging paging = query.getPaging();
    if (paging.getOffset() != null) {
      optionsBuilder.setPagedResultsOffset(paging.getOffset() + 1); // ConnId API says the numbering starts at 1
    }
    if (paging.getMaxSize() != null) {
      optionsBuilder.setPageSize(paging.getMaxSize());
    }
    QName orderByAttributeName;
    boolean isAscending;
    ItemPath orderByPath = paging.getOrderBy();
    if (orderByPath != null && !orderByPath.isEmpty()) {
      orderByAttributeName = ShadowUtil.getAttributeName(orderByPath, "OrderBy path");
      if (SchemaConstants.C_NAME.equals(orderByAttributeName)) {
        orderByAttributeName = SchemaConstants.ICFS_NAME;
      }
      isAscending = paging.getDirection() != OrderDirection.DESCENDING;
    } else {
      orderByAttributeName = pagedSearchCapabilityType.getDefaultSortField();
      isAscending = pagedSearchCapabilityType.getDefaultSortDirection() != OrderDirectionType.DESCENDING;
    }
    if (orderByAttributeName != null) {
      String orderByIcfName = icfNameMapper.convertAttributeNameToIcf(orderByAttributeName, objectClassDefinition);
      optionsBuilder.setSortKeys(new SortKey(orderByIcfName, isAscending));
    }
  }
  if (searchHierarchyConstraints != null) {
    ResourceObjectIdentification baseContextIdentification = searchHierarchyConstraints.getBaseContext();
    // Only LDAP connector really supports base context. And this one will work better with
    // DN. And DN is usually stored in icfs:name. This is ugly, but practical. It works around ConnId problems.
    ResourceAttribute<?> secondaryIdentifier = ShadowUtil.getSecondaryIdentifier(objectClassDefinition,
        baseContextIdentification.getIdentifiers());
    String secondaryIdentifierValue = secondaryIdentifier.getRealValue(String.class);
    ObjectClass baseContextIcfObjectClass = icfNameMapper.objectClassToIcf(
        baseContextIdentification.getObjectClassDefinition(), getSchemaNamespace(), connectorType, legacySchema);
    QualifiedUid containerQualifiedUid = new QualifiedUid(baseContextIcfObjectClass,
        new Uid(secondaryIdentifierValue));
    optionsBuilder.setContainer(containerQualifiedUid);
  }
  OperationOptions options = optionsBuilder.build();

  Filter filter;
  try {
    filter = convertFilterToIcf(query, objectClassDefinition);
  } catch (SchemaException | RuntimeException e) {
    result.recordFatalError(e);
    throw e;
  }

  // Connector operation cannot create result for itself, so we need to
  // create result for it
  OperationResult icfResult = result.createSubresult(ConnectorFacade.class.getName() + ".search");
  icfResult.addArbitraryObjectAsParam("objectClass", icfObjectClass);
  icfResult.addContext("connector", icfConnectorFacade.getClass());

  SearchResult icfSearchResult;
  try {
    InternalMonitor.recordConnectorOperation("search");
    recordIcfOperationStart(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
    icfSearchResult = icfConnectorFacade.search(icfObjectClass, filter, icfHandler, options);
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
    icfResult.recordSuccess();
  } catch (IntermediateException inex) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, inex);
    SchemaException ex = (SchemaException) inex.getCause();
    icfResult.recordFatalError(ex);
    result.recordFatalError(ex);
    throw ex;
  } catch (Throwable ex) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, ex);
    Throwable midpointEx = processIcfException(ex, this, icfResult);
    result.computeStatus();
    // Do some kind of acrobatics to do proper throwing of checked
    // exception
    if (midpointEx instanceof CommunicationException) {
      throw (CommunicationException) midpointEx;
    } else if (midpointEx instanceof ObjectNotFoundException) {
      throw (ObjectNotFoundException) midpointEx;
    } else if (midpointEx instanceof GenericFrameworkException) {
      throw (GenericFrameworkException) midpointEx;
    } else if (midpointEx instanceof SchemaException) {
      throw (SchemaException) midpointEx;
    } else if (midpointEx instanceof SecurityViolationException) {
      throw (SecurityViolationException) midpointEx;
    } else if (midpointEx instanceof RuntimeException) {
      throw (RuntimeException) midpointEx;
    } else if (midpointEx instanceof Error) {
      throw (Error) midpointEx;
    } else {
      throw new SystemException("Got unexpected exception: " + ex.getClass().getName() + ": " + ex.getMessage(), ex);
    }
  }

  SearchResultMetadata metadata = null;
  if (icfSearchResult != null) {
    metadata = new SearchResultMetadata();
    metadata.setPagingCookie(icfSearchResult.getPagedResultsCookie());
    if (icfSearchResult.getRemainingPagedResults() >= 0) {
      metadata.setApproxNumberOfAllResults(icfSearchResult.getRemainingPagedResults());
    }
    if (!icfSearchResult.isAllResultsReturned()) {
      metadata.setPartialResults(true);
    }
  }

  if (result.isUnknown()) {
    result.recordSuccess();
  }
  return metadata;
}
From source file:com.evolveum.midpoint.provisioning.ucf.impl.connid.ConnectorInstanceConnIdImpl.java
@Override
public SearchResultMetadata search(final ObjectClassComplexTypeDefinition objectClassDefinition,
    final ObjectQuery query, final ShadowResultHandler handler, AttributesToReturn attributesToReturn,
    PagedSearchCapabilityType pagedSearchCapabilityType, SearchHierarchyConstraints searchHierarchyConstraints,
    final StateReporter reporter, OperationResult parentResult) throws CommunicationException,
    GenericFrameworkException, SecurityViolationException, SchemaException, ObjectNotFoundException {

  // Result type for this operation
  final OperationResult result = parentResult.createSubresult(ConnectorInstance.class.getName() + ".search");
  result.addArbitraryObjectAsParam("objectClass", objectClassDefinition);
  result.addContext("connector", connectorType);

  if (objectClassDefinition == null) {
    result.recordFatalError("Object class not defined");
    throw new IllegalArgumentException("objectClass not defined");
  }

  ObjectClass icfObjectClass = connIdNameMapper.objectClassToIcf(objectClassDefinition, getSchemaNamespace(),
      connectorType, legacySchema);
  if (icfObjectClass == null) {
    IllegalArgumentException ex = new IllegalArgumentException("Unable to determine object class from QName "
        + objectClassDefinition + " while attempting to search objects by "
        + ObjectTypeUtil.toShortString(connectorType));
    result.recordFatalError("Unable to determine object class", ex);
    throw ex;
  }
  final PrismObjectDefinition<ShadowType> objectDefinition = toShadowDefinition(objectClassDefinition);

  if (pagedSearchCapabilityType == null) {
    pagedSearchCapabilityType = getCapability(PagedSearchCapabilityType.class);
  }

  final boolean useConnectorPaging = pagedSearchCapabilityType != null;
  if (!useConnectorPaging && query != null && query.getPaging() != null
      && (query.getPaging().getOffset() != null || query.getPaging().getMaxSize() != null)) {
    InternalMonitor.recordCount(InternalCounters.CONNECTOR_SIMULATED_PAGING_SEARCH_COUNT);
  }

  final Holder<Integer> countHolder = new Holder<>(0);

  ResultsHandler icfHandler = new ResultsHandler() {
    @Override
    public boolean handle(ConnectorObject connectorObject) {
      // Convert ConnId-specific connector object to a generic ResourceObject
      recordIcfOperationSuspend(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
      int count = countHolder.getValue();
      countHolder.setValue(count + 1);
      if (!useConnectorPaging) {
        // TODO allow offset or maxSize be null
        if (query != null && query.getPaging() != null && query.getPaging().getOffset() != null
            && query.getPaging().getMaxSize() != null) {
          if (count < query.getPaging().getOffset()) {
            recordResume();
            return true;
          }
          if (count == (query.getPaging().getOffset() + query.getPaging().getMaxSize())) {
            recordResume();
            return false;
          }
        }
      }
      PrismObject<ShadowType> resourceObject;
      Validate.notNull(connectorObject, "null connector object");
      try {
        resourceObject = connIdConvertor.convertToResourceObject(connectorObject, objectDefinition, false,
            caseIgnoreAttributeNames, legacySchema);
      } catch (SchemaException e) {
        recordResume();
        throw new IntermediateException(e);
      }
      Validate.notNull(resourceObject, "null resource object");

      // .. and pass it to the handler
      boolean cont = handler.handle(resourceObject);
      if (!cont) {
        result.recordWarning("Stopped on request from the handler");
      }
      recordResume();
      return cont;
    }

    private void recordResume() {
      recordIcfOperationResume(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
    }
  };

  OperationOptionsBuilder optionsBuilder = new OperationOptionsBuilder();
  try {
    convertToIcfAttrsToGet(objectClassDefinition, attributesToReturn, optionsBuilder);
    if (query != null && query.isAllowPartialResults()) {
      optionsBuilder.setAllowPartialResults(query.isAllowPartialResults());
    }
    // preparing paging-related options
    if (useConnectorPaging && query != null && query.getPaging() != null) {
      ObjectPaging paging = query.getPaging();
      if (paging.getOffset() != null) {
        optionsBuilder.setPagedResultsOffset(paging.getOffset() + 1); // ConnId API says the numbering starts at 1
      }
      if (paging.getMaxSize() != null) {
        optionsBuilder.setPageSize(paging.getMaxSize());
      }
      QName orderByAttributeName;
      boolean isAscending;
      ItemPath orderByPath = paging.getOrderBy();
      String desc;
      if (orderByPath != null && !orderByPath.isEmpty()) {
        orderByAttributeName = ShadowUtil.getAttributeName(orderByPath, "OrderBy path");
        if (SchemaConstants.C_NAME.equals(orderByAttributeName)) {
          orderByAttributeName = SchemaConstants.ICFS_NAME;
        }
        isAscending = paging.getDirection() != OrderDirection.DESCENDING;
        desc = "(explicitly specified orderBy attribute)";
      } else {
        orderByAttributeName = pagedSearchCapabilityType.getDefaultSortField();
        isAscending = pagedSearchCapabilityType.getDefaultSortDirection() != OrderDirectionType.DESCENDING;
        desc = "(default orderBy attribute from capability definition)";
      }
      if (orderByAttributeName != null) {
        String orderByIcfName = connIdNameMapper.convertAttributeNameToIcf(orderByAttributeName,
            objectClassDefinition, desc);
        optionsBuilder.setSortKeys(new SortKey(orderByIcfName, isAscending));
      }
    }
    if (searchHierarchyConstraints != null) {
      ResourceObjectIdentification baseContextIdentification = searchHierarchyConstraints.getBaseContext();
      // Only LDAP connector really supports base context. And this one will work better with
      // DN. And DN is secondary identifier (__NAME__). This is ugly, but practical. It works around ConnId problems.
      ResourceAttribute<?> secondaryIdentifier = baseContextIdentification.getSecondaryIdentifier();
      if (secondaryIdentifier == null) {
        SchemaException e = new SchemaException(
            "No secondary identifier in base context identification " + baseContextIdentification);
        result.recordFatalError(e);
        throw e;
      }
      String secondaryIdentifierValue = secondaryIdentifier.getRealValue(String.class);
      ObjectClass baseContextIcfObjectClass = connIdNameMapper.objectClassToIcf(
          baseContextIdentification.getObjectClassDefinition(), getSchemaNamespace(), connectorType, legacySchema);
      QualifiedUid containerQualifiedUid = new QualifiedUid(baseContextIcfObjectClass,
          new Uid(secondaryIdentifierValue));
      optionsBuilder.setContainer(containerQualifiedUid);
    }
  } catch (SchemaException e) {
    result.recordFatalError(e);
    throw e;
  }

  // Relax completeness requirements. This is a search, not get. So it is OK to
  // return incomplete member lists and similar attributes.
  optionsBuilder.setAllowPartialAttributeValues(true);

  OperationOptions options = optionsBuilder.build();

  Filter filter;
  try {
    filter = convertFilterToIcf(query, objectClassDefinition);
  } catch (SchemaException | RuntimeException e) {
    result.recordFatalError(e);
    throw e;
  }

  // Connector operation cannot create result for itself, so we need to
  // create result for it
  OperationResult icfResult = result.createSubresult(ConnectorFacade.class.getName() + ".search");
  icfResult.addArbitraryObjectAsParam("objectClass", icfObjectClass);
  icfResult.addContext("connector", connIdConnectorFacade.getClass());

  SearchResult icfSearchResult;
  try {
    InternalMonitor.recordConnectorOperation("search");
    recordIcfOperationStart(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
    icfSearchResult = connIdConnectorFacade.search(icfObjectClass, filter, icfHandler, options);
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition);
    icfResult.recordSuccess();
  } catch (IntermediateException inex) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, inex);
    SchemaException ex = (SchemaException) inex.getCause();
    icfResult.recordFatalError(ex);
    result.recordFatalError(ex);
    throw ex;
  } catch (Throwable ex) {
    recordIcfOperationEnd(reporter, ProvisioningOperation.ICF_SEARCH, objectClassDefinition, ex);
    Throwable midpointEx = processConnIdException(ex, this, icfResult);
    result.computeStatus();
    // Do some kind of acrobatics to do proper throwing of checked
    // exception
    if (midpointEx instanceof CommunicationException) {
      throw (CommunicationException) midpointEx;
    } else if (midpointEx instanceof ObjectNotFoundException) {
      throw (ObjectNotFoundException) midpointEx;
    } else if (midpointEx instanceof GenericFrameworkException) {
      throw (GenericFrameworkException) midpointEx;
    } else if (midpointEx instanceof SchemaException) {
      throw (SchemaException) midpointEx;
    } else if (midpointEx instanceof SecurityViolationException) {
      throw (SecurityViolationException) midpointEx;
    } else if (midpointEx instanceof RuntimeException) {
      throw (RuntimeException) midpointEx;
    } else if (midpointEx instanceof Error) {
      throw (Error) midpointEx;
    } else {
      throw new SystemException("Got unexpected exception: " + ex.getClass().getName() + ": " + ex.getMessage(), ex);
    }
  }

  SearchResultMetadata metadata = null;
  if (icfSearchResult != null) {
    metadata = new SearchResultMetadata();
    metadata.setPagingCookie(icfSearchResult.getPagedResultsCookie());
    if (icfSearchResult.getRemainingPagedResults() >= 0) {
      metadata.setApproxNumberOfAllResults(icfSearchResult.getRemainingPagedResults());
    }
    if (!icfSearchResult.isAllResultsReturned()) {
      metadata.setPartialResults(true);
    }
  }

  if (result.isUnknown()) {
    result.recordSuccess();
  }

  return metadata;
}