Usage examples for java.util.Collection#clear()
Method signature: void clear();
From source file:com.nextep.designer.synch.services.impl.DataCaptureService.java
private IVersionable<IDataSet> fetchDataSet(Connection conn, DBVendor vendor, IBasicTable t, List<IBasicColumn> columns, IProgressMonitor m) throws SQLException { final String taskName = "Capturing " + t.getName() + " data"; SubMonitor monitor = SubMonitor.convert(m, taskName, 100); monitor.subTask(taskName);//from w ww . ja v a 2 s .c o m final IVersionable<IDataSet> v = VersionableFactory.createVersionable(IDataSet.class); final IDataSet dataSet = v.getVersionnedObject().getModel(); final Collection<IDataLine> datalineBuffer = new ArrayList<IDataLine>(BUFFER_SIZE); // Configuring dataset dataSet.setTable(t); // Aligning captured data set with repository dataset name if (!t.getDataSets().isEmpty()) { // Taking first one final IDataSet set = t.getDataSets().iterator().next(); // Captured data set will be named just like the repository dataset to force name synch dataSet.setName(set.getName()); // Captured columns are restricted to defined data set columns only columns = set.getColumns(); } else { dataSet.setName(t.getName()); } for (IBasicColumn c : columns) { dataSet.addColumn(c); } // Fetching data Statement stmt = null; ResultSet rset = null; long counter = 0; try { stmt = conn.createStatement(); final String dataSelect = buildDataSelect(vendor, t, columns); monitor.subTask(taskName + " - querying data"); rset = stmt.executeQuery(dataSelect); final ResultSetMetaData md = rset.getMetaData(); int bufferCount = 0; while (rset.next()) { // Handling cancellation if (monitor.isCanceled()) { return v; } else { if (counter++ % 100 == 0) { monitor.worked(100); } } // Preparing dataline final IDataLine line = typedObjectFactory.create(IDataLine.class); // Iterating over result set columns for (int i = 1; i <= md.getColumnCount(); i++) { // Fetching result set column value Object value = null; try { value = rset.getObject(i); } catch (SQLException e) { LOGGER.error("Data import problem on " + t.getName() + " column " + i + " of line " + counter + " failed to fetch data, 
NULL will be used instead [" + e.getMessage() + "]", e); //$NON-NLS-1$ } // Preparing column value final IColumnValue colValue = typedObjectFactory.create(IColumnValue.class); colValue.setDataLine(line); colValue.setColumn(columns.get(i - 1)); colValue.setValue(value); line.addColumnValue(colValue); } datalineBuffer.add(line); if (++bufferCount >= BUFFER_SIZE) { dataService.addDataline(dataSet, datalineBuffer.toArray(new IDataLine[datalineBuffer.size()])); datalineBuffer.clear(); bufferCount = 0; monitor.subTask(taskName + " - " + counter + " lines fetched"); //$NON-NLS-1$ } } // Flushing end of buffer if (!datalineBuffer.isEmpty()) { dataService.addDataline(dataSet, datalineBuffer.toArray(new IDataLine[datalineBuffer.size()])); } LOGGER.info("Captured " + counter + " data lines from " + t.getName()); } catch (SQLException e) { LOGGER.error("Unable to fetch data from table " + t.getName() + ": this table may need structure synchronization: " + e.getMessage(), e); } finally { if (rset != null) { rset.close(); } if (stmt != null) { stmt.close(); } } monitor.done(); // Only returning dataset if at least one row was fetched return counter == 0 ? null : v; }
From source file:tv.floe.metronome.classification.logisticregression.iterativereduce.POLRMasterNode.java
@Override public ParameterVectorUpdatable compute(Collection<ParameterVectorUpdatable> workerUpdates, Collection<ParameterVectorUpdatable> masterUpdates) { System.out.println("\nMaster Compute: SuperStep - Worker Info ----- "); int x = 0;//from w ww .j av a 2 s .c o m // reset //this.Global_Min_IterationCount = this.NumberPasses; boolean iterationComplete = true; this.global_parameter_vector.parameter_vector = new DenseMatrix(this.num_categories - 1, this.FeatureVectorSize); for (ParameterVectorUpdatable i : workerUpdates) { // not sure we still need this --------------- if (i.get().SrcWorkerPassCount > this.GlobalMaxPassCount) { this.GlobalMaxPassCount = i.get().SrcWorkerPassCount; } // if any worker is not done with hte iteration, trip the flag if (i.get().IterationComplete == 0) { //this.Global_Min_IterationCount = i.get().IterationCount; iterationComplete = false; } System.out.println("[Master] WorkerReport[" + x + "]: I: " + i.get().CurrentIteration + ", IC: " + i.get().IterationComplete + " Trained Recs: " + i.get().TrainedRecords + " AvgLogLikelihood: " + i.get().AvgLogLikelihood + " PercentCorrect: " + i.get().PercentCorrect); if (i.get().IterationComplete == 1) { System.out.println("> worker " + x + " is done with current iteration"); } x++; // accumulate gradient of parameter vectors //this.global_parameter_vector.AccumulateGradient(i.get().parameter_vector); this.global_parameter_vector.AccumulateParameterVector(i.get().parameter_vector); } // now average the parameter vectors together //this.global_parameter_vector.AverageAccumulations(workerUpdates.size()); this.global_parameter_vector.AverageParameterVectors(workerUpdates.size()); LOG.debug("Master node accumulating and averaging " + workerUpdates.size() + " worker updates."); ParameterVector gradient_msg = new ParameterVector(); gradient_msg.GlobalPassCount = this.GlobalMaxPassCount; /* if (iterationComplete) { gradient_msg.IterationComplete = 1; System.out.println( "> Master says: Iteration 
Complete" ); } else { gradient_msg.IterationComplete = 0; } */ gradient_msg.parameter_vector = this.global_parameter_vector.parameter_vector.clone(); ParameterVectorUpdatable return_msg = new ParameterVectorUpdatable(); return_msg.set(gradient_msg); // set the master copy! this.polr.SetBeta(this.global_parameter_vector.parameter_vector.clone()); // THIS NEEDS TO BE DONE, probably automated! workerUpdates.clear(); return return_msg; }
From source file:org.jspresso.framework.model.component.basic.AbstractComponentInvocationHandler.java
/**
 * Sets a component property through its descriptor, maintaining both ends of
 * bidirectional relationships and firing property change notifications.
 * <p>
 * Flow: read the current value via the accessor factory, run the setter
 * interceptor/pre-processors (when enabled), short-circuit when old and new values
 * are initialized and equal, then either (a) wire reference/collection relation
 * ends — updating the reverse end and rolling back on failure — or (b) store a
 * plain property. Finally fires the property change and post-processes.
 * <p>
 * NOTE(review): statement order here is load-bearing (snapshotting, reverse-end
 * updates, flag save/restore, rollback on exception); do not reorder casually.
 *
 * @param proxy              the component proxy whose property is being set
 * @param propertyDescriptor descriptor of the property to set
 * @param newProperty        the candidate new value (before interception)
 */
@SuppressWarnings("unchecked")
private void setProperty(Object proxy, IPropertyDescriptor propertyDescriptor, Object newProperty) {
    String propertyName = propertyDescriptor.getName();
    Object oldProperty;
    try {
        oldProperty = accessorFactory
                .createPropertyAccessor(propertyName, componentDescriptor.getComponentContract())
                .getValue(proxy);
    } catch (IllegalAccessException | NoSuchMethodException ex) {
        throw new ComponentException(ex);
    } catch (InvocationTargetException ex) {
        // Unwrap the reflective wrapper so callers see the real failure.
        if (ex.getCause() instanceof RuntimeException) {
            throw (RuntimeException) ex.getCause();
        }
        throw new ComponentException(ex.getCause());
    }
    Object actualNewProperty;
    if (propertyProcessorsEnabled) {
        // Interceptors may substitute the value actually stored.
        actualNewProperty = propertyDescriptor.interceptSetter(proxy, newProperty);
    } else {
        actualNewProperty = newProperty;
    }
    // No-op when both sides are initialized and already equal.
    if (isInitialized(oldProperty) && isInitialized(actualNewProperty)
            && ObjectUtils.equals(oldProperty, actualNewProperty)) {
        return;
    }
    if (propertyProcessorsEnabled) {
        propertyDescriptor.preprocessSetter(proxy, actualNewProperty);
    }
    if (propertyDescriptor instanceof IRelationshipEndPropertyDescriptor) {
        // It's a relation end
        IRelationshipEndPropertyDescriptor reversePropertyDescriptor = ((IRelationshipEndPropertyDescriptor) propertyDescriptor)
                .getReverseRelationEnd();
        try {
            if (propertyDescriptor instanceof IReferencePropertyDescriptor) {
                // It's a 'one' relation end
                storeReferenceProperty(proxy, (IReferencePropertyDescriptor<?>) propertyDescriptor, oldProperty,
                        actualNewProperty);
                if (reversePropertyDescriptor != null) {
                    // It is bidirectional, so we are going to update the other end.
                    if (reversePropertyDescriptor instanceof IReferencePropertyDescriptor) {
                        // It's a one-to-one relationship
                        if (proxy instanceof IEntity && oldProperty instanceof IEntity) {
                            entityDetached((IEntity) proxy, (IEntity) oldProperty,
                                    ((IRelationshipEndPropertyDescriptor) propertyDescriptor));
                        }
                        IAccessor reversePropertyAccessor = accessorFactory.createPropertyAccessor(
                                reversePropertyDescriptor.getName(),
                                ((IReferencePropertyDescriptor<?>) propertyDescriptor).getReferencedDescriptor()
                                        .getComponentContract());
                        // Detach the old counterpart, attach the new one.
                        if (oldProperty != null) {
                            reversePropertyAccessor.setValue(oldProperty, null);
                        }
                        if (actualNewProperty != null) {
                            reversePropertyAccessor.setValue(actualNewProperty, proxy);
                        }
                    } else if (reversePropertyDescriptor instanceof ICollectionPropertyDescriptor) {
                        // It's a one-to-many relationship
                        ICollectionAccessor reversePropertyAccessor = accessorFactory
                                .createCollectionPropertyAccessor(reversePropertyDescriptor.getName(),
                                        ((IReferencePropertyDescriptor<?>) propertyDescriptor)
                                                .getReferencedDescriptor().getComponentContract(),
                                        ((ICollectionPropertyDescriptor<?>) reversePropertyDescriptor)
                                                .getCollectionDescriptor().getElementDescriptor()
                                                .getComponentContract());
                        if (reversePropertyAccessor instanceof IModelDescriptorAware) {
                            ((IModelDescriptorAware) reversePropertyAccessor)
                                    .setModelDescriptor(reversePropertyDescriptor);
                        }
                        // Move this proxy from the old owner's collection to the new one's.
                        if (oldProperty != null) {
                            reversePropertyAccessor.removeFromValue(oldProperty, proxy);
                        }
                        if (actualNewProperty != null) {
                            reversePropertyAccessor.addToValue(actualNewProperty, proxy);
                        }
                    }
                }
            } else if (propertyDescriptor instanceof ICollectionPropertyDescriptor) {
                // Snapshot taken BEFORE mutation so the change event reports the true old value.
                Collection<?> oldCollectionSnapshot = CollectionHelper
                        .cloneCollection((Collection<?>) oldProperty);
                // It's a 'many' relation end: compute the minimal add/remove delta.
                Collection<Object> oldPropertyElementsToRemove = new THashSet<>(1);
                Collection<Object> newPropertyElementsToAdd = new TLinkedHashSet<>(1);
                Collection<Object> propertyElementsToKeep = new THashSet<>(1);
                if (oldProperty != null) {
                    oldPropertyElementsToRemove.addAll((Collection<?>) oldProperty);
                    propertyElementsToKeep.addAll((Collection<?>) oldProperty);
                }
                if (actualNewProperty != null) {
                    newPropertyElementsToAdd.addAll((Collection<?>) actualNewProperty);
                }
                propertyElementsToKeep.retainAll(newPropertyElementsToAdd);
                oldPropertyElementsToRemove.removeAll(propertyElementsToKeep);
                newPropertyElementsToAdd.removeAll(propertyElementsToKeep);
                ICollectionAccessor propertyAccessor = accessorFactory.createCollectionPropertyAccessor(
                        propertyName, componentDescriptor.getComponentContract(),
                        ((ICollectionPropertyDescriptor<?>) propertyDescriptor).getCollectionDescriptor()
                                .getElementDescriptor().getComponentContract());
                boolean oldCollectionSortEnabled = collectionSortEnabled;
                boolean oldPropertyChangeEnabled = propertyChangeEnabled;
                boolean oldPropertyProcessorsEnabled = propertyProcessorsEnabled;
                try {
                    // Delay sorting for performance reasons.
                    collectionSortEnabled = false;
                    // Block property changes for performance reasons;
                    propertyChangeEnabled = false;
                    // Block property processors
                    propertyProcessorsEnabled = false;
                    for (Object element : oldPropertyElementsToRemove) {
                        propertyAccessor.removeFromValue(proxy, element);
                    }
                    for (Object element : newPropertyElementsToAdd) {
                        propertyAccessor.addToValue(proxy, element);
                    }
                    inlineComponentFactory.sortCollectionProperty((IComponent) proxy, propertyName);
                } finally {
                    // Always restore the flags even if an accessor call failed.
                    collectionSortEnabled = oldCollectionSortEnabled;
                    propertyChangeEnabled = oldPropertyChangeEnabled;
                    propertyProcessorsEnabled = oldPropertyProcessorsEnabled;
                }
                // if the property is a list we may restore the element order and be
                // careful not to miss one...
                if (actualNewProperty instanceof List) {
                    Collection<Object> currentProperty = (Collection<Object>) oldProperty;
                    if (currentProperty instanceof List) {
                        // Just check that only order differs
                        Set<Object> temp = new THashSet<>(currentProperty);
                        temp.removeAll((List<?>) actualNewProperty);
                        if (currentProperty instanceof ICollectionWrapper) {
                            currentProperty = ((ICollectionWrapper) currentProperty).getWrappedCollection();
                        }
                        currentProperty.clear();
                        currentProperty.addAll((List<?>) actualNewProperty);
                        currentProperty.addAll(temp);
                    }
                }
                // Report the pre-mutation snapshot as the old value in the change event.
                oldProperty = oldCollectionSnapshot;
            }
        } catch (RuntimeException ex) {
            // Restore the previous value before propagating.
            rollbackProperty(proxy, propertyDescriptor, oldProperty);
            throw ex;
        } catch (InvocationTargetException ex) {
            rollbackProperty(proxy, propertyDescriptor, oldProperty);
            if (ex.getCause() instanceof RuntimeException) {
                throw (RuntimeException) ex.getCause();
            }
            throw new ComponentException(ex.getCause());
        } catch (IllegalAccessException | NoSuchMethodException ex) {
            throw new ComponentException(ex);
        }
    } else {
        // Plain (non-relationship) property: just store it.
        storeProperty(propertyName, actualNewProperty);
    }
    doFirePropertyChange(proxy, propertyName, oldProperty, actualNewProperty);
    if (propertyProcessorsEnabled) {
        propertyDescriptor.postprocessSetter(proxy, oldProperty, actualNewProperty);
    }
}
From source file:org.apache.bval.jsr.JsrMetaBeanFactory.java
/** * Process class annotations, field and method annotations. * // w w w . ja v a 2s .c o m * @param beanClass * @param metabean * @throws IllegalAccessException * @throws InvocationTargetException */ private void processClass(Class<?> beanClass, MetaBean metabean) throws IllegalAccessException, InvocationTargetException { // if NOT ignore class level annotations if (!factory.getAnnotationIgnores().isIgnoreAnnotations(beanClass)) { annotationProcessor.processAnnotations(null, beanClass, beanClass, null, new AppendValidationToMeta(metabean)); } final Collection<String> missingValid = new ArrayList<String>(); final Field[] fields = Reflection.getDeclaredFields(beanClass); for (final Field field : fields) { MetaProperty metaProperty = metabean.getProperty(field.getName()); // create a property for those fields for which there is not yet a // MetaProperty if (!factory.getAnnotationIgnores().isIgnoreAnnotations(field)) { AccessStrategy access = new FieldAccess(field); boolean create = metaProperty == null; if (create) { metaProperty = addMetaProperty(metabean, access); } if (!annotationProcessor.processAnnotations(metaProperty, beanClass, field, access, new AppendValidationToMeta(metaProperty)) && create) { metabean.putProperty(metaProperty.getName(), null); } if (field.getAnnotation(ConvertGroup.class) != null) { missingValid.add(field.getName()); } } } final Method[] methods = Reflection.getDeclaredMethods(beanClass); for (final Method method : methods) { if (method.isSynthetic() || method.isBridge()) { continue; } String propName = null; if (method.getParameterTypes().length == 0) { propName = MethodAccess.getPropertyName(method); } if (propName != null) { if (!factory.getAnnotationIgnores().isIgnoreAnnotations(method)) { AccessStrategy access = new MethodAccess(propName, method); MetaProperty metaProperty = metabean.getProperty(propName); boolean create = metaProperty == null; // create a property for those methods for which there is // not yet a MetaProperty if 
(create) { metaProperty = addMetaProperty(metabean, access); } if (!annotationProcessor.processAnnotations(metaProperty, beanClass, method, access, new AppendValidationToMeta(metaProperty)) && create) { metabean.putProperty(propName, null); } } } } addXmlConstraints(beanClass, metabean); for (final String name : missingValid) { final MetaProperty metaProperty = metabean.getProperty(name); if (metaProperty != null && metaProperty.getFeature(JsrFeatures.Property.REF_CASCADE) == null) { throw new ConstraintDeclarationException("@ConvertGroup needs @Valid"); } } missingValid.clear(); }
From source file:ubic.gemma.search.SearchServiceImpl.java
/**
 * Searches for expression experiments via ontology characteristics, walking entity
 * classes in order (ExpressionExperiment, FactorValue, BioMaterial, Treatment) until
 * enough results are accumulated, then maps indirect hits (biomaterials, factor
 * values) back to their experiments in batch.
 *
 * @param settings the search settings to apply
 * @return search results pointing at expression experiments; indirect hits carry
 *         {@code INDIRECT_DB_HIT_PENALTY}
 */
private Collection<SearchResult> characteristicExpressionExperimentSearch(final SearchSettings settings) {
    Collection<SearchResult> results = new HashSet<SearchResult>();
    Collection<Class<?>> classToSearch = new ArrayList<Class<?>>(1);
    // Classes are searched most-direct first; the loop stops early once we have enough.
    Queue<Class<?>> orderedClassesToSearch = new LinkedList<Class<?>>();
    orderedClassesToSearch.add(ExpressionExperiment.class);
    orderedClassesToSearch.add(FactorValue.class);
    orderedClassesToSearch.add(BioMaterial.class);
    orderedClassesToSearch.add(Treatment.class);
    Collection<SearchResult> characterSearchResults = new HashSet<SearchResult>();
    while (characterSearchResults.size() < SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
            && !orderedClassesToSearch.isEmpty()) {
        classToSearch.clear();
        classToSearch.add(orderedClassesToSearch.poll());
        Collection<SearchResult> classResults = ontologySearchAnnotatedObject(classToSearch, settings);
        characterSearchResults.addAll(classResults);
        String msg = "Found " + classResults.size() + " " + classToSearch.iterator().next().getSimpleName()
                + " results from characteristic search.";
        if (characterSearchResults.size() >= SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS) {
            msg += " Total found > " + SUFFICIENT_EXPERIMENT_RESULTS_FROM_CHARACTERISTICS
                    + ", will not search for more entities.";
        }
        log.info(msg);
    }
    StopWatch watch = new StopWatch();
    watch.start();
    // filter and get parents...
    int numEEs = 0;
    Collection<BioMaterial> biomaterials = new HashSet<BioMaterial>();
    Collection<FactorValue> factorValues = new HashSet<FactorValue>();
    Collection<Treatment> treatments = new HashSet<Treatment>();
    // Bucket the raw hits: direct EE hits go straight to results, indirect hits are
    // collected per type for batched lookup below.
    for (SearchResult sr : characterSearchResults) {
        Class<?> resultClass = sr.getResultClass();
        if (ExpressionExperiment.class.isAssignableFrom(resultClass)) {
            sr.setHighlightedText(sr.getHighlightedText() + " (characteristic)");
            results.add(sr);
            numEEs++;
        } else if (BioMaterial.class.isAssignableFrom(resultClass)) {
            biomaterials.add((BioMaterial) sr.getResultObject());
        } else if (FactorValue.class.isAssignableFrom(resultClass)) {
            factorValues.add((FactorValue) sr.getResultObject());
        } else if (Treatment.class.isAssignableFrom(resultClass)) {
            treatments.add((Treatment) sr.getResultObject());
        }
    }
    /*
     * Much faster to batch it...
     */
    if (biomaterials.size() > 0) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByBioMaterials(biomaterials);
        for (ExpressionExperiment ee : ees) {
            results.add(new SearchResult(ee, INDIRECT_DB_HIT_PENALTY, "BioMaterial characteristic"));
        }
    }
    if (factorValues.size() > 0) {
        Collection<ExpressionExperiment> ees = expressionExperimentService.findByFactorValues(factorValues);
        for (ExpressionExperiment ee : ees) {
            if (log.isDebugEnabled())
                log.debug(ee);
            results.add(new SearchResult(ee, INDIRECT_DB_HIT_PENALTY, "Factor characteristic"));
        }
    }
    if (treatments.size() > 0) {
        // Treatment hits are deliberately not resolved to experiments (see history);
        // only logged so the information is not silently lost.
        log.info("Not processing treatments, but hits were found");
    }
    if (log.isDebugEnabled()) {
        log.debug(
                "ExpressionExperiment search: " + settings + " -> " + results.size() + " characteristic hits");
    }
    // Log timing only when the mapping step was slow.
    if (watch.getTime() > 1000) {
        log.info("Retrieving " + results.size() + " experiments from " + characterSearchResults.size()
                + " retrieved characteristics took " + watch.getTime() + " ms");
        log.info("Breakdown: " + numEEs + " via direct association with EE; " + biomaterials.size()
                + " via association with Biomaterial; " + factorValues.size() + " via experimental design");
    }
    return results;
}
From source file:org.kuali.kra.questionnaire.answer.QuestionnaireAnswerServiceTest.java
/**
 * Verifies {@code QuestionnaireAnswerServiceImpl#checkIfQuestionnaireIsActiveForModule}:
 * returns true only when the questionnaire is active AND at least one usage matches
 * both the module code and the sub-module code; returns false when the questionnaire
 * is inactive, when no usage fully matches, or when the lookup yields no questionnaire.
 * The business object service is mocked (jMock) to return the test questionnaire list.
 */
@Test
public void testcheckIfQuestionnaireIsActiveForModule() {
    // define a questionnaire ID, module code and sub-module code
    Integer questionnaireId = new Integer(4);
    String CORRECT_MODULE_CODE = "correct_module_code";
    String CORRECT_SUB_MODULE_CODE = "correct_sub_module_code";
    // define 'incorrect' module and sub-module codes
    String INCORRECT_MODULE_CODE = "incorrect_module_code";
    String INCORRECT_SUB_MODULE_CODE = "inccorrect_sub_module_code";
    // define 'non-existent' module and sub-module codes
    String NON_EXISTENT_MODULE_CODE = "nonexistent_module_code";
    String NON_EXISTENT_SUB_MODULE_CODE = "nonexistent_sub_module_code";
    // create a questionnaire, don't care about id---does not matter in this test
    Questionnaire questionnaire = new Questionnaire();
    // create four questionnaire usages
    QuestionnaireUsage usage1 = new QuestionnaireUsage();
    QuestionnaireUsage usage2 = new QuestionnaireUsage();
    QuestionnaireUsage usage3 = new QuestionnaireUsage();
    QuestionnaireUsage usage4 = new QuestionnaireUsage();
    // set the usages into the questionnaire
    List<QuestionnaireUsage> usages = new ArrayList<QuestionnaireUsage>();
    usages.add(usage1);
    usages.add(usage2);
    usages.add(usage3);
    usages.add(usage4);
    questionnaire.setQuestionnaireUsages(usages);
    // create the field values map for the mock service
    final Map<String, Long> fieldValues = new HashMap<String, Long>();
    fieldValues.put(QuestionnaireConstants.QUESTIONNAIRE_SEQUENCE_ID_PARAMETER_NAME,
            Long.valueOf(questionnaireId));
    // define the mock business object service; it returns this live list, so later
    // mutations (e.g. questionnaires.clear()) are visible to the service under test
    final Collection<Questionnaire> questionnaires = new ArrayList<Questionnaire>();
    questionnaires.add(questionnaire);
    final BusinessObjectService businessObjectService = context.mock(BusinessObjectService.class);
    context.checking(new Expectations() {
        {
            atLeast(1).of(businessObjectService).findMatchingOrderBy(Questionnaire.class, fieldValues,
                    SEQUENCE_NUMBER, false);
            will(returnValue(questionnaires));
        }
    });
    // create QuestionnaireAnswerServiceImpl instance and set the mock service
    QuestionnaireAnswerServiceImpl questionnaireAnswerServiceImpl = new QuestionnaireAnswerServiceImpl();
    questionnaireAnswerServiceImpl.setBusinessObjectService(businessObjectService);
    // case zero: set questionnaire isFinal to true and one particular usage (usage 3) to correct module code and sub-module code
    // with all other usages set to incorrect codes
    questionnaire.setActive(true);
    usage1.setModuleItemCode(INCORRECT_MODULE_CODE);
    usage1.setModuleSubItemCode(INCORRECT_SUB_MODULE_CODE);
    usage2.setModuleItemCode(INCORRECT_MODULE_CODE);
    usage2.setModuleSubItemCode(CORRECT_SUB_MODULE_CODE);
    usage3.setModuleItemCode(CORRECT_MODULE_CODE);
    usage3.setModuleSubItemCode(CORRECT_SUB_MODULE_CODE);
    usage4.setModuleItemCode(INCORRECT_MODULE_CODE);
    usage4.setModuleSubItemCode(INCORRECT_SUB_MODULE_CODE);
    Assert.assertTrue(questionnaireAnswerServiceImpl.checkIfQuestionnaireIsActiveForModule(questionnaireId,
            CORRECT_MODULE_CODE, CORRECT_SUB_MODULE_CODE));
    // case one: set questionnaire isFinal to false
    questionnaire.setActive(false);
    Assert.assertFalse(questionnaireAnswerServiceImpl.checkIfQuestionnaireIsActiveForModule(questionnaireId,
            CORRECT_MODULE_CODE, CORRECT_SUB_MODULE_CODE));
    // case two: set questionnaire isFinal to true and set the previously correct usage to incorrect module code
    questionnaire.setActive(true);
    usage3.setModuleItemCode(INCORRECT_MODULE_CODE);
    Assert.assertFalse(questionnaireAnswerServiceImpl.checkIfQuestionnaireIsActiveForModule(questionnaireId,
            CORRECT_MODULE_CODE, CORRECT_SUB_MODULE_CODE));
    // case three: no module match amongst the usages
    questionnaire.setActive(true);
    usage3.setModuleItemCode(CORRECT_MODULE_CODE);
    usage3.setModuleSubItemCode(INCORRECT_SUB_MODULE_CODE);
    questionnaires.clear();
    Assert.assertFalse(questionnaireAnswerServiceImpl.checkIfQuestionnaireIsActiveForModule(questionnaireId,
            NON_EXISTENT_MODULE_CODE, NON_EXISTENT_SUB_MODULE_CODE));
}
From source file:org.eclipse.equinox.http.servlet.tests.ServletTest.java
public void test_ServletContextHelperVisibility() throws Exception { String expected1 = "c"; BundleContext bundleContext = getBundleContext(); Bundle bundle = bundleContext.getBundle(); ServletContextHelper servletContextHelper = new ServletContextHelper(bundle) { };/*from w ww. j a va 2 s . c o m*/ Servlet s1 = new BaseServlet(expected1); Collection<ServiceRegistration<?>> registrations = new ArrayList<ServiceRegistration<?>>(); try { // register a hook that hides the helper from the registering bundle registrations.add(bundleContext.registerService(FindHook.class, new FindHook() { @Override public void find(BundleContext context, String name, String filter, boolean allServices, Collection<ServiceReference<?>> references) { if (ServletContextHelper.class.getName().equals(name) && context.getBundle().equals(getBundleContext().getBundle())) { references.clear(); } } }, null)); Dictionary<String, String> contextProps = new Hashtable<String, String>(); contextProps.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_NAME, "a"); contextProps.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_PATH, "/a"); registrations.add( bundleContext.registerService(ServletContextHelper.class, servletContextHelper, contextProps)); Dictionary<String, String> servletProps2 = new Hashtable<String, String>(); servletProps2.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_NAME, "S1"); servletProps2.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/s"); servletProps2.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_SELECT, "(" + HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_NAME + "=a)"); registrations.add(bundleContext.registerService(Servlet.class, s1, servletProps2)); try { requestAdvisor.request("a/s"); } catch (FileNotFoundException e) { // expected } } finally { for (ServiceRegistration<?> registration : registrations) { registration.unregister(); } } }
From source file:org.xenei.jdbc4sparql.sparql.SparqlQueryBuilder.java
/**
 * Get the SPARQL query, building it on first call.
 * <p>
 * For non-service catalogs, applies the type filters to every table and renumbers
 * blank nodes. For service catalogs, wraps the query in a SERVICE element, rebuilds
 * the projection (columns are GUID-named inside the service call), re-applies type
 * and data filters per table, and adds equality filters for USING columns. The
 * built query is cached; subsequent calls return it unchanged.
 * <p>
 * NOTE(review): uses reflection to reset Jena's protected {@code Query.projectVars}
 * field — fragile against Jena upgrades; verify the field still exists when bumping
 * the dependency.
 *
 * @return The constructed SPARQL query.
 * @throws SQLDataException on query construction failure
 */
public Query build() throws SQLDataException {
    if (!isBuilt) {
        if (!catalog.isService()) {
            // apply the type filters to each subpart.
            for (final QueryTableInfo tableInfo : infoSet.getTables()) {
                try {
                    tableInfo.addQueryFilters(infoSet);
                } catch (final SQLDataException e1) {
                    throw new IllegalStateException(e1.getMessage(), e1);
                }
            }
        }
        // renumber the Bnodes.
        final Element e = new BnodeRenumber().renumber(query.getQueryPattern());
        query.setQueryPattern(e);
        if (catalog.isService()) {
            // create a copy of the query so that we can verify that it is good.
            final Query serviceCall = query.cloneQuery();
            final VarExprList vars = serviceCall.getProject();
            // reset the serviceCall select vars via reflection:
            // protected VarExprList projectVars = new VarExprList() ;
            try {
                Field f = Query.class.getDeclaredField("projectVars");
                f.setAccessible(true);
                f.set(serviceCall, new VarExprList());
            } catch (NoSuchFieldException e2) {
                throw new IllegalStateException(e2.getMessage(), e2);
            } catch (SecurityException e2) {
                throw new IllegalStateException(e2.getMessage(), e2);
            } catch (IllegalAccessException e2) {
                throw new IllegalStateException(e2.getMessage(), e2);
            }
            final ElementService service = new ElementService(catalog.getServiceNode(),
                    new ElementSubQuery(serviceCall), false);
            final Query newResult = new Query();
            newResult.setQuerySelectType();
            final ElementGroup filterGroup = SparqlQueryBuilder.getElementGroup(newResult);
            filterGroup.addElement(service);
            final ElementGroup typeGroup = new ElementGroup();
            typeGroup.addElement(filterGroup);
            infoSet.setUseGUID(false); // we are now building complete set.
            // create the service call
            // make sure we project all vars for the filters.
            final Collection<QueryColumnInfo> typeFilters = new HashSet<QueryColumnInfo>();
            final Collection<QueryColumnInfo> dataFilters = new HashSet<QueryColumnInfo>();
            final Collection<QueryColumnInfo> columnsInQuery = new ArrayList<QueryColumnInfo>();
            // Rebuild the outer projection from the service call's GUID variables.
            for (final Var v : vars.getVars()) {
                final QueryColumnInfo colInfo = infoSet.findColumnByGUIDVar(v.getName());
                if (colInfo == null) {
                    // may be a variable associated with a function
                    if (vars.getExpr(v) != null) {
                        newResult.addResultVar(v, vars.getExpr(v));
                    } else {
                        throw new IllegalStateException(String.format("can not find column %s", v));
                    }
                } else {
                    columnsInQuery.add(colInfo);
                    newResult.addResultVar(colInfo.getVar());
                }
            }
            // add the columns to the query.
            // the columns are named by GUID in the query.
            boolean firstTable = true;
            for (final QueryTableInfo tableInfo : infoSet.getTables()) {
                // add the data type filters
                for (final Column tblCol : tableInfo.getTable().getColumnList()) {
                    QueryColumnInfo columnInfo = new QueryColumnInfo(tblCol);
                    typeFilters.add(columnInfo);
                    serviceCall.addResultVar(columnInfo.getGUIDVar());
                }
                // add the binds; USING columns are only bound for the first table
                for (QueryColumnInfo colInfo : columnsInQuery) {
                    if (colInfo.getBaseColumnInfo().getName().getTableName().equals(tableInfo.getName())) {
                        if (firstTable || !this.columnsInUsing.contains(colInfo.getName().getShortName())) {
                            dataFilters.add(colInfo);
                        }
                    }
                }
                try {
                    QueryTableInfo.addTypeFilters(infoSet, typeFilters, dataFilters,
                            tableInfo.getJoinElements(), filterGroup, typeGroup);
                } catch (final SQLDataException e1) {
                    throw new IllegalStateException(e1.getMessage(), e1);
                }
                // Filter collections are reused; clear them for the next table.
                dataFilters.clear();
                typeFilters.clear();
                firstTable = false;
            }
            // add equality check into service Call: all same-named USING columns must match
            for (final String columnName : columnsInUsing) {
                QueryColumnInfo first = null;
                Expr expr = null;
                for (final QueryColumnInfo columnInfo : infoSet
                        .listColumns(new SearchName(null, null, null, columnName))) {
                    if (first == null) {
                        first = columnInfo;
                    } else {
                        final E_Equals eq = new E_Equals(new ExprVar(first.getGUIDVar()),
                                new ExprVar(columnInfo.getGUIDVar()));
                        if (expr == null) {
                            expr = eq;
                        } else {
                            expr = new E_LogicalAnd(expr, eq);
                        }
                    }
                }
                if (expr != null) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Adding filter: {}", expr);
                    }
                    ((ElementGroup) serviceCall.getQueryPattern()).addElementFilter(new ElementFilter(expr));
                }
            }
            newResult.setQueryPattern(typeGroup);
            query = newResult;
        }
        isBuilt = true;
        if (LOG.isDebugEnabled()) {
            SparqlQueryBuilder.LOG.debug("Query parsed as {}", query);
        }
    }
    return query;
}
From source file:jetbrains.exodus.entitystore.PersistentEntityStoreRefactorings.java
void refactorMakeLinkTablesConsistent() { store.executeInReadonlyTransaction(new StoreTransactionalExecutable() { @Override/* w w w .j av a 2s . co m*/ public void execute(@NotNull final StoreTransaction tx) { final PersistentStoreTransaction txn = (PersistentStoreTransaction) tx; for (final String entityType : store.getEntityTypes(txn)) { if (log.isInfoEnabled()) { log.info("Refactoring making links' tables consistent for [" + entityType + ']'); } try { final Collection<Pair<ByteIterable, ByteIterable>> badLinks = new ArrayList<>(); final Collection<Pair<ByteIterable, ByteIterable>> deleteLinks = new ArrayList<>(); final int entityTypeId = store.getEntityTypeId(txn, entityType, false); final TwoColumnTable linksTable = store.getLinksTable(txn, entityTypeId); final Transaction envTxn = txn.getEnvironmentTransaction(); final Cursor cursor = linksTable.getFirstIndexCursor(envTxn); final Store entitiesTable = store.getEntitiesTable(txn, entityTypeId); while (cursor.getNext()) { final long localId = LongBinding.compressedEntryToLong(cursor.getKey()); if (entitiesTable.get(envTxn, LongBinding.longToCompressedEntry(localId)) == null) { do { deleteLinks.add(new Pair<>(cursor.getKey(), cursor.getValue())); } while (cursor.getNextDup()); continue; } final LinkValue linkValue = LinkValue.entryToLinkValue(cursor.getValue()); // if target doesn't exist if (store.getLastVersion(txn, linkValue.getEntityId()) < 0) { deleteLinks.add(new Pair<>(cursor.getKey(), cursor.getValue())); continue; } if (linksTable.get2(envTxn, cursor.getValue()) == null) { badLinks.add(new Pair<>(cursor.getKey(), cursor.getValue())); } } cursor.close(); if (!badLinks.isEmpty()) { store.getEnvironment().executeInTransaction(new TransactionalExecutable() { @Override public void execute(@NotNull final Transaction txn) { for (final Pair<ByteIterable, ByteIterable> badLink : badLinks) { linksTable.put(txn, badLink.getFirst(), badLink.getSecond()); } } }); if (log.isInfoEnabled()) { log.info(badLinks.size() + 
" missing links found and fixed for [" + entityType + ']'); } } badLinks.clear(); final Cursor cursor2 = linksTable.getSecondIndexCursor(envTxn); while (cursor2.getNext()) { if (linksTable.get(envTxn, cursor2.getValue()) == null) { badLinks.add(new Pair<>(cursor2.getKey(), cursor2.getValue())); } } cursor2.close(); final int badLinksSize = badLinks.size(); final int deleteLinksSize = deleteLinks.size(); if (badLinksSize > 0 || deleteLinksSize > 0) { store.getEnvironment().executeInTransaction(new TransactionalExecutable() { @Override public void execute(@NotNull final Transaction txn) { for (final Pair<ByteIterable, ByteIterable> badLink : badLinks) { deletePair(linksTable.getSecondIndexCursor(txn), badLink.getFirst(), badLink.getSecond()); } for (final Pair<ByteIterable, ByteIterable> deleteLink : deleteLinks) { deletePair(linksTable.getFirstIndexCursor(txn), deleteLink.getFirst(), deleteLink.getSecond()); deletePair(linksTable.getSecondIndexCursor(txn), deleteLink.getSecond(), deleteLink.getFirst()); } } }); if (log.isInfoEnabled()) { if (badLinksSize > 0) { log.info(badLinksSize + " redundant links found and fixed for [" + entityType + ']'); } if (deleteLinksSize > 0) { log.info(deleteLinksSize + " phantom links found and fixed for [" + entityType + ']'); } } } } catch (Throwable t) { log.error("Failed to execute refactoring for entity type: " + entityType, t); throwJVMError(t); } } } }); }
From source file:edu.ksu.cis.indus.staticanalyses.dependency.DependencyXMLizerCLI.java
/**
 * Drives the analyses.
 * <p>
 * Pipeline, in order: (1) build an OFA-based flow analyzer and three processing
 * controllers; (2) run OFA and derive the call graph; (3) run thread-graph,
 * exception-raising, and metrics processing over the call graph; (4) compute
 * aliased/static-field use-def info; (5) execute the configured dependency
 * analyses via {@code AnalysesController}; (6) xmlize the results and,
 * optionally, the Jimple. Statement ordering is significant throughout —
 * analyses are reset/hooked/unhooked around each processing phase.
 *
 * @param <T> dummy type parameter.
 */
private <T extends ITokens<T, Value>> void execute() {
    setInfoLogger(LOGGER);
    final String _tagName = "DependencyXMLizer:FA";
    // flow-sensitive, object-sensitive OFA analyzer tagged so later filters can
    // restrict processing to reachable parts
    aa = OFAnalyzer.getFSOSAnalyzer(_tagName,
            TokenUtil.<T, Value, Type>getTokenManager(new SootValueTypeManager()), getStmtGraphFactory());
    final ValueAnalyzerBasedProcessingController _pc = new ValueAnalyzerBasedProcessingController();
    final Collection<IProcessor> _processors = new ArrayList<IProcessor>();
    final PairManager _pairManager = new PairManager(false, true);
    final CallGraphInfo _cgi = new CallGraphInfo(new PairManager(false, true));
    final IThreadGraphInfo _tgi = new ThreadGraph(_cgi, new CFGAnalysis(_cgi, getBbm()), _pairManager);
    final IExceptionRaisingInfo _eti = new ExceptionRaisingAnalysis(getStmtGraphFactory(), _cgi,
            aa.getEnvironment());
    final ProcessingController _xmlcgipc = new ProcessingController();
    final ValueAnalyzerBasedProcessingController _cgipc = new ValueAnalyzerBasedProcessingController();
    final MetricsProcessor _countingProcessor = new MetricsProcessor();
    final OFABasedCallInfoCollector _callGraphInfoCollector = new OFABasedCallInfoCollector();
    final OneAllStmtSequenceRetriever _ssr = new OneAllStmtSequenceRetriever();
    _ssr.setStmtGraphFactory(getStmtGraphFactory());
    // _pc: filters by the OFA tag; _cgipc: filters by call-graph reachability;
    // _xmlcgipc: call-graph-based filtering for xmlizing
    _pc.setStmtSequencesRetriever(_ssr);
    _pc.setAnalyzer(aa);
    _pc.setProcessingFilter(new TagBasedProcessingFilter(_tagName));
    _cgipc.setAnalyzer(aa);
    _cgipc.setProcessingFilter(new CGBasedProcessingFilter(_cgi));
    _cgipc.setStmtSequencesRetriever(_ssr);
    _xmlcgipc.setEnvironment(aa.getEnvironment());
    _xmlcgipc.setProcessingFilter(new CGBasedXMLizingProcessingFilter(_cgi));
    _xmlcgipc.setStmtSequencesRetriever(_ssr);
    final StaticFieldUseDefInfo _staticFieldUD = new StaticFieldUseDefInfo();
    final AliasedUseDefInfo _aliasUD;
    // v1 vs v2 aliased use-def is a CLI choice; v2 additionally uses the thread graph
    if (useAliasedUseDefv1) {
        _aliasUD = new AliasedUseDefInfo(aa, bbm, _pairManager, new CFGAnalysis(_cgi, bbm));
    } else {
        _aliasUD = new AliasedUseDefInfov2(aa, _cgi, _tgi, bbm, _pairManager);
    }
    // publish the shared analysis artifacts under their well-known IDs so the
    // dependency analyses can look them up
    info.put(ICallGraphInfo.ID, _cgi);
    info.put(IThreadGraphInfo.ID, _tgi);
    info.put(PairManager.ID, _pairManager);
    info.put(IEnvironment.ID, aa.getEnvironment());
    info.put(IValueAnalyzer.ID, aa);
    info.put(IUseDefInfo.ALIASED_USE_DEF_ID, _aliasUD);
    info.put(IUseDefInfo.GLOBAL_USE_DEF_ID, _staticFieldUD);
    info.put(IStmtGraphFactory.ID, getStmtGraphFactory());
    final EquivalenceClassBasedEscapeAnalysis _ecba = new EquivalenceClassBasedEscapeAnalysis(_cgi, _tgi,
            getBbm());
    info.put(IEscapeInfo.ID, _ecba.getEscapeInfo());
    final IMonitorInfo _monitorInfo = new MonitorAnalysis();
    info.put(IMonitorInfo.ID, _monitorInfo);
    final SafeLockAnalysis _sla;
    if (useSafeLockAnalysis) {
        _sla = new SafeLockAnalysis();
        info.put(SafeLockAnalysis.ID, _sla);
    } else {
        _sla = null;
    }
    initialize();
    // Phase: run OFA from the root methods, then collect call info from it
    aa.analyze(getEnvironment(), getRootMethods());
    _callGraphInfoCollector.reset();
    _processors.clear();
    _processors.add(_callGraphInfoCollector);
    _pc.reset();
    _pc.driveProcessors(_processors);
    _cgi.createCallGraphInfo(_callGraphInfoCollector.getCallInfo());
    writeInfo("CALL GRAPH:\n" + _cgi.toString());
    if (commonUncheckedException) {
        final ExceptionRaisingAnalysis _t = (ExceptionRaisingAnalysis) _eti;
        _t.setupForCommonUncheckedExceptions();
    }
    // Phase: thread graph, exception info, and metrics over the call graph
    _processors.clear();
    ((ThreadGraph) _tgi).reset();
    _processors.add((IProcessor) _tgi);
    _processors.add((IProcessor) _eti);
    _processors.add(_countingProcessor);
    _cgipc.reset();
    _cgipc.driveProcessors(_processors);
    writeInfo("THREAD GRAPH:\n" + ((ThreadGraph) _tgi).toString());
    writeInfo("EXCEPTION THROW INFO:\n" + ((ExceptionRaisingAnalysis) _eti).toString());
    writeInfo("STATISTICS: " + MapUtils.verbosePrint(new TreeMap(_countingProcessor.getStatistics())));
    // Phase: use-def info — hooked up only for the duration of this process() run
    _aliasUD.hookup(_cgipc);
    _staticFieldUD.hookup(_cgipc);
    _cgipc.process();
    _staticFieldUD.unhook(_cgipc);
    _aliasUD.unhook(_cgipc);
    writeInfo("BEGIN: dependency analyses");
    if (exceptionalExits) {
        // rebuild the basic-block graph manager so CFGs include exceptional exits
        bbm = new BasicBlockGraphMgr(_eti);
        bbm.setStmtGraphFactory(getStmtGraphFactory());
    }
    final AnalysesController _ac = new AnalysesController(info, _cgipc, getBbm());
    _ac.addAnalyses(IMonitorInfo.ID, Collections.singleton((MonitorAnalysis) _monitorInfo));
    _ac.addAnalyses(EquivalenceClassBasedEscapeAnalysis.ID, Collections.singleton(_ecba));
    if (useSafeLockAnalysis) {
        _ac.addAnalyses(SafeLockAnalysis.ID, Collections.singleton(_sla));
    }
    // register every configured dependency analysis under each of its IDs
    for (final Iterator _i1 = das.iterator(); _i1.hasNext();) {
        final IDependencyAnalysis _da1 = (IDependencyAnalysis) _i1.next();
        for (final Iterator<? extends Comparable<? extends Object>> _i2 = _da1.getIds().iterator(); _i2
                .hasNext();) {
            final Comparable<? extends Object> _id = _i2.next();
            _ac.addAnalyses(_id, Collections.singleton(_da1));
        }
    }
    _ac.initialize();
    _ac.execute();
    // write xml
    for (final Iterator _i1 = das.iterator(); _i1.hasNext();) {
        final IDependencyAnalysis _da1 = (IDependencyAnalysis) _i1.next();
        for (final Iterator _i2 = _da1.getIds().iterator(); _i2.hasNext();) {
            final Object _id = _i2.next();
            MapUtils.putIntoListInMap(info, _id, _da1);
        }
    }
    xmlizer.setGenerator(new UniqueJimpleIDGenerator());
    xmlizer.writeXML(info);
    if (dumpJimple) {
        xmlizer.dumpJimple(null, xmlizer.getXmlOutputDir(), _xmlcgipc);
    }
    writeInfo("Total classes loaded: " + getEnvironment().getClasses().size());
    SystemDependenceGraphBuilder.getSystemDependenceGraph(das, _cgi, getEnvironment().getClasses());
}