List of usage examples for java.lang.Double.equals(Object)
public boolean equals(Object obj)
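Before the project examples, a minimal self-contained sketch (the class name DoubleEqualsDemo is illustrative, not taken from any project below) of what the contract of equals(Object) implies in practice: it compares the doubleToLongBits representations and only accepts another Double, so its edge cases differ from the primitive == operator.

public class DoubleEqualsDemo {
    public static void main(String[] args) {
        Double a = 0.0d;
        Double b = -0.0d;
        Double nan1 = Double.NaN;
        Double nan2 = Double.NaN;

        // equals() is based on doubleToLongBits, so +0.0 and -0.0 are NOT equal...
        System.out.println(a.equals(b));                         // false
        System.out.println(a.doubleValue() == b.doubleValue());  // true

        // ...while two NaN values ARE equal, unlike the primitive comparison.
        System.out.println(nan1.equals(nan2));                          // true
        System.out.println(nan1.doubleValue() == nan2.doubleValue());   // false

        // equals() also returns false for any argument that is not a Double.
        System.out.println(Double.valueOf(1.0).equals(1));    // false (boxes to Integer)
        System.out.println(Double.valueOf(1.0).equals(1.0f)); // false (boxes to Float)
        System.out.println(Double.valueOf(1.0).equals(1.0d)); // true  (boxes to Double)
    }
}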
From source file: org.apache.sysml.test.utils.TestUtils.java
/**
 * Compares two double values with regard to tolerance t. If one or both of them
 * is null, it is converted to 0.0.
 *
 * @param v1 first value
 * @param v2 second value
 * @param t tolerance
 * @param ignoreNaN whether NaN and infinite values are treated as equal
 * @return true if the values are equal within the tolerance
 */
public static boolean compareCellValue(Double v1, Double v2, double t, boolean ignoreNaN) {
    if (v1 == null)
        v1 = 0.0;
    if (v2 == null)
        v2 = 0.0;
    if (ignoreNaN && (v1.isNaN() || v1.isInfinite() || v2.isNaN() || v2.isInfinite()))
        return true;
    if (v1.equals(v2))
        return true;
    if (AutomatedTestBase.TEST_GPU) {
        return Math.abs(v1 - v2) <= Math.max(t, AutomatedTestBase.GPU_TOLERANCE);
    }
    return Math.abs(v1 - v2) <= t;
}
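The SystemML helper above only trusts Double.equals for the exact-match fast path and otherwise falls back to an absolute tolerance, because floating-point arithmetic rarely reproduces bit-identical values. A reduced, hypothetical sketch of that pattern (the method name and values are illustrative):

public class ToleranceCompareDemo {
    static boolean almostEqual(Double v1, Double v2, double tol) {
        if (v1 == null) v1 = 0.0;
        if (v2 == null) v2 = 0.0;
        if (v1.equals(v2)) {
            return true; // cheap exact match; also covers NaN.equals(NaN)
        }
        return Math.abs(v1 - v2) <= tol;
    }

    public static void main(String[] args) {
        System.out.println(almostEqual(0.1 + 0.2, 0.3, 1e-9)); // true, although 0.1 + 0.2 != 0.3 exactly
        System.out.println(almostEqual(0.1 + 0.2, 0.3, 0.0));  // false, the exact comparison fails
        System.out.println(almostEqual(null, 0.0, 0.0));       // true, null is treated as 0.0
    }
}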
From source file: org.sakaiproject.tool.assessment.ui.bean.delivery.ItemContentsBean.java
public void setUpdatedScoreForEmi(Double score) {
    if (!score.equals(itemData.getScore())) {
        AuthorBean author = (AuthorBean) ContextUtil.lookupBean("author");
        ItemService itemService = null;
        if (author.getIsEditPendingAssessmentFlow()) {
            itemService = new ItemService();
        } else {
            itemService = new PublishedItemService();
        }
        ItemFacade item = itemService.getItem(itemData.getItemId(), AgentFacade.getAgentString());
        item.setScore(score);
        int answerCombinations = 0;
        double correctAnswerScore = 0.0;
        ItemDataIfc data = item.getData();
        Set itemTextSet = data.getItemTextSet();
        Iterator iter = itemTextSet.iterator();
        while (iter.hasNext()) {
            ItemTextIfc itemText = (ItemTextIfc) iter.next();
            if (!itemText.isEmiQuestionItemText())
                continue;
            answerCombinations++;
        }
        iter = itemTextSet.iterator();
        while (iter.hasNext()) {
            ItemTextIfc itemText = (ItemTextIfc) iter.next();
            if (!itemText.isEmiQuestionItemText())
                continue;
            int requiredOptions = itemText.getRequiredOptionsCount();
            Double optionScore = item.getScore() / answerCombinations / requiredOptions;
            Set<AnswerIfc> answerSet = itemText.getAnswerSet();
            Iterator<AnswerIfc> iter2 = answerSet.iterator();
            while (iter2.hasNext()) {
                AnswerIfc answer = iter2.next();
                log.debug("old value " + answer.getScore() + "new value " + optionScore);
                answer.setScore(optionScore);
                answer.setDiscount(optionScore);
            }
            EventTrackingService.post(EventTrackingService.newEvent("sam.assessment.revise",
                    "itemId=" + itemData.getItemId(), true));
        }
        itemService.saveItem(item);
        itemData.setScore(score);
    }
}
From source file: com.apptentive.android.sdk.tests.module.engagement.InteractionTest.java
public void testInteractionSavingAndLoading() {
    Log.e("Running test: testCriteriaDaysSinceInstall()\n\n");
    resetDevice();
    final String testInteraction = "test.interaction";
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "1.0", 1);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "1.1", 2);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "1.1", 3);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "1.1", 3);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "1.1", 3);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.0", 4);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.0", 4);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.0", 4);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.0", 4);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.0", 5);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.0", 5);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.1", 6);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.1", 6);
    CodePointStore.storeRecord(getTargetContext(), true, testInteraction, "2.1", 6);
    long value = 0;
    value = CodePointStore.getTotalInvokes(getTargetContext(), true, testInteraction);
    assertEquals(value, 14);
    value = CodePointStore.getVersionInvokes(getTargetContext(), true, testInteraction, "1.0");
    assertEquals(value, 1);
    value = CodePointStore.getVersionInvokes(getTargetContext(), true, testInteraction, "1.1");
    assertEquals(value, 4);
    value = CodePointStore.getVersionInvokes(getTargetContext(), true, testInteraction, "2.0");
    assertEquals(value, 6);
    value = CodePointStore.getVersionInvokes(getTargetContext(), true, testInteraction, "2.1");
    assertEquals(value, 3);
    value = CodePointStore.getBuildInvokes(getTargetContext(), true, testInteraction, "1");
    assertEquals(value, 1);
    value = CodePointStore.getBuildInvokes(getTargetContext(), true, testInteraction, "2");
    assertEquals(value, 1);
    value = CodePointStore.getBuildInvokes(getTargetContext(), true, testInteraction, "3");
    assertEquals(value, 3);
    value = CodePointStore.getBuildInvokes(getTargetContext(), true, testInteraction, "4");
    assertEquals(value, 4);
    value = CodePointStore.getBuildInvokes(getTargetContext(), true, testInteraction, "5");
    assertEquals(value, 2);
    value = CodePointStore.getBuildInvokes(getTargetContext(), true, testInteraction, "6");
    assertEquals(value, 3);
    Double lastInvoke = CodePointStore.getLastInvoke(getTargetContext(), true, testInteraction);
    assertFalse(lastInvoke.equals(0d));
    Log.e("Finished test.");
}
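The closing assertion, assertFalse(lastInvoke.equals(0d)), depends on the literal 0d autoboxing to a Double. A hypothetical sketch (class name and value are illustrative) of why the suffix matters:

public class BoxedZeroCheckDemo {
    public static void main(String[] args) {
        Double lastInvoke = 0.0d; // pretend the store reported "never invoked"

        System.out.println(lastInvoke.equals(0d)); // true  -> 0d boxes to Double, values are compared
        System.out.println(lastInvoke.equals(0));  // false -> 0 boxes to Integer, equals is type-strict
        System.out.println(lastInvoke.equals(0f)); // false -> 0f boxes to Float, also rejected
    }
}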
From source file: netdecoder.NetDecoder.java
public int correlationChange(String gene, Map<String, Node> controlNetwork, Map<String, Node> diseaseNetwork) {
    int count = 1;
    if (controlNetwork.containsKey(gene) && diseaseNetwork.containsKey(gene)) {
        Node geneControl = controlNetwork.get(gene);
        Node geneDisease = diseaseNetwork.get(gene);
        List<Edge> edgesControl = geneControl.getEdges();
        List<Edge> edgesDisease = geneDisease.getEdges();
        List<Edge> aux = new ArrayList(edgesControl);
        aux.retainAll(edgesDisease);
        for (Edge e : aux) {
            int iControl = edgesControl.indexOf(e);
            int iDisease = edgesDisease.indexOf(e);
            Double sigControl = Math.signum(edgesControl.get(iControl).getSignScore());
            Double sigDisease = Math.signum(edgesDisease.get(iDisease).getSignScore());
            if (!sigControl.equals(sigDisease)) {
                count++;
                //System.out.println(e + "\t" + edgesControl.get(iControl).getSignScore());
                //System.out.println(e + "\t" + edgesDisease.get(iDisease).getSignScore());
            }
        }
    }
    return count;
}
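correlationChange boxes the results of Math.signum into Double and compares them with equals. A small hypothetical sketch of that comparison (values are illustrative), including the signed-zero edge case where equals and == disagree:

public class SignumCompareDemo {
    public static void main(String[] args) {
        Double sigControl = Math.signum(3.7);   // 1.0
        Double sigDisease = Math.signum(-2.1);  // -1.0
        System.out.println(!sigControl.equals(sigDisease)); // true -> counted as a correlation change

        // Math.signum preserves the sign of zero, and Double.equals distinguishes -0.0 from 0.0.
        Double zeroPos = Math.signum(0.0);   // 0.0
        Double zeroNeg = Math.signum(-0.0);  // -0.0
        System.out.println(zeroPos.equals(zeroNeg));                        // false via equals()
        System.out.println(zeroPos.doubleValue() == zeroNeg.doubleValue()); // true via ==
    }
}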
From source file: org.sakaiproject.tool.assessment.ui.bean.delivery.ItemContentsBean.java
public void setUpdatedScore(Double score) {
    // added conditional processing
    if (itemData.getTypeId().equals(TypeFacade.EXTENDED_MATCHING_ITEMS)) {
        setUpdatedScoreForEmi(score);
        return;
    }
    if (!score.equals(itemData.getScore())) {
        AuthorBean author = (AuthorBean) ContextUtil.lookupBean("author");
        ItemService itemService = null;
        if (author.getIsEditPendingAssessmentFlow()) {
            itemService = new ItemService();
        } else {
            itemService = new PublishedItemService();
        }
        ItemFacade item = itemService.getItem(itemData.getItemId(), AgentFacade.getAgentString());
        item.setScore(score);
        ItemDataIfc data = item.getData();
        Set itemTextSet = data.getItemTextSet();
        Iterator iter = itemTextSet.iterator();
        while (iter.hasNext()) {
            ItemTextIfc itemText = (ItemTextIfc) iter.next();
            Set<AnswerIfc> answerSet = itemText.getAnswerSet();
            Iterator<AnswerIfc> iter2 = answerSet.iterator();
            while (iter2.hasNext()) {
                AnswerIfc answer = iter2.next();
                log.debug("old value " + answer.getScore() + "new value " + score);
                answer.setScore(score);
            }
            EventTrackingService.post(EventTrackingService.newEvent("sam.assessment.revise",
                    "siteId=" + AgentFacade.getCurrentSiteId() + ", itemId=" + itemData.getItemId(), true));
        }
        itemService.saveItem(item);
        itemData.setScore(score);
    }
}
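Both Sakai setters above (setUpdatedScore and setUpdatedScoreForEmi) guard the update with !score.equals(itemData.getScore()), which throws a NullPointerException if the incoming score is null. A hypothetical null-safe variant of the same guard using java.util.Objects.equals (the method and class names are illustrative, not part of Sakai):

import java.util.Objects;

public class NullSafeScoreGuardDemo {
    static boolean scoreChanged(Double newScore, Double currentScore) {
        // Objects.equals(null, null) is true; exactly one null counts as "changed".
        return !Objects.equals(newScore, currentScore);
    }

    public static void main(String[] args) {
        System.out.println(scoreChanged(2.5, 2.5));  // false -> skip the save
        System.out.println(scoreChanged(2.5, null)); // true  -> persist the new score
        System.out.println(scoreChanged(null, 2.5)); // true  -> no NullPointerException here
    }
}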
From source file: com.jskj.assets.server.servcie.Yimiaoshenqingdan.YimiaoshenqingdanService.java
/** * (???)/*from w w w. j a v a2 s. com*/ * * @param pager * @param conditionSql * @return */ @Override public YimiaotuihuoshenqingFindEntity findYimiaotuihuoshenqinglist(Pager pager, String conditionSql) { YimiaotuihuoshenqingFindEntity findEntitys = new YimiaotuihuoshenqingFindEntity(); List<YimiaotuihuoEntity> yimiaotuihuolist = new ArrayList<YimiaotuihuoEntity>(); //Backsale_detail_tb Backsale_detail_tbExample backsaledetailexam = new Backsale_detail_tbExample(); backsaledetailexam.createCriteria().addConditionSql(conditionSql); //count?limtStart/limitEnd int count = backsaledetailmapper.countByExample(backsaledetailexam); log.debug("found yimiaoxiaoshoutuihuoshenqing total:" + count); pager.setCount(count); backsaledetailexam.setLimitStart(pager.getStartDataIndex()); backsaledetailexam.setLimitEnd(pager.getPageSize()); //? backsaledetailexam.setOrderByClause("backsale_date DESC"); findEntitys.setCount(count); List<Backsale_detail_tb> backsale_detail_list = backsaledetailmapper.selectByExample(backsaledetailexam); if (backsale_detail_list.size() > 0) { for (Backsale_detail_tb bsd : backsale_detail_list) { log.debug(YimiaoshenqingdanService.class.getName() + "?" + bsd.getBacksaleId()); YiMiaotb yimiao = new YiMiaotb(); yimiao = yimiaomapper.selectByPrimaryKey(bsd.getYimiaoId()); YimiaotuihuoEntity ymth = new YimiaotuihuoEntity(); if (bsd.getBacksaleId() == bsd.getBacksaleId() && bsd.getStatus() == 3) { //??ID? YimiaodengjitbExample sqdexam = new YimiaodengjitbExample(); sqdexam.createCriteria().andXiangdanIdEqualTo(bsd.getBackDetailId()) .andStatusIn(new ArrayList<Integer>(Arrays.asList(3, 4))); List<Yimiaodengjitb> sqdlist = YimiaodengjitbMapper.selectByExample(sqdexam); Double chashu = 0d; Double shulian = chashu; ymth.setDengjishuliang(Arith.decimal(bsd.getQuantity(), 5)); // yimiaoshenqingdan.setQuantity(yimiaoshenqingdan.getQuantity()); //?? for (int i = 0; i < sqdlist.size(); i++) { //?? chashu = Arith.decimal(sqdlist.get(i).getQuantity(), 5); shulian = Arith.decimal(chashu + shulian, 5); Double strQuantity = Arith.round(Arith.mul(Arith.decimal(bsd.getQuantity() - shulian, 5), Arith.div(1, yimiao.getYimiaoHuansuanlv(), 0)), 0); strQuantity = Arith.div(strQuantity, Arith.div(1, yimiao.getYimiaoHuansuanlv(), 0), 5); // ymth.setDengjishuliang(Arith.decimal(bsd.getQuantity() - shulian, 5)); // bsd.setQuantity(ymth.getDengjishuliang()); if (shulian.equals(bsd.getQuantity())) { bsd.setStatus(4); Backsale_detail_tbExample ymsqd = new Backsale_detail_tbExample(); ymsqd.createCriteria().andBackDetailIdEqualTo(bsd.getBackDetailId()); backsaledetailmapper.updateByExample(bsd, ymsqd); } if (strQuantity <= 0) { continue; } ymth.setDengjishuliang(strQuantity); } } ymth.setBacksaledetails(bsd); BacksaletbExample backsaleexam = new BacksaletbExample(); backsaleexam.createCriteria().andBacksaleIdEqualTo(bsd.getBacksaleId()); List<Backsaletb> backsale_list = backsalemapper.selectByExample(backsaleexam); if (backsale_list.size() > 0) { for (Backsaletb bs : backsale_list) { log.debug(YimiaoshenqingdanService.class.getName() + "?" + bs.getBacksaleId()); ymth.setBacksale(bs); if (bs.getSupplierId() != null && !bs.getSupplierId().equals("")) { SupplierExample supplierexam = new SupplierExample(); supplierexam.createCriteria().andSupplierIdEqualTo(bs.getSupplierId()); List<Supplier> spl = suppliermapper.selectByExample(supplierexam); ymth.setSupplier(spl.get(0)); log.debug(YimiaoshenqingdanService.class.getName() + "??" 
+ spl.get(0).getSupplierName()); } if (bs.getCustomerId() != null && !bs.getCustomerId().equals("")) { KehudanweitbExample kehudanweiExample = new KehudanweitbExample(); kehudanweiExample.createCriteria().andKehudanweiIdEqualTo(bs.getCustomerId()); List<Kehudanweitb> khdwl = kehudanweimapper.selectByExample(kehudanweiExample); ymth.setKehudanwei(khdwl.get(0)); log.debug(YimiaoshenqingdanService.class.getName() + "?" + khdwl.get(0).getKehudanweiName()); } } } YiMiaotbExample yimiaoexam = new YiMiaotbExample(); yimiaoexam.createCriteria().andYimiaoIdEqualTo(bsd.getYimiaoId()); List<YiMiaotb> yimiaolist = yimiaomapper.selectByExample(yimiaoexam); if (yimiaolist.size() > 0) { for (YiMiaotb ym : yimiaolist) { ymth.setYimiao(ym); } } if (ymth.getBacksaledetails().getStatus() == 3) { yimiaotuihuolist.add(ymth); } } } findEntitys.setResult(yimiaotuihuolist); return findEntitys; }
From source file: org.polymap.kaps.ui.form.NHK2010BewertungFormEditorPage.java
@SuppressWarnings("unchecked") private Composite createSumForm(final IFormEditorPageSite site, final Section section) { Composite parent = (Composite) section.getClient(); int col1 = 45; int col2 = 65; Control newLine, lastLine = null; newLine = createLabel(parent, "Zeitwerte", "Summe der Gebudezeitwerte", left().right(col1).top(lastLine), SWT.RIGHT);/* ww w . ja va 2 s . c om*/ createPreisField(bewertung.summeZeitwerte(), left().left(col2).right(100).top(lastLine), parent, false); site.addFieldListener(gesamtSumme = new IFormFieldListener() { @Override public void fieldChange(FormFieldEvent ev) { if (ev.getEventCode() == VALUE_CHANGE) { if (ev.getFieldName().equalsIgnoreCase(getPropertyName(nameTemplate.gebaeudeZeitWert()))) { // zeitwert am aktuellen gebude hat sich gendert Double zeitWert = (Double) ev.getNewValue(); Double result = 0.0d; for (NHK2010BewertungGebaeudeComposite gebaeude : getElements()) { if (zeitWert != null && selectedComposite.get() != null && selectedComposite.get() .laufendeNummer().get() == gebaeude.laufendeNummer().get()) { // gewhltes gebude gefunden // property nehmen result += zeitWert; } else { result += gebaeude.gebaeudeZeitWert().get() != null ? gebaeude.gebaeudeZeitWert().get() : 0.0d; } } pageSite.setFieldValue(bewertung.summeZeitwerte().qualifiedName().name(), result != null ? NumberFormatter.getFormatter(2).format(result) : null); } } } }); lastLine = newLine; newLine = createLabel(parent, "Bauteile", "+/- nicht erfasste Bauteile", left().right(col1).top(lastLine), SWT.RIGHT); createPreisField(bewertung.nichtErfassteBauteile(), left().left(col2).right(100).top(lastLine), parent, true); lastLine = newLine; newLine = createLabel(parent, "Auenanlagen", "Wert der Auenanlagen", left().right(col1).top(lastLine), SWT.RIGHT); createPreisField(bewertung.wertDerAussenanlagen(), left().left(col2).right(100).top(lastLine), parent, true); lastLine = newLine; newLine = createLabel(parent, "Auenanlagen in %", "Wert der Auenanlagen in % vom Gebudezeitwert", left().right(col1).top(lastLine), SWT.RIGHT); createBooleanField(bewertung.aussenAnlagenInProzent(), left().left(col1).right(col2).top(lastLine), parent); createFlaecheField(bewertung.prozentwertDerAussenanlagen(), left().left(col2).right(100).top(lastLine), parent, true); site.addFieldListener(aussenAnlagenListener = new IFormFieldListener() { @Override public void fieldChange(FormFieldEvent ev) { if (ev.getEventCode() == IFormFieldListener.VALUE_CHANGE) { if (ev.getFieldName().equals(bewertung.aussenAnlagenInProzent().qualifiedName().name())) { Boolean value = (Boolean) ev.getNewValue(); enableAussenanlageProzent(site, value); } } } }); lastLine = newLine; newLine = createLabel(parent, "Gesamtwert", "Gesamtwert der baulichen und sonstigen Anlagen", left().right(col1).top(lastLine), SWT.RIGHT); createPreisField(bewertung.gesamtWert(), left().left(col2).right(100).top(lastLine), parent, false); site.addFieldListener(gesamtWert = new FieldCalculation(site, 2, bewertung.gesamtWert(), bewertung.summeZeitwerte(), bewertung.nichtErfassteBauteile(), bewertung.wertDerAussenanlagen(), bewertung.prozentwertDerAussenanlagen()) { @Override protected Double calculate(ValueProvider values) { // summe ber alles Double result = new Double(0.0d); if (values.get(bewertung.summeZeitwerte()) != null) { result += values.get(bewertung.summeZeitwerte()); } if (values.get(bewertung.nichtErfassteBauteile()) != null) { result += values.get(bewertung.nichtErfassteBauteile()); } if (aussenanlageProzent) { if 
(values.get(bewertung.prozentwertDerAussenanlagen()) != null) { Double prozent = values.get(bewertung.prozentwertDerAussenanlagen()); result += result / 100 * prozent; } } else { if (values.get(bewertung.wertDerAussenanlagen()) != null) { result += values.get(bewertung.wertDerAussenanlagen()); } } return result; } }); lastLine = newLine; final VertragComposite vertrag = bewertung.vertrag().get(); String label = vertrag == null ? "Kein Vertrag zugewiesen" : "In Vertrag " + EingangsNummerFormatter.format(vertrag.eingangsNr().get()) + " bernehmen"; ActionButton openErweiterteDaten = new ActionButton(parent, new Action(label) { @Override public void run() { VertragsdatenBaulandComposite erweitert = VertragsdatenBaulandComposite.Mixin.forVertrag(vertrag); if (erweitert != null) { Double newValue = gesamtWert.getLastResultValue() == null ? bewertung.gesamtWert().get() : gesamtWert.getLastResultValue(); if (newValue != null && !newValue.equals(erweitert.wertDerBaulichenAnlagen())) { FormEditor editor = KapsPlugin.openEditor(fs, VertragsdatenBaulandComposite.NAME, erweitert); editor.setActivePage(VertragsdatenBaulandBodenwertFormEditorPage.class.getName()); EventManager.instance() .publish(new InterEditorPropertyChangeEvent(formEditor, editor, erweitert, erweitert.wertDerBaulichenAnlagen().qualifiedName().name(), erweitert.wertDerBaulichenAnlagen().get(), newValue)); EventManager.instance() .publish(new InterEditorPropertyChangeEvent(formEditor, editor, erweitert, erweitert.bewertungsMethode().qualifiedName().name(), erweitert.bewertungsMethode().get(), "NHK2010")); } MessageDialog.openInformation(PolymapWorkbench.getShellToParentOn(), "Wert bernommen", "Der Gesamtwert der baulichen Anlagen wurde in \"Wert der baulichen Anlagen\" im Reiter \"Boden- und Gebudewert \" in " + VertragsdatenBaulandComposite.NAME + " bernommen. Die Formulare werden entsprechend angezeigt."); } } }); openErweiterteDaten.setLayoutData(left().height(25).top(lastLine).bottom(100).create()); openErweiterteDaten.setEnabled(vertrag != null); newLine = openErweiterteDaten; enableAussenanlageProzent(site, bewertung.aussenAnlagenInProzent().get()); return section; }
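In the action above, the guard !newValue.equals(erweitert.wertDerBaulichenAnlagen()) compares a Double against whatever that accessor returns; elsewhere on this page values are read via .get(). If the accessor returns a property wrapper rather than the unwrapped Double, the equals call can only ever return false, because Double.equals is type-strict. A hypothetical illustration (the Property class here is a stand-in, not the Polymap API):

public class WrapperEqualsDemo {
    static class Property<T> {            // stand-in for a framework property type
        final T value;
        Property(T value) { this.value = value; }
        T get() { return value; }
    }

    public static void main(String[] args) {
        Double newValue = 125000.0;
        Property<Double> stored = new Property<>(125000.0);

        System.out.println(newValue.equals(stored));        // false -> compared against the wrapper object
        System.out.println(newValue.equals(stored.get()));  // true  -> compared against the unwrapped Double
    }
}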
From source file: com.jskj.assets.server.servcie.Yimiaoshenqingdan.YimiaoshenqingdanService.java
@Override public ShenbaoyimiaoFindEntity findShenbaoyimiaodj(Pager pager, String conditionSql) { YimiaoshenqingdantbExample yimiaoshenqingdanexample = new YimiaoshenqingdantbExample(); ShenbaoyimiaoFindEntity findEntity = new ShenbaoyimiaoFindEntity(); if (conditionSql != null && !conditionSql.trim().equals("")) { yimiaoshenqingdanexample.createCriteria().addConditionSql(conditionSql); }//from w w w .j a v a 2s . co m //count?limtStart/limitEnd int count = mapper.countByExample(yimiaoshenqingdanexample); log.debug("found shenbaoyimiao total:" + count); pager.setCount(count); yimiaoshenqingdanexample.setLimitStart(pager.getStartDataIndex()); yimiaoshenqingdanexample.setLimitEnd(pager.getPageSize()); //? yimiaoshenqingdanexample.setOrderByClause("xiangdan_id ASC"); findEntity.setCount(count); //? List<Yimiaoshenqingdantb> yimiaoshenqingdanlist = mapper.selectByExample(yimiaoshenqingdanexample); List<ShenbaoyimiaoEntity> resultAll = new ArrayList<ShenbaoyimiaoEntity>(); for (Yimiaoshenqingdantb yimiaoshenqingdan : yimiaoshenqingdanlist) { ShenbaoyimiaoEntity shenbaoyimiaoEntity = new ShenbaoyimiaoEntity(); YiMiaotb yimiao = new YiMiaotb(); yimiao = yimiaomapper.selectByPrimaryKey(yimiaoshenqingdan.getYimiaoId()); YimiaoAll yimiaoAll = new YimiaoAll(); log.debug("found Depot and bean copy:" + yimiao.getYimiaoName()); copier.copy(yimiao, yimiaoAll, null); shenbaoyimiaoEntity.setYimiaoAll(yimiaoAll); Shenqingdantb shenqingdantb = new Shenqingdantb(); shenqingdantb = shenqingdantbMapper.selectByPrimaryKey(yimiaoshenqingdan.getShenqingdanId()); shenbaoyimiaoEntity.setYimiaoshenqingtb(yimiaoshenqingdan); shenbaoyimiaoEntity.setShenqingdan(shenqingdantb); if (yimiaoshenqingdan.getXiangdanId() == yimiaoshenqingdan.getXiangdanId() && yimiaoshenqingdan.getStatus() == 0) { //??ID? YimiaodengjitbExample sqdexam = new YimiaodengjitbExample(); sqdexam.createCriteria().andXiangdanIdEqualTo(yimiaoshenqingdan.getXiangdanId()); List<Yimiaodengjitb> sqdlist = YimiaodengjitbMapper.selectByExample(sqdexam); Double chashu = 0d; Double shulian = chashu; shenbaoyimiaoEntity.setDengjishuliang(Arith.decimal(yimiaoshenqingdan.getQuantity(), 5)); // yimiaoshenqingdan.setQuantity(yimiaoshenqingdan.getQuantity()); //?? for (int i = 0; i < sqdlist.size(); i++) { //?? chashu = Arith.decimal(sqdlist.get(i).getQuantity(), 5); shulian = Arith.decimal(chashu + shulian, 5); Double strQuantity = Arith .round(Arith.mul(Arith.decimal(yimiaoshenqingdan.getQuantity() - shulian, 5), Arith.div(1, yimiaoAll.getYimiaoHuansuanlv(), 0)), 0); strQuantity = Arith.div(strQuantity, Arith.div(1, yimiaoAll.getYimiaoHuansuanlv(), 0), 5); if (strQuantity <= 0) { continue; } // shenbaoyimiaoEntity.setDengjishuliang(Arith.decimal(yimiaoshenqingdan.getQuantity() - shulian, 5)); shenbaoyimiaoEntity.setDengjishuliang(strQuantity); if (shulian.equals(yimiaoshenqingdan.getQuantity())) { yimiaoshenqingdan.setStatus(1); YimiaoshenqingdantbExample ymsqd = new YimiaoshenqingdantbExample(); ymsqd.createCriteria().andXiangdanIdEqualTo(yimiaoshenqingdan.getXiangdanId()); mapper.updateByExample(yimiaoshenqingdan, ymsqd); } } } if (shenbaoyimiaoEntity.getYimiaoshenqingtb().getStatus() == 0) { resultAll.add(shenbaoyimiaoEntity); } } findEntity.setResult(resultAll); return findEntity; }
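Both service methods above (findYimiaotuihuoshenqinglist and findShenbaoyimiaodj) accumulate quantities and then test shulian.equals(...getQuantity()), and both appear to normalize every operand with Arith.decimal(..., 5) first. A hypothetical sketch of why that kind of rounding is what makes an exact Double.equals on an accumulated sum workable (round5 and the values are illustrative; BigDecimal is used here only for the rounding step):

import java.math.BigDecimal;
import java.math.RoundingMode;

public class RoundedSumEqualsDemo {
    static Double round5(double v) {
        return BigDecimal.valueOf(v).setScale(5, RoundingMode.HALF_UP).doubleValue();
    }

    public static void main(String[] args) {
        Double quantity = 0.3;
        Double rawSum = 0.1 + 0.1 + 0.1;                                    // 0.30000000000000004
        Double roundedSum = round5(round5(0.1) + round5(0.1) + round5(0.1)); // 0.3

        System.out.println(rawSum.equals(quantity));     // false -> the status flag would never be updated
        System.out.println(roundedSum.equals(quantity)); // true  -> the comparison behaves as intended
    }
}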
From source file: org.apache.solr.TestDistributedSearch.java
@Test public void test() throws Exception { QueryResponse rsp = null;/*from ww w.j av a 2 s. c o m*/ int backupStress = stress; // make a copy so we can restore del("*:*"); indexr(id, 1, i1, 100, tlong, 100, t1, "now is the time for all good men", "foo_sev_enum", "Medium", tdate_a, "2010-04-20T11:00:00Z", tdate_b, "2009-08-20T11:00:00Z", "foo_f", 1.414f, "foo_b", "true", "foo_d", 1.414d, s1, "z${foo}"); indexr(id, 2, i1, 50, tlong, 50, t1, "to come to the aid of their country.", "foo_sev_enum", "Medium", "foo_sev_enum", "High", tdate_a, "2010-05-02T11:00:00Z", tdate_b, "2009-11-02T11:00:00Z", s1, "z${foo}"); indexr(id, 3, i1, 2, tlong, 2, t1, "how now brown cow", tdate_a, "2010-05-03T11:00:00Z", s1, "z${foo}"); indexr(id, 4, i1, -100, tlong, 101, t1, "the quick fox jumped over the lazy dog", tdate_a, "2010-05-03T11:00:00Z", tdate_b, "2010-05-03T11:00:00Z", s1, "a"); indexr(id, 5, i1, 500, tlong, 500, t1, "the quick fox jumped way over the lazy dog", tdate_a, "2010-05-05T11:00:00Z", s1, "b"); indexr(id, 6, i1, -600, tlong, 600, t1, "humpty dumpy sat on a wall", s1, "c"); indexr(id, 7, i1, 123, tlong, 123, t1, "humpty dumpy had a great fall", s1, "d"); indexr(id, 8, i1, 876, tlong, 876, tdate_b, "2010-01-05T11:00:00Z", "foo_sev_enum", "High", t1, "all the kings horses and all the kings men", s1, "e"); indexr(id, 9, i1, 7, tlong, 7, t1, "couldn't put humpty together again", s1, "f"); commit(); // try to ensure there's more than one segment indexr(id, 10, i1, 4321, tlong, 4321, t1, "this too shall pass", s1, "g"); indexr(id, 11, i1, -987, tlong, 987, "foo_sev_enum", "Medium", t1, "An eye for eye only ends up making the whole world blind.", s1, "h"); indexr(id, 12, i1, 379, tlong, 379, t1, "Great works are performed, not by strength, but by perseverance.", s1, "i"); indexr(id, 13, i1, 232, tlong, 232, t1, "no eggs on wall, lesson learned", oddField, "odd man out", s1, "j"); indexr(id, "1001", "lowerfilt", "toyota", s1, "k"); // for spellcheck indexr(id, 14, "SubjectTerms_mfacet", new String[] { "mathematical models", "mathematical analysis" }, s1, "l"); indexr(id, 15, "SubjectTerms_mfacet", new String[] { "test 1", "test 2", "test3" }); indexr(id, 16, "SubjectTerms_mfacet", new String[] { "test 1", "test 2", "test3" }); String[] vals = new String[100]; for (int i = 0; i < 100; i++) { vals[i] = "test " + i; } indexr(id, 17, "SubjectTerms_mfacet", vals); for (int i = 100; i < 150; i++) { indexr(id, i); } commit(); handle.clear(); handle.put("timestamp", SKIPVAL); handle.put("_version_", SKIPVAL); // not a cloud test, but may use updateLog //Test common query parameters. validateCommonQueryParameters(); // random value sort for (String f : fieldNames) { query("q", "*:*", "sort", f + " desc"); query("q", "*:*", "sort", f + " asc"); } // these queries should be exactly ordered and scores should exactly match query("q", "*:*", "sort", i1 + " desc"); query("q", "*:*", "sort", "{!func}testfunc(add(" + i1 + ",5))" + " desc"); query("q", "*:*", "sort", i1 + " asc"); query("q", "*:*", "sort", i1 + " desc", "fl", "*,score"); query("q", "*:*", "sort", "n_tl1 asc", "fl", "*,score"); query("q", "*:*", "sort", "n_tl1 desc"); handle.put("maxScore", SKIPVAL); query("q", "{!func}" + i1);// does not expect maxScore. So if it comes ,ignore it. JavaBinCodec.writeSolrDocumentList() //is agnostic of request params. 
handle.remove("maxScore"); query("q", "{!func}" + i1, "fl", "*,score"); // even scores should match exactly here handle.put("highlighting", UNORDERED); handle.put("response", UNORDERED); handle.put("maxScore", SKIPVAL); query("q", "quick"); query("q", "all", "fl", "id", "start", "0"); query("q", "all", "fl", "foofoofoo", "start", "0"); // no fields in returned docs query("q", "all", "fl", "id", "start", "100"); handle.put("score", SKIPVAL); query("q", "quick", "fl", "*,score"); query("q", "all", "fl", "*,score", "start", "1"); query("q", "all", "fl", "*,score", "start", "100"); query("q", "now their fox sat had put", "fl", "*,score", "hl", "true", "hl.fl", t1); query("q", "now their fox sat had put", "fl", "foofoofoo", "hl", "true", "hl.fl", t1); query("q", "matchesnothing", "fl", "*,score"); // test that a single NOW value is propagated to all shards... if that is true // then the primary sort should always be a tie and then the secondary should always decide query("q", "{!func}ms(NOW)", "sort", "score desc," + i1 + " desc", "fl", "id"); query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.field", t1); query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.limit", 1); query("q", "*:*", "rows", 0, "facet", "true", "facet.query", "quick", "facet.query", "quick", "facet.query", "all", "facet.query", "*:*"); query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.mincount", 2); // a facet query to test out chars out of the ascii range query("q", "*:*", "rows", 0, "facet", "true", "facet.query", "{!term f=foo_s}international\u00ff\u01ff\u2222\u3333"); // simple field facet on date fields rsp = query("q", "*:*", "rows", 0, "facet", "true", "facet.limit", 1, // TODO: limit shouldn't be needed: SOLR-6386 "facet.field", tdate_a); assertEquals(1, rsp.getFacetFields().size()); rsp = query("q", "*:*", "rows", 0, "facet", "true", "facet.limit", 1, // TODO: limit shouldn't be needed: SOLR-6386 "facet.field", tdate_b, "facet.field", tdate_a); assertEquals(2, rsp.getFacetFields().size()); String facetQuery = "id:[1 TO 15]"; // simple range facet on one field query("q", facetQuery, "rows", 100, "facet", "true", "facet.range", tlong, "facet.range", tlong, "facet.range.start", 200, "facet.range.gap", 100, "facet.range.end", 900, "facet.range.method", FacetRangeMethod.FILTER); // simple range facet on one field using dv method query("q", facetQuery, "rows", 100, "facet", "true", "facet.range", tlong, "facet.range", tlong, "facet.range.start", 200, "facet.range.gap", 100, "facet.range.end", 900, "facet.range.method", FacetRangeMethod.DV); // range facet on multiple fields query("q", facetQuery, "rows", 100, "facet", "true", "facet.range", tlong, "facet.range", i1, "f." + i1 + ".facet.range.start", 300, "f." + i1 + ".facet.range.gap", 87, "facet.range.end", 900, "facet.range.start", 200, "facet.range.gap", 100, "f." + tlong + ".facet.range.end", 900, "f." + i1 + ".facet.range.method", FacetRangeMethod.FILTER, "f." 
+ tlong + ".facet.range.method", FacetRangeMethod.DV); // range facet with "other" param QueryResponse response = query("q", facetQuery, "rows", 100, "facet", "true", "facet.range", tlong, "facet.range.start", 200, "facet.range.gap", 100, "facet.range.end", 900, "facet.range.other", "all"); assertEquals(tlong, response.getFacetRanges().get(0).getName()); assertEquals(new Integer(6), response.getFacetRanges().get(0).getBefore()); assertEquals(new Integer(5), response.getFacetRanges().get(0).getBetween()); assertEquals(new Integer(2), response.getFacetRanges().get(0).getAfter()); // Test mincounts. Do NOT want to go through all the stuff where with validateControlData in query() method // Purposely packing a _bunch_ of stuff together here to insure that the proper level of mincount is used for // each ModifiableSolrParams minParams = new ModifiableSolrParams(); minParams.set("q", "*:*"); minParams.set("rows", 1); minParams.set("facet", "true"); minParams.set("facet.missing", "true"); minParams.set("facet.field", i1); minParams.set("facet.missing", "true"); minParams.set("facet.mincount", 2); // Return a separate section of ranges over i1. Should respect global range mincount minParams.set("facet.range", i1); minParams.set("f." + i1 + ".facet.range.start", 0); minParams.set("f." + i1 + ".facet.range.gap", 200); minParams.set("f." + i1 + ".facet.range.end", 1200); minParams.set("f." + i1 + ".facet.mincount", 4); // Return a separate section of ranges over tlong Should respect facet.mincount minParams.add("facet.range", tlong); minParams.set("f." + tlong + ".facet.range.start", 0); minParams.set("f." + tlong + ".facet.range.gap", 100); minParams.set("f." + tlong + ".facet.range.end", 1200); // Repeat with a range type of date minParams.add("facet.range", tdate_b); minParams.set("f." + tdate_b + ".facet.range.start", "2009-02-01T00:00:00Z"); minParams.set("f." + tdate_b + ".facet.range.gap", "+1YEAR"); minParams.set("f." + tdate_b + ".facet.range.end", "2011-01-01T00:00:00Z"); minParams.set("f." 
+ tdate_b + ".facet.mincount", 3); // Insure that global mincount is respected for facet queries minParams.set("facet.query", tdate_a + ":[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]"); // Should return some counts //minParams.set("facet.query", tdate_a + ":[* TO *]"); // Should be removed minParams.add("facet.query", tdate_b + ":[2008-01-01T00:00:00Z TO 2009-09-01T00:00:00Z]"); // Should be removed from response setDistributedParams(minParams); QueryResponse minResp = queryServer(minParams); ModifiableSolrParams eParams = new ModifiableSolrParams(); eParams.set("q", tdate_b + ":[* TO *]"); eParams.set("rows", 1000); eParams.set("fl", tdate_b); setDistributedParams(eParams); QueryResponse eResp = queryServer(eParams); // Check that exactly the right numbers of counts came through assertEquals("Should be exactly 2 range facets returned after minCounts taken into account ", 3, minResp.getFacetRanges().size()); assertEquals("Should only be 1 query facets returned after minCounts taken into account ", 1, minResp.getFacetQuery().size()); checkMinCountsField(minResp.getFacetField(i1).getValues(), new Object[] { null, 55L }); // Should just be the null entries for field checkMinCountsRange(minResp.getFacetRanges().get(0).getCounts(), new Object[] { "0", 5L }); // range on i1 checkMinCountsRange(minResp.getFacetRanges().get(1).getCounts(), new Object[] { "0", 3L, "100", 3L }); // range on tlong checkMinCountsRange(minResp.getFacetRanges().get(2).getCounts(), new Object[] { "2009-02-01T00:00:00Z", 3L }); // date (range) on tvh assertTrue("Should have a facet for tdate_a", minResp.getFacetQuery().containsKey("a_n_tdt:[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]")); int qCount = minResp.getFacetQuery().get("a_n_tdt:[2010-01-01T00:00:00Z TO 2011-01-01T00:00:00Z]"); assertEquals("tdate_a should be 5", qCount, 5); // Now let's do some queries, the above is getting too complex minParams = new ModifiableSolrParams(); minParams.set("q", "*:*"); minParams.set("rows", 1); minParams.set("facet", "true"); minParams.set("facet.mincount", 3); minParams.set("facet.query", tdate_a + ":[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]"); minParams.add("facet.query", tdate_b + ":[2009-01-01T00:00:00Z TO 2010-01-01T00:00:00Z]"); // Should be removed setDistributedParams(minParams); minResp = queryServer(minParams); assertEquals("Should only be 1 query facets returned after minCounts taken into account ", 1, minResp.getFacetQuery().size()); assertTrue("Should be an entry for a_n_tdt", minResp.getFacetQuery().containsKey("a_n_tdt:[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]")); qCount = minResp.getFacetQuery().get("a_n_tdt:[2010-01-01T00:00:00Z TO 2010-05-04T00:00:00Z]"); assertEquals("a_n_tdt should have a count of 4 ", qCount, 4); // variations of fl query("q", "*:*", "fl", "score", "sort", i1 + " desc"); query("q", "*:*", "fl", i1 + ",score", "sort", i1 + " desc"); query("q", "*:*", "fl", i1, "fl", "score", "sort", i1 + " desc"); query("q", "*:*", "fl", "id," + i1, "sort", i1 + " desc"); query("q", "*:*", "fl", "id", "fl", i1, "sort", i1 + " desc"); query("q", "*:*", "fl", i1, "fl", "id", "sort", i1 + " desc"); query("q", "*:*", "fl", "id", "fl", nint, "fl", tint, "sort", i1 + " desc"); query("q", "*:*", "fl", nint, "fl", "id", "fl", tint, "sort", i1 + " desc"); handle.put("did", SKIPVAL); query("q", "*:*", "fl", "did:[docid]", "sort", i1 + " desc"); handle.remove("did"); query("q", "*:*", "fl", "log(" + tlong + "),abs(" + tlong + "),score", "sort", i1 + " desc"); query("q", "*:*", "fl", "n_*", "sort", i1 + " 
desc"); // basic spellcheck testing query("q", "toyata", "fl", "id,lowerfilt", "spellcheck", true, "spellcheck.q", "toyata", "qt", "spellCheckCompRH_Direct", "shards.qt", "spellCheckCompRH_Direct"); stress = 0; // turn off stress... we want to tex max combos in min time for (int i = 0; i < 25 * RANDOM_MULTIPLIER; i++) { String f = fieldNames[random().nextInt(fieldNames.length)]; if (random().nextBoolean()) f = t1; // the text field is a really interesting one to facet on (and it's multi-valued too) // we want a random query and not just *:* so we'll get zero counts in facets also // TODO: do a better random query String q = random().nextBoolean() ? "*:*" : "id:(1 3 5 7 9 11 13) OR id:[100 TO " + random().nextInt(50) + "]"; int nolimit = random().nextBoolean() ? -1 : 10000; // these should be equivalent // if limit==-1, we should always get exact matches query("q", q, "rows", 0, "facet", "true", "facet.field", f, "facet.limit", nolimit, "facet.sort", "count", "facet.mincount", random().nextInt(5), "facet.offset", random().nextInt(10)); query("q", q, "rows", 0, "facet", "true", "facet.field", f, "facet.limit", nolimit, "facet.sort", "index", "facet.mincount", random().nextInt(5), "facet.offset", random().nextInt(10)); // for index sort, we should get exact results for mincount <= 1 query("q", q, "rows", 0, "facet", "true", "facet.field", f, "facet.sort", "index", "facet.mincount", random().nextInt(2), "facet.offset", random().nextInt(10), "facet.limit", random().nextInt(11) - 1); } stress = backupStress; // restore stress // test faceting multiple things at once query("q", "*:*", "rows", 0, "facet", "true", "facet.query", "quick", "facet.query", "all", "facet.query", "*:*", "facet.field", t1); // test filter tagging, facet exclusion, and naming (multi-select facet support) queryAndCompareUIF("q", "*:*", "rows", 0, "facet", "true", "facet.query", "{!key=myquick}quick", "facet.query", "{!key=myall ex=a}all", "facet.query", "*:*", "facet.field", "{!key=mykey ex=a}" + t1, "facet.field", "{!key=other ex=b}" + t1, "facet.field", "{!key=again ex=a,b}" + t1, "facet.field", t1, "fq", "{!tag=a}id:[1 TO 7]", "fq", "{!tag=b}id:[3 TO 9]"); queryAndCompareUIF("q", "*:*", "facet", "true", "facet.field", "{!ex=t1}SubjectTerms_mfacet", "fq", "{!tag=t1}SubjectTerms_mfacet:(test 1)", "facet.limit", "10", "facet.mincount", "1"); // test field that is valid in schema but missing in all shards query("q", "*:*", "rows", 100, "facet", "true", "facet.field", missingField, "facet.mincount", 2); // test field that is valid in schema and missing in some shards query("q", "*:*", "rows", 100, "facet", "true", "facet.field", oddField, "facet.mincount", 2); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "stats_dt"); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", i1); handle.put("stddev", FUZZY); handle.put("sumOfSquares", FUZZY); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", tdate_a); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", tdate_b); handle.remove("stddev"); handle.remove("sumOfSquares"); rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!cardinality='true'}" + oddField, "stats.field", "{!cardinality='true'}" + tlong); { // don't leak variabls // long FieldStatsInfo s = rsp.getFieldStatsInfo().get(tlong); assertNotNull("missing stats", s); assertEquals("wrong cardinality", new Long(13), s.getCardinality()); // assertNull("expected null for min", s.getMin()); assertNull("expected null for 
mean", s.getMean()); assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); assertNull("expected null for distinct vals", s.getDistinctValues()); assertNull("expected null for max", s.getMax()); assertNull("expected null for missing", s.getMissing()); assertNull("expected null for stddev", s.getStddev()); assertNull("expected null for sum", s.getSum()); assertNull("expected null for percentiles", s.getSum()); // string s = rsp.getFieldStatsInfo().get(oddField); assertNotNull("missing stats", s); assertEquals("wrong cardinality", new Long(1), s.getCardinality()); // assertNull("expected null for min", s.getMin()); assertNull("expected null for mean", s.getMean()); assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); assertNull("expected null for distinct vals", s.getDistinctValues()); assertNull("expected null for max", s.getMax()); assertNull("expected null for missing", s.getMissing()); assertNull("expected null for stddev", s.getStddev()); assertNull("expected null for sum", s.getSum()); assertNull("expected null for percentiles", s.getSum()); } query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!percentiles='1,2,3,4,5'}" + i1); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!percentiles='1,20,30,40,98,99,99.9'}" + i1); rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!percentiles='1.0,99.999,0.001'}" + tlong); { // don't leak variabls Double[] expectedKeys = new Double[] { 1.0D, 99.999D, 0.001D }; Double[] expectedVals = new Double[] { 2.0D, 4320.0D, 2.0D }; FieldStatsInfo s = rsp.getFieldStatsInfo().get(tlong); assertNotNull("no stats for " + tlong, s); Map<Double, Double> p = s.getPercentiles(); assertNotNull("no percentils", p); assertEquals("insufficient percentiles", expectedKeys.length, p.size()); Iterator<Double> actualKeys = p.keySet().iterator(); for (int i = 0; i < expectedKeys.length; i++) { Double expectedKey = expectedKeys[i]; assertTrue("Ran out of actual keys as of : " + i + "->" + expectedKey, actualKeys.hasNext()); assertEquals(expectedKey, actualKeys.next()); assertEquals("percentiles are off: " + p.toString(), expectedVals[i], p.get(expectedKey), 1.0D); } // assertNull("expected null for count", s.getMin()); assertNull("expected null for count", s.getMean()); assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); assertNull("expected null for distinct vals", s.getDistinctValues()); assertNull("expected null for max", s.getMax()); assertNull("expected null for missing", s.getMissing()); assertNull("expected null for stddev", s.getStddev()); assertNull("expected null for sum", s.getSum()); } query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!percentiles='1,20,50,80,99'}" + tdate_a); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "fq", "{!tag=nothing}-*:*", "stats.field", "{!key=special_key ex=nothing}stats_dt"); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "f.stats_dt.stats.calcdistinct", "true", "stats.field", "{!key=special_key}stats_dt"); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "f.stats_dt.stats.calcdistinct", "true", "fq", "{!tag=xxx}id:[3 TO 9]", "stats.field", "{!key=special_key}stats_dt", "stats.field", "{!ex=xxx}stats_dt"); handle.put("stddev", FUZZY); handle.put("sumOfSquares", FUZZY); query("q", "*:*", "sort", i1 + " 
desc", "stats", "true", // do a really simple query so distributed IDF doesn't cause problems // when comparing with control collection "stats.field", "{!lucene key=q_key}" + i1 + "foo_b:true", "stats.field", "{!func key=f_key}sum(" + tlong + "," + i1 + ")"); query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "stats_dt", "stats.field", i1, "stats.field", tdate_a, "stats.field", tdate_b); // only ask for "min" and "mean", explicitly exclude deps of mean, whitebox check shard responses try { RequestTrackingQueue trackingQueue = new RequestTrackingQueue(); TrackingShardHandlerFactory.setTrackingQueue(jettys, trackingQueue); rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!min=true sum=false mean=true count=false}" + i1); FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); assertNotNull("no stats for " + i1, s); // assertEquals("wrong min", -987.0D, (Double) s.getMin(), 0.0001D); assertEquals("wrong mean", 377.153846D, (Double) s.getMean(), 0.0001D); // assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); assertNull("expected null for distinct vals", s.getDistinctValues()); assertNull("expected null for max", s.getMax()); assertNull("expected null for missing", s.getMissing()); assertNull("expected null for stddev", s.getStddev()); assertNull("expected null for sum", s.getSum()); assertNull("expected null for percentiles", s.getPercentiles()); assertNull("expected null for cardinality", s.getCardinality()); // sanity check deps relationship for (Stat dep : EnumSet.of(Stat.sum, Stat.count)) { assertTrue("Purpose of this test is to ensure that asking for some stats works even when the deps " + "of those stats are explicitly excluded -- but the expected dep relationshp is no longer valid. 
" + "ie: who changed the code and didn't change this test?, expected: " + dep, Stat.mean.getDistribDeps().contains(dep)); } // check our shard requests & responses - ensure we didn't get unneccessary stats from every shard int numStatsShardRequests = 0; EnumSet<Stat> shardStatsExpected = EnumSet.of(Stat.min, Stat.sum, Stat.count); for (List<ShardRequestAndParams> shard : trackingQueue.getAllRequests().values()) { for (ShardRequestAndParams shardReq : shard) { if (shardReq.params.getBool(StatsParams.STATS, false)) { numStatsShardRequests++; for (ShardResponse shardRsp : shardReq.sreq.responses) { NamedList<Object> shardStats = ((NamedList<NamedList<NamedList<Object>>>) shardRsp .getSolrResponse().getResponse().get("stats")).get("stats_fields").get(i1); assertNotNull("no stard stats for " + i1, shardStats); // for (Map.Entry<String, Object> entry : shardStats) { Stat found = Stat.forName(entry.getKey()); assertNotNull("found shardRsp stat key we were not expecting: " + entry, found); assertTrue("found stat we were not expecting: " + entry, shardStatsExpected.contains(found)); } } } } } assertTrue("did't see any stats=true shard requests", 0 < numStatsShardRequests); } finally { TrackingShardHandlerFactory.setTrackingQueue(jettys, null); } // only ask for "min", "mean" and "stddev", rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!min=true mean=true stddev=true}" + i1); { // don't leak variables FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); assertNotNull("no stats for " + i1, s); // assertEquals("wrong min", -987.0D, (Double) s.getMin(), 0.0001D); assertEquals("wrong mean", 377.153846D, (Double) s.getMean(), 0.0001D); assertEquals("wrong stddev", 1271.76215D, (Double) s.getStddev(), 0.0001D); // assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); assertNull("expected null for distinct vals", s.getDistinctValues()); assertNull("expected null for max", s.getMax()); assertNull("expected null for missing", s.getMissing()); assertNull("expected null for sum", s.getSum()); assertNull("expected null for percentiles", s.getPercentiles()); assertNull("expected null for cardinality", s.getCardinality()); } // request stats, but disable them all via param refs rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "doMin", "false", "stats.field", "{!min=$doMin}" + i1); { // don't leak variables FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); // stats section should exist, even though stats should be null assertNotNull("no stats for " + i1, s); // assertNull("expected null for min", s.getMin()); assertNull("expected null for mean", s.getMean()); assertNull("expected null for stddev", s.getStddev()); // assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); assertNull("expected null for distinct vals", s.getDistinctValues()); assertNull("expected null for max", s.getMax()); assertNull("expected null for missing", s.getMissing()); assertNull("expected null for sum", s.getSum()); assertNull("expected null for percentiles", s.getPercentiles()); assertNull("expected null for cardinality", s.getCardinality()); } final String[] stats = new String[] { "min", "max", "sum", "sumOfSquares", "stddev", "mean", "missing", "count" }; // ask for arbitrary pairs of stats for (String stat1 : stats) { for (String stat2 : stats) { // NOTE: stat1 might equal stat2 - good edge case to test for rsp = query("q", "*:*", "sort", i1 + " desc", 
"stats", "true", "stats.field", "{!" + stat1 + "=true " + stat2 + "=true}" + i1); final List<String> statsExpected = new ArrayList<String>(2); statsExpected.add(stat1); if (!stat1.equals(stat2)) { statsExpected.add(stat2); } // ignore the FieldStatsInfo convinience class, and look directly at the NamedList // so we don't need any sort of crazy reflection NamedList<Object> svals = ((NamedList<NamedList<NamedList<Object>>>) rsp.getResponse().get("stats")) .get("stats_fields").get(i1); assertNotNull("no stats for field " + i1, svals); assertEquals("wrong quantity of stats", statsExpected.size(), svals.size()); for (String s : statsExpected) { assertNotNull("stat shouldn't be null: " + s, svals.get(s)); assertTrue("stat should be a Number: " + s + " -> " + svals.get(s).getClass(), svals.get(s) instanceof Number); // some loose assertions since we're iterating over various stats if (svals.get(s) instanceof Double) { Double val = (Double) svals.get(s); assertFalse("stat shouldn't be NaN: " + s, val.isNaN()); assertFalse("stat shouldn't be Inf: " + s, val.isInfinite()); assertFalse("stat shouldn't be 0: " + s, val.equals(0.0D)); } else { // count or missing assertTrue("stat should be count of missing: " + s, ("count".equals(s) || "missing".equals(s))); assertTrue("stat should be a Long: " + s + " -> " + svals.get(s).getClass(), svals.get(s) instanceof Long); Long val = (Long) svals.get(s); assertFalse("stat shouldn't be 0: " + s, val.equals(0L)); } } } } // all of these diff ways of asking for min & calcdistinct should have the same result for (SolrParams p : new SolrParams[] { params("stats.field", "{!min=true calcdistinct=true}" + i1), params("stats.calcdistinct", "true", "stats.field", "{!min=true}" + i1), params("f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true}" + i1), params("stats.calcdistinct", "false", "f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true}" + i1), params("stats.calcdistinct", "false", "f." + i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true calcdistinct=true}" + i1), params("stats.calcdistinct", "false", "f." 
+ i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true countDistinct=true distinctValues=true}" + i1), params("stats.field", "{!min=true countDistinct=true distinctValues=true}" + i1), params("yes", "true", "stats.field", "{!min=$yes countDistinct=$yes distinctValues=$yes}" + i1), }) { rsp = query(SolrParams.wrapDefaults(p, params("q", "*:*", "sort", i1 + " desc", "stats", "true"))); FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); assertNotNull(p + " no stats for " + i1, s); // assertEquals(p + " wrong min", -987.0D, (Double) s.getMin(), 0.0001D); assertEquals(p + " wrong calcDistinct", new Long(13), s.getCountDistinct()); assertNotNull(p + " expected non-null list for distinct vals", s.getDistinctValues()); assertEquals(p + " expected list for distinct vals", 13, s.getDistinctValues().size()); // assertNull(p + " expected null for mean", s.getMean()); assertNull(p + " expected null for count", s.getCount()); assertNull(p + " expected null for max", s.getMax()); assertNull(p + " expected null for missing", s.getMissing()); assertNull(p + " expected null for stddev", s.getStddev()); assertNull(p + " expected null for sum", s.getSum()); assertNull(p + " expected null for percentiles", s.getPercentiles()); assertNull(p + " expected null for cardinality", s.getCardinality()); } // all of these diff ways of excluding calcdistinct should have the same result for (SolrParams p : new SolrParams[] { params("stats.field", "{!min=true calcdistinct=false}" + i1), params("stats.calcdistinct", "false", "stats.field", "{!min=true}" + i1), params("f." + i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true}" + i1), params("stats.calcdistinct", "true", "f." + i1 + ".stats.calcdistinct", "false", "stats.field", "{!min=true}" + i1), params("stats.calcdistinct", "true", "f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true calcdistinct=false}" + i1), params("stats.calcdistinct", "true", "f." + i1 + ".stats.calcdistinct", "true", "stats.field", "{!min=true countDistinct=false distinctValues=false}" + i1), }) { rsp = query(SolrParams.wrapDefaults(p, params("q", "*:*", "sort", i1 + " desc", "stats", "true"))); FieldStatsInfo s = rsp.getFieldStatsInfo().get(i1); assertNotNull(p + " no stats for " + i1, s); // assertEquals(p + " wrong min", -987.0D, (Double) s.getMin(), 0.0001D); // assertNull(p + " expected null for calcDistinct", s.getCountDistinct()); assertNull(p + " expected null for distinct vals", s.getDistinctValues()); // assertNull(p + " expected null for mean", s.getMean()); assertNull(p + " expected null for count", s.getCount()); assertNull(p + " expected null for max", s.getMax()); assertNull(p + " expected null for missing", s.getMissing()); assertNull(p + " expected null for stddev", s.getStddev()); assertNull(p + " expected null for sum", s.getSum()); assertNull(p + " expected null for percentiles", s.getPercentiles()); assertNull(p + " expected null for cardinality", s.getCardinality()); } // this field doesn't exist in any doc in the result set. 
// ensure we get expected values for the stats we ask for, but null for the stats rsp = query("q", "*:*", "sort", i1 + " desc", "stats", "true", "stats.field", "{!min=true mean=true stddev=true}does_not_exist_i"); { // don't leak variables FieldStatsInfo s = rsp.getFieldStatsInfo().get("does_not_exist_i"); assertNotNull("no stats for bogus field", s); // things we explicit expect because we asked for them // NOTE: min is expected to be null even though requested because of no values assertEquals("wrong min", null, s.getMin()); assertTrue("mean should be NaN", ((Double) s.getMean()).isNaN()); assertEquals("wrong stddev", 0.0D, (Double) s.getStddev(), 0.0D); // things that we didn't ask for, so they better be null assertNull("expected null for count", s.getCount()); assertNull("expected null for calcDistinct", s.getCountDistinct()); assertNull("expected null for distinct vals", s.getDistinctValues()); assertNull("expected null for max", s.getMax()); assertNull("expected null for missing", s.getMissing()); assertNull("expected null for sum", s.getSum()); assertNull("expected null for percentiles", s.getPercentiles()); assertNull("expected null for cardinality", s.getCardinality()); } // look at stats on non numeric fields // // not all stats are supported on every field type, so some of these permutations will // result in no stats being computed but this at least lets us sanity check that for each // of these field+stats(s) combinations we get consistent results between the distribted // request and the single node situation. // // NOTE: percentiles excluded because it doesn't support simple 'true/false' syntax // (and since it doesn't work for non-numerics anyway, we aren't missing any coverage here) EnumSet<Stat> allStats = EnumSet.complementOf(EnumSet.of(Stat.percentiles)); int numTotalStatQueries = 0; // don't go overboard, just do all permutations of 1 or 2 stat params, for each field & query final int numStatParamsAtOnce = 2; for (int numParams = 1; numParams <= numStatParamsAtOnce; numParams++) { for (EnumSet<Stat> set : new StatSetCombinations(numParams, allStats)) { for (String field : new String[] { "foo_f", i1, tlong, tdate_a, oddField, "foo_sev_enum", // fields that no doc has any value in "bogus___s", "bogus___f", "bogus___i", "bogus___tdt", "bogus___sev_enum" }) { for (String q : new String[] { "*:*", // all docs "bogus___s:bogus", // no docs "id:" + random().nextInt(50), // 0 or 1 doc... "id:" + random().nextInt(50), "id:" + random().nextInt(100), "id:" + random().nextInt(100), "id:" + random().nextInt(200) }) { // EnumSets use natural ordering, we want to randomize the order of the params List<Stat> combo = new ArrayList<Stat>(set); Collections.shuffle(combo, random()); StringBuilder paras = new StringBuilder("{!key=k "); for (Stat stat : combo) { paras.append(stat + "=true "); } paras.append("}").append(field); numTotalStatQueries++; rsp = query("q", q, "rows", "0", "stats", "true", "stats.field", paras.toString()); // simple assert, mostly relying on comparison with single shard FieldStatsInfo s = rsp.getFieldStatsInfo().get("k"); assertNotNull(s); // TODO: if we had a programatic way to determine what stats are supported // by what field types, we could make more confident asserts here. 
} } } } handle.remove("stddev"); handle.remove("sumOfSquares"); assertEquals("Sanity check failed: either test broke, or test changed, or you adjusted Stat enum" + " (adjust constant accordingly if intentional)", 5082, numTotalStatQueries); /*** TODO: the failure may come back in "exception" try { // test error produced for field that is invalid for schema query("q","*:*", "rows",100, "facet","true", "facet.field",invalidField, "facet.mincount",2); TestCase.fail("SolrServerException expected for invalid field that is not in schema"); } catch (SolrServerException ex) { // expected } ***/ // Try to get better coverage for refinement queries by turning off over requesting. // This makes it much more likely that we may not get the top facet values and hence // we turn of that checking. handle.put("facet_fields", SKIPVAL); query("q", "*:*", "rows", 0, "facet", "true", "facet.field", t1, "facet.limit", 5, "facet.shard.limit", 5); // check a complex key name query("q", "*:*", "rows", 0, "facet", "true", "facet.field", "{!key='$a b/c \\' \\} foo'}" + t1, "facet.limit", 5, "facet.shard.limit", 5); query("q", "*:*", "rows", 0, "facet", "true", "facet.field", "{!key='$a'}" + t1, "facet.limit", 5, "facet.shard.limit", 5); handle.remove("facet_fields"); // Make sure there is no macro expansion for field values query("q", "*:*", "rows", 0, "facet", "true", "facet.field", s1, "facet.limit", 5, "facet.shard.limit", 5); query("q", "*:*", "rows", 0, "facet", "true", "facet.field", s1, "facet.limit", 5, "facet.shard.limit", 5, "expandMacros", "true"); query("q", "*:*", "rows", 0, "facet", "true", "facet.field", s1, "facet.limit", 5, "facet.shard.limit", 5, "expandMacros", "false"); // Macro expansion should still work for the parameters query("q", "*:*", "rows", 0, "facet", "true", "facet.field", "${foo}", "f.${foo}.mincount", 1, "foo", s1); query("q", "*:*", "rows", 0, "facet", "true", "facet.field", "${foo}", "f.${foo}.mincount", 1, "foo", s1, "expandMacros", "true"); // index the same document to two servers and make sure things // don't blow up. 
if (clients.size() >= 2) { index(id, 100, i1, 107, t1, "oh no, a duplicate!"); for (int i = 0; i < clients.size(); i++) { index_specific(i, id, 100, i1, 107, t1, "oh no, a duplicate!"); } commit(); query("q", "duplicate", "hl", "true", "hl.fl", t1); query("q", "fox duplicate horses", "hl", "true", "hl.fl", t1); query("q", "*:*", "rows", 100); } //SOLR 3161 ensure shards.qt=/update fails (anything but search handler really) // Also see TestRemoteStreaming#testQtUpdateFails() try { ignoreException("isShard is only acceptable"); // query("q","*:*","shards.qt","/update","stream.body","<delete><query>*:*</query></delete>"); // fail(); } catch (SolrException e) { //expected } unIgnoreException("isShard is only acceptable"); // test debugging // handle.put("explain", UNORDERED); handle.put("explain", SKIPVAL); // internal docids differ, idf differs w/o global idf handle.put("debug", UNORDERED); handle.put("time", SKIPVAL); handle.put("track", SKIP); //track is not included in single node search query("q", "now their fox sat had put", "fl", "*,score", CommonParams.DEBUG_QUERY, "true"); query("q", "id:[1 TO 5]", CommonParams.DEBUG_QUERY, "true"); query("q", "id:[1 TO 5]", CommonParams.DEBUG, CommonParams.TIMING); query("q", "id:[1 TO 5]", CommonParams.DEBUG, CommonParams.RESULTS); query("q", "id:[1 TO 5]", CommonParams.DEBUG, CommonParams.QUERY); // SOLR-6545, wild card field list indexr(id, "19", "text", "d", "cat_a_sS", "1", t1, "2"); commit(); rsp = query("q", "id:19", "fl", "id", "fl", "*a_sS"); assertFieldValues(rsp.getResults(), "id", 19); rsp = query("q", "id:19", "fl", "id," + t1 + ",cat*"); assertFieldValues(rsp.getResults(), "id", 19); // Check Info is added to for each shard ModifiableSolrParams q = new ModifiableSolrParams(); q.set("q", "*:*"); q.set(ShardParams.SHARDS_INFO, true); setDistributedParams(q); rsp = queryServer(q); NamedList<?> sinfo = (NamedList<?>) rsp.getResponse().get(ShardParams.SHARDS_INFO); String shards = getShardsString(); int cnt = StringUtils.countMatches(shards, ",") + 1; assertNotNull("missing shard info", sinfo); assertEquals("should have an entry for each shard [" + sinfo + "] " + shards, cnt, sinfo.size()); // test shards.tolerant=true for (int numDownServers = 0; numDownServers < jettys.size() - 1; numDownServers++) { List<JettySolrRunner> upJettys = new ArrayList<>(jettys); List<SolrClient> upClients = new ArrayList<>(clients); List<JettySolrRunner> downJettys = new ArrayList<>(); List<String> upShards = new ArrayList<>(Arrays.asList(shardsArr)); for (int i = 0; i < numDownServers; i++) { // shut down some of the jettys int indexToRemove = r.nextInt(upJettys.size()); JettySolrRunner downJetty = upJettys.remove(indexToRemove); upClients.remove(indexToRemove); upShards.remove(indexToRemove); ChaosMonkey.stop(downJetty); downJettys.add(downJetty); } queryPartialResults(upShards, upClients, "q", "*:*", "facet", "true", "facet.field", t1, "facet.field", t1, "facet.limit", 5, ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true"); queryPartialResults(upShards, upClients, "q", "*:*", "facet", "true", "facet.query", i1 + ":[1 TO 50]", "facet.query", i1 + ":[1 TO 50]", ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true"); // test group query queryPartialResults(upShards, upClients, "q", "*:*", "rows", 100, "fl", "id," + i1, "group", "true", "group.query", t1 + ":kings OR " + t1 + ":eggs", "group.limit", 10, "sort", i1 + " asc, id asc", CommonParams.TIME_ALLOWED, 1, ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, 
"true"); queryPartialResults(upShards, upClients, "q", "*:*", "stats", "true", "stats.field", i1, ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true"); queryPartialResults(upShards, upClients, "q", "toyata", "spellcheck", "true", "spellcheck.q", "toyata", "qt", "spellCheckCompRH_Direct", "shards.qt", "spellCheckCompRH_Direct", ShardParams.SHARDS_INFO, "true", ShardParams.SHARDS_TOLERANT, "true"); // restart the jettys for (JettySolrRunner downJetty : downJettys) { ChaosMonkey.start(downJetty); } } // This index has the same number for every field // TODO: This test currently fails because debug info is obtained only // on shards with matches. // query("q","matchesnothing","fl","*,score", "debugQuery", "true"); // Thread.sleep(10000000000L); del("*:*"); // delete all docs and test stats request commit(); try { query("q", "*:*", "stats", "true", "stats.field", "stats_dt", "stats.field", i1, "stats.field", tdate_a, "stats.field", tdate_b, "stats.calcdistinct", "true"); } catch (HttpSolrClient.RemoteSolrException e) { if (e.getMessage().startsWith("java.lang.NullPointerException")) { fail("NullPointerException with stats request on empty index"); } else { throw e; } } String fieldName = "severity"; indexr("id", "1", fieldName, "Not Available"); indexr("id", "2", fieldName, "Low"); indexr("id", "3", fieldName, "Medium"); indexr("id", "4", fieldName, "High"); indexr("id", "5", fieldName, "Critical"); commit(); rsp = query("q", "*:*", "stats", "true", "stats.field", fieldName); assertEquals(new EnumFieldValue(0, "Not Available"), rsp.getFieldStatsInfo().get(fieldName).getMin()); query("q", "*:*", "stats", "true", "stats.field", fieldName, StatsParams.STATS_CALC_DISTINCT, "true"); assertEquals(new EnumFieldValue(4, "Critical"), rsp.getFieldStatsInfo().get(fieldName).getMax()); handle.put("severity", UNORDERED); // this is stupid, but stats.facet doesn't garuntee order query("q", "*:*", "stats", "true", "stats.field", fieldName, "stats.facet", fieldName); }
From source file: org.sakaiproject.tool.messageforums.ui.MessageForumStatisticsBean.java
/**
 * Compares two statistics by grades.
 * Higher grades are greater than lower grades.
 * Stats with equal grades are compared by name.
 * If one has a grade and the other doesn't, having a grade is treated as greater than not having one.
 * If neither has a grade, compare by name.
 */
private static int compareGradesFromStats(DecoratedCompiledMessageStatistics stat1,
        DecoratedCompiledMessageStatistics stat2) {
    Double grd1 = getGradeFromStat(stat1);
    Double grd2 = getGradeFromStat(stat2);
    // If they're both null, or an equal grade, revert to the name comparator
    if ((grd1 == null && grd2 == null) || (grd1 != null && grd1.equals(grd2))) {
        return nameComparatorAsc.compare(stat1, stat2);
    }
    boolean exists1 = grd1 != null;
    boolean exists2 = grd2 != null;
    if (exists1 && exists2) {
        // Both grades exist, compare them
        return Double.compare(grd1, grd2);
    }
    // One grade exists and the other doesn't; compare their existence as booleans
    return Boolean.compare(exists1, exists2);
}
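The comparator above uses Double.equals only as an equal-grade shortcut and Double.compare for the actual ordering. For reference, a hypothetical compact equivalent built from Comparator combinators, in which a missing grade sorts below any present grade and ties fall back to the name (the Stat record and sample data are illustrative, not Sakai types):

import java.util.Comparator;

public class GradeComparatorDemo {
    record Stat(String name, Double grade) {}

    public static void main(String[] args) {
        Comparator<Stat> byName = Comparator.comparing(Stat::name);
        // nullsFirst: a null grade is "less than" any real grade; two nulls tie and fall through to the name.
        Comparator<Stat> byGradeThenName =
                Comparator.comparing(Stat::grade, Comparator.nullsFirst(Double::compareTo))
                          .thenComparing(byName);

        Stat a = new Stat("alice", 90.0);
        Stat b = new Stat("bob", null);
        Stat c = new Stat("carol", 90.0);

        System.out.println(byGradeThenName.compare(b, a) < 0); // true: missing grade sorts lower
        System.out.println(byGradeThenName.compare(a, c) < 0); // true: equal grades fall back to names
    }
}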