List of usage examples for org.apache.hadoop.conf.Configuration.setBoolean
public void setBoolean(String name, boolean value)
Set the value of the name property to a boolean.
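Before the project-specific tests below, here is a minimal, self-contained sketch of setting and reading a boolean property on a Hadoop Configuration. The property key my.feature.enabled is a hypothetical placeholder, not a key defined by Hadoop or Lens.

import org.apache.hadoop.conf.Configuration;

public class SetBooleanExample {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Store a boolean under a hypothetical property key.
        conf.setBoolean("my.feature.enabled", true);
        // Read it back; the second argument is the default returned when the key is absent.
        boolean enabled = conf.getBoolean("my.feature.enabled", false);
        System.out.println("my.feature.enabled = " + enabled);
    }
}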
From source file: org.apache.lens.cube.parse.TestCubeRewriter.java
License: Apache License

@Test
public void testCubeWhereQueryForMonthWithNoPartialData() throws Exception {
  Configuration conf = getConf();
  conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
  LensException e = getLensExceptionInRewrite(
      "select SUM(msr2) from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
  assertEquals(e.getErrorCode(),
      LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
  NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) e;
  PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
  assertEquals(pruneCauses.getBrief().substring(0, MISSING_PARTITIONS.errorFormat.length() - 3),
      MISSING_PARTITIONS.errorFormat.substring(0, MISSING_PARTITIONS.errorFormat.length() - 3),
      pruneCauses.getBrief());
  Set<String> expectedSet = Sets
      .newTreeSet(Arrays.asList("summary1", "summary2", "testfact2_raw", "summary3", "testfact"));
  boolean missingPartitionCause = false;
  for (String key : pruneCauses.getDetails().keySet()) {
    Set<String> actualKeySet = Sets.newTreeSet(Splitter.on(',').split(key));
    if (expectedSet.equals(actualKeySet)) {
      assertEquals(pruneCauses.getDetails().get(key).iterator().next().getCause(), MISSING_PARTITIONS);
      missingPartitionCause = true;
    }
  }
  assertTrue(missingPartitionCause, MISSING_PARTITIONS + " error does not occur for facttables set "
      + expectedSet + " Details :" + pruneCauses.getDetails());
  assertEquals(pruneCauses.getDetails().get("testfactmonthly").iterator().next().getCause(),
      NO_FACT_UPDATE_PERIODS_FOR_GIVEN_RANGE);
  assertEquals(pruneCauses.getDetails().get("testfact2").iterator().next().getCause(), MISSING_PARTITIONS);
  assertEquals(pruneCauses.getDetails().get("cheapfact").iterator().next().getCause(), NO_CANDIDATE_STORAGES);
  CandidateTablePruneCause cheapFactPruneCauses = pruneCauses.getDetails().get("cheapfact").iterator().next();
  assertEquals(cheapFactPruneCauses.getStorageCauses().get("c0").getCause(),
      SkipStorageCode.RANGE_NOT_ANSWERABLE);
  assertEquals(cheapFactPruneCauses.getStorageCauses().get("c99").getCause(), SkipStorageCode.UNSUPPORTED);
  assertEquals(pruneCauses.getDetails().get("summary4").iterator().next().getCause(), TIMEDIM_NOT_SUPPORTED);
  assertTrue(pruneCauses.getDetails().get("summary4").iterator().next().getUnsupportedTimeDims()
      .contains("d_time"));
}
From source file: org.apache.lens.cube.parse.TestCubeRewriter.java
License: Apache License

@Test
public void testDimensionQueryWithMultipleStorages() throws Exception {
  String hqlQuery = rewrite("select name, stateid from" + " citydim", getConf());
  String expected = getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null,
      "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  Configuration conf = getConf();
  // should pick up c2 storage when 'fail on partial data' enabled
  conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
  hqlQuery = rewrite("select name, stateid from" + " citydim", conf);
  expected = getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null,
      "c2_citytable", false);
  compareQueries(hqlQuery, expected);
  // state table is present on c1 with partition dumps and partitions added
  LensException e = getLensExceptionInRewrite("select name, capital from statedim ", conf);
  assertEquals(e.getErrorCode(), LensCubeErrorCode.NO_CANDIDATE_DIM_AVAILABLE.getLensErrorInfo().getErrorCode());
  NoCandidateDimAvailableException ne = (NoCandidateDimAvailableException) e;
  assertEquals(ne.getJsonMessage(), new PruneCauses.BriefAndDetailedError(NO_CANDIDATE_STORAGES.errorFormat,
      new HashMap<String, List<CandidateTablePruneCause>>() {
        {
          put("statetable", Arrays.asList(CandidateTablePruneCause
              .noCandidateStorages(new HashMap<String, SkipStorageCause>() {
                {
                  put("c1_statetable", new SkipStorageCause(SkipStorageCode.NO_PARTITIONS));
                }
              })));
          put("statetable_partitioned", Arrays.asList(CandidateTablePruneCause
              .noCandidateStorages(new HashMap<String, SkipStorageCause>() {
                {
                  put("C3_statetable_partitioned", new SkipStorageCause(SkipStorageCode.UNSUPPORTED));
                }
              })));
        }
      }));
  conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, false);
  // non existing parts should be populated
  CubeQueryContext rewrittenQuery = rewriteCtx("select name, capital from statedim ", conf);
  expected = getExpectedQuery("statedim", "select statedim.name," + " statedim.capital from ", null,
      "c1_statetable", true);
  compareQueries(rewrittenQuery.toHQL(), expected);
  assertNotNull(rewrittenQuery.getNonExistingParts());
  // run a query with time range function
  hqlQuery = rewrite("select name, stateid from citydim where " + TWO_DAYS_RANGE, conf);
  expected = getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null,
      TWO_DAYS_RANGE, null, "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  // query with alias
  hqlQuery = rewrite("select name, c.stateid from citydim" + " c", conf);
  expected = getExpectedQuery("c", "select c.name, c.stateid from ", null, "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  // query with where clause
  hqlQuery = rewrite("select name, c.stateid from citydim" + " c where name != 'xyz' ", conf);
  expected = getExpectedQuery("c", "select c.name, c.stateid from ", null, " c.name != 'xyz' ", null,
      "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  // query with orderby
  hqlQuery = rewrite("select name, c.stateid from citydim" + " c where name != 'xyz' order by name", conf);
  expected = getExpectedQuery("c", "select c.name, c.stateid from ", null, " c.name != 'xyz' ",
      " order by c.name asc", "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  // query with where and orderby
  hqlQuery = rewrite("select name, c.stateid from citydim" + " c where name != 'xyz' order by name", conf);
  expected = getExpectedQuery("c", "select c.name, c.stateid from ", null, " c.name != 'xyz' ",
      " order by c.name asc ", "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  // query with orderby with order specified
  hqlQuery = rewrite("select name, c.stateid from citydim" + " c where name != 'xyz' order by name desc ", conf);
  expected = getExpectedQuery("c", "select c.name, c.stateid from ", null, " c.name != 'xyz' ",
      " order by c.name desc", "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  conf.set(DRIVER_SUPPORTED_STORAGES, "C2");
  hqlQuery = rewrite("select name, stateid from citydim", conf);
  expected = getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null,
      "c2_citytable", false);
  compareQueries(hqlQuery, expected);
  conf.set(DRIVER_SUPPORTED_STORAGES, "C1");
  hqlQuery = rewrite("select name, stateid from citydim", conf);
  expected = getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null,
      "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  conf.set(DRIVER_SUPPORTED_STORAGES, "");
  conf.set(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES, "C1_citytable");
  hqlQuery = rewrite("select name, stateid from citydim", conf);
  expected = getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null,
      "c1_citytable", true);
  compareQueries(hqlQuery, expected);
  conf.set(DRIVER_SUPPORTED_STORAGES, "");
  conf.set(CubeQueryConfUtil.VALID_STORAGE_DIM_TABLES, "C2_citytable");
  hqlQuery = rewrite("select name, stateid from citydim", conf);
  expected = getExpectedQuery("citydim", "select citydim.name," + " citydim.stateid from ", null,
      "c2_citytable", false);
  compareQueries(hqlQuery, expected);
  hqlQuery = rewrite("select name n, count(1) from citydim" + " group by name order by n ", conf);
  expected = getExpectedQuery("citydim", "select citydim.name as `n`," + " count(1) from ",
      " group by citydim.name order by n asc", "c2_citytable", false);
  compareQueries(hqlQuery, expected);
  hqlQuery = rewrite("select name as `n`, count(1) from citydim" + " order by n ", conf);
  compareQueries(hqlQuery, expected);
  hqlQuery = rewrite("select count(1) from citydim" + " group by name order by name ", conf);
  expected = getExpectedQuery("citydim", "select citydim.name," + " count(1) from ",
      " group by citydim.name order by citydim.name asc ", "c2_citytable", false);
  compareQueries(hqlQuery, expected);
}
From source file: org.apache.lens.cube.parse.TestCubeRewriter.java
License: Apache License

@Test
public void testJoinWithMultipleAliases() throws Exception {
  String cubeQl = "SELECT SUM(msr2) from testCube left outer join citydim c1 on testCube.cityid = c1.id"
      + " left outer join statedim s1 on c1.stateid = s1.id"
      + " left outer join citydim c2 on s1.countryid = c2.id where " + TWO_DAYS_RANGE;
  Configuration conf = getConfWithStorages("C1");
  conf.setBoolean(DISABLE_AUTO_JOINS, true);
  String hqlQuery = rewrite(cubeQl, conf);
  String db = getDbName();
  String expectedJoin = " LEFT OUTER JOIN " + db
      + "c1_citytable c1 ON (( testcube . cityid ) = ( c1 . id )) AND (c1.dt = 'latest') "
      + " LEFT OUTER JOIN " + db
      + "c1_statetable s1 ON (( c1 . stateid ) = ( s1 . id )) AND (s1.dt = 'latest') "
      + " LEFT OUTER JOIN " + db
      + "c1_citytable c2 ON (( s1 . countryid ) = ( c2 . id )) AND (c2.dt = 'latest')";
  String expected = getExpectedQuery(TEST_CUBE_NAME, "select sum(testcube.msr2)" + " FROM ", expectedJoin,
      null, null, null, getWhereForHourly2days(TEST_CUBE_NAME, "C1_testfact2"));
  compareQueries(hqlQuery, expected);
}
From source file: org.apache.lens.cube.parse.TestCubeRewriter.java
License: Apache License

@Test
public void testSelectDimonlyJoinOnCube() throws Exception {
  String query = "SELECT count (distinct cubecity.name) from testCube where " + TWO_DAYS_RANGE;
  Configuration conf = new Configuration(getConf());
  conf.setBoolean(DISABLE_AUTO_JOINS, false);
  String hql = rewrite(query, conf);
  System.out.println("@@ HQL = " + hql);
  assertNotNull(hql);
}
From source file: org.apache.lens.cube.parse.TestTimeRangeWriter.java
License: Apache License

protected CubeQueryContext getMockedCubeContext(boolean betweenOnly) {
  CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
  Configuration configuration = new Configuration();
  configuration.setBoolean(CubeQueryConfUtil.BETWEEN_ONLY_TIME_RANGE_WRITER, betweenOnly);
  Mockito.when(context.getConf()).thenReturn(configuration);
  Mockito.when(context.shouldReplaceTimeDimWithPart()).thenReturn(true);
  return context;
}
From source file: org.apache.lens.cube.parse.TestTimeRangeWriterWithQuery.java
License: Apache License

@Test
public void testCubeQueryWithTimeDim() throws Exception {
  Configuration tconf = new Configuration(conf);
  // hourly partitions for two days
  tconf.setBoolean(FAIL_QUERY_ON_PARTIAL_DATA, true);
  tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
  tconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
  tconf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
  tconf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C4"), "MONTHLY,DAILY,HOURLY");
  String query = "SELECT test_time_dim, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
  String hqlQuery = rewrite(query, tconf);
  Map<String, String> whereClauses = new HashMap<String, String>();
  whereClauses.put(getDbName() + "c4_testfact2",
      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain1", "full_hour", getUptoHour(TWODAYS_BACK),
          getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
  System.out.println("HQL:" + hqlQuery);
  String expected = getExpectedQuery(cubeName, "select timehourchain1.full_hour, sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id = timehourchain1.id",
      null, " GROUP BY timehourchain1.full_hour", null, whereClauses);
  TestCubeRewriter.compareQueries(hqlQuery, expected);

  query = "SELECT msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD;
  hqlQuery = rewrite(query, tconf);
  System.out.println("HQL:" + hqlQuery);
  expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id = timehourchain1.id",
      null, null, null, whereClauses);
  TestCubeRewriter.compareQueries(hqlQuery, expected);

  query = "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + TWO_DAYS_RANGE_TTD
      + " and testcube.cityid != 5";
  hqlQuery = rewrite(query, tconf);
  System.out.println("HQL:" + hqlQuery);
  expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id = timehourchain1.id",
      " testcube.cityid > 2 ", " and testcube.cityid != 5", null, whereClauses);
  TestCubeRewriter.compareQueries(hqlQuery, expected);

  // multiple range query
  hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD + " OR "
      + TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS, tconf);
  whereClauses = new HashMap<>();
  whereClauses.put(getDbName() + "c4_testfact2",
      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain1", "full_hour", getUptoHour(TWODAYS_BACK),
          getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT)
          + " OR "
          + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain1", "full_hour",
              getUptoHour(BEFORE_6_DAYS),
              getUptoHour(getOneLess(BEFORE_4_DAYS, UpdatePeriod.HOURLY.calendarField())),
              TestTimeRangeWriter.DB_FORMAT));
  expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id = timehourchain1.id",
      null, null, null, whereClauses);
  System.out.println("HQL:" + hqlQuery);
  TestCubeRewriter.compareQueries(hqlQuery, expected);

  hqlQuery = rewrite("select to_date(test_time_dim), SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD
      + " OR " + TWO_DAYS_RANGE_TTD_BEFORE_4_DAYS, tconf);
  expected = getExpectedQuery(cubeName, "select to_date(timehourchain1.full_hour), sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain1 on testcube.test_time_dim_hour_id = timehourchain1.id",
      null, " group by to_date(timehourchain1.full_hour)", null, whereClauses);
  System.out.println("HQL:" + hqlQuery);
  TestCubeRewriter.compareQueries(hqlQuery, expected);
}
From source file: org.apache.lens.cube.parse.TestTimeRangeWriterWithQuery.java
License: Apache License

@Test
public void testCubeQueryWithTimeDimThruChain() throws Exception {
  // hourly partitions for two days
  Configuration tconf = new Configuration(conf);
  tconf.setBoolean(FAIL_QUERY_ON_PARTIAL_DATA, true);
  tconf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
  tconf.setBoolean(CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, false);
  tconf.set(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT, "yyyy-MM-dd HH:mm:ss");
  tconf.set(CubeQueryConfUtil.getValidUpdatePeriodsKey("testfact", "C4"), "MONTHLY,DAILY,HOURLY");
  String query = "SELECT test_time_dim2, msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD2;
  String hqlQuery = rewrite(query, tconf);
  Map<String, String> whereClauses = new HashMap<String, String>();
  whereClauses.put(getDbName() + "c4_testfact2",
      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain2", "full_hour", getUptoHour(TWODAYS_BACK),
          getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT));
  System.out.println("HQL:" + hqlQuery);
  String expected = getExpectedQuery(cubeName, "select timehourchain2.full_hour, sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2 = timehourchain2.id",
      null, " GROUP BY timehourchain2.full_hour", null, whereClauses);
  TestCubeRewriter.compareQueries(hqlQuery, expected);

  query = "SELECT msr2 FROM testCube where " + TWO_DAYS_RANGE_TTD2;
  hqlQuery = rewrite(query, tconf);
  System.out.println("HQL:" + hqlQuery);
  expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2 = timehourchain2.id",
      null, null, null, whereClauses);
  TestCubeRewriter.compareQueries(hqlQuery, expected);

  query = "SELECT msr2 FROM testCube where testcube.cityid > 2 and " + TWO_DAYS_RANGE_TTD2
      + " and testcube.cityid != 5";
  hqlQuery = rewrite(query, tconf);
  System.out.println("HQL:" + hqlQuery);
  expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2 = timehourchain2.id",
      " testcube.cityid > 2 ", " and testcube.cityid != 5", null, whereClauses);
  TestCubeRewriter.compareQueries(hqlQuery, expected);

  // multiple range query
  hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE_TTD2 + " OR "
      + TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS, tconf);
  whereClauses = new HashMap<>();
  whereClauses.put(getDbName() + "c4_testfact2",
      TestBetweenTimeRangeWriter.getBetweenClause("timehourchain2", "full_hour", getUptoHour(TWODAYS_BACK),
          getUptoHour(getOneLess(NOW, UpdatePeriod.HOURLY.calendarField())), TestTimeRangeWriter.DB_FORMAT)
          + " OR "
          + TestBetweenTimeRangeWriter.getBetweenClause("timehourchain2", "full_hour",
              getUptoHour(BEFORE_6_DAYS),
              getUptoHour(getOneLess(BEFORE_4_DAYS, UpdatePeriod.HOURLY.calendarField())),
              TestTimeRangeWriter.DB_FORMAT));
  expected = getExpectedQuery(cubeName, "select sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2 = timehourchain2.id",
      null, null, null, whereClauses);
  System.out.println("HQL:" + hqlQuery);
  TestCubeRewriter.compareQueries(hqlQuery, expected);

  hqlQuery = rewrite("select to_date(test_time_dim2), SUM(msr2) from testCube" + " where "
      + TWO_DAYS_RANGE_TTD2 + " OR " + TWO_DAYS_RANGE_TTD2_BEFORE_4_DAYS, tconf);
  expected = getExpectedQuery(cubeName, "select to_date(timehourchain2.full_hour), sum(testcube.msr2) FROM ",
      " join " + getDbName()
          + "c4_hourDimTbl timehourchain2 on testcube.test_time_dim_hour_id2 = timehourchain2.id",
      null, " group by to_date(timehourchain2.full_hour)", null, whereClauses);
  System.out.println("HQL:" + hqlQuery);
  TestCubeRewriter.compareQueries(hqlQuery, expected);
}
From source file: org.apache.lens.cube.parse.TestUnionQueries.java
License: Apache License

@Test
public void testUnionQueries() throws Exception {
  Configuration conf = getConf();
  conf.set(getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
  conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "DAILY,HOURLY");
  conf.set(getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
  conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "MONTHLY,DAILY");
  conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, false);
  ArrayList<String> storages = Lists.newArrayList("c1_testfact", "c2_testfact");
  try {
    getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(HOURLY, DAILY));
    getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(MONTHLY));
    // Union query
    String hqlQuery;
    String expected;
    StoragePartitionProvider provider = new StoragePartitionProvider() {
      @Override
      public Map<String, String> providePartitionsForStorage(String storage) {
        return getWhereForMonthlyDailyAndHourly2monthsUnionQuery(storage);
      }
    };
    try {
      rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` " + "from testCube where "
          + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
      fail("Union feature is disabled, should have failed");
    } catch (LensException e) {
      assertEquals(e.getErrorCode(),
          LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo().getErrorCode());
    }
    conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);

    hqlQuery = rewrite("select ascii(cityname) as `City Name`, msr8, msr7 as `Third measure` "
        + "from testCube where ascii(cityname) = 'c' and cityname = 'a' and zipcode = 'b' and "
        + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
    expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
        "SELECT testcube.alias0 as `City Name`, sum(testcube.alias1) + max(testcube.alias2), "
            + "case when sum(testcube.alias1) = 0 then 0 else sum(testcube.alias3)/sum(testcube.alias1) end "
            + "as `Third Measure`",
        null, "group by testcube.alias0",
        "select ascii(cubecity.name) as `alias0`, sum(testcube.msr2) as `alias1`, "
            + "max(testcube.msr3) as `alias2`, "
            + "sum(case when testcube.cityid = 'x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
        " join " + getDbName()
            + "c1_citytable cubecity on testcube.cityid = cubecity.id and (cubecity.dt = 'latest')",
        "ascii(cubecity.name) = 'c' and cubecity.name = 'a' and testcube.zipcode = 'b'",
        "group by ascii(cubecity.name))");
    compareQueries(hqlQuery, expected);

    hqlQuery = rewrite("select asciicity as `City Name`, msr8, msr7 as `Third measure` "
        + "from testCube where asciicity = 'c' and cityname = 'a' and zipcode = 'b' and "
        + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
    compareQueries(hqlQuery, expected);

    hqlQuery = rewrite("select ascii(cityid) as `City ID`, msr8, msr7 as `Third measure` "
        + "from testCube where ascii(cityid) = 'c' and cityid = 'a' and zipcode = 'b' and "
        + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
    expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
        "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
            + "case when sum(testcube.alias1) = 0 then 0 else sum(testcube.alias3)/sum(testcube.alias1) end "
            + "as `Third Measure`",
        null, "group by testcube.alias0",
        "select ascii(testcube.cityid) as `alias0`, sum(testcube.msr2) as `alias1`, "
            + "max(testcube.msr3) as `alias2`, "
            + "sum(case when testcube.cityid = 'x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
        "ascii(testcube.cityid) = 'c' and testcube.cityid = 'a' and testcube.zipcode = 'b'",
        "group by ascii(testcube.cityid)");
    compareQueries(hqlQuery, expected);

    hqlQuery = rewrite("select cityid as `City ID`, msr8, msr7 as `Third measure` "
        + "from testCube where cityid = 'a' and zipcode = 'b' and " + TWO_MONTHS_RANGE_UPTO_HOURS, conf);
    expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
        "SELECT testcube.alias0 as `City ID`, sum(testcube.alias1) + max(testcube.alias2), "
            + "case when sum(testcube.alias1) = 0 then 0 else sum(testcube.alias3)/sum(testcube.alias1) end "
            + "as `Third Measure`",
        null, "group by testcube.alias0",
        "select testcube.cityid as `alias0`, sum(testcube.msr2) as `alias1`, "
            + "max(testcube.msr3) as `alias2`, "
            + "sum(case when testcube.cityid = 'x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
        "testcube.cityid = 'a' and testcube.zipcode = 'b'", "group by testcube.cityid");
    compareQueries(hqlQuery, expected);

    hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Third measure` from testCube where "
        + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr7 > 10", conf);
    expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
        "SELECT testcube.alias0 as `City ID`, max(testcube.alias1) as `Third measure`", null,
        "group by testcube.alias0 having "
            + "(case when sum(testcube.alias2)=0 then 0 else sum(testcube.alias3)/sum(testcube.alias2) end > 10 )",
        "SELECT testcube.cityid as `alias0`, max(testcube.msr3) as `alias1`, "
            + "sum(testcube.msr2) as `alias2`, "
            + "sum(case when testcube.cityid='x' then testcube.msr21 else testcube.msr22 end) as `alias3`",
        null, "group by testcube.cityid");
    compareQueries(hqlQuery, expected);

    hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Third measure` from testCube where "
        + TWO_MONTHS_RANGE_UPTO_HOURS + " having msr8 > 10", conf);
    expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
        "SELECT testcube.alias0 as `City ID`, max(testcube.alias1) as `Third measure`", null,
        "GROUP BY testcube.alias0 " + "HAVING (sum(testcube.alias2) + max(testcube.alias1)) > 10 ",
        "SELECT testcube.cityid as `alias0`, max(testcube.msr3) as `alias1`, "
            + "sum(testcube.msr2)as `alias2`",
        null, "group by testcube.cityid");
    compareQueries(hqlQuery, expected);

    hqlQuery = rewrite("select msr3 as `Measure 3` from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS
        + " having msr2 > 10 and msr2 < 100", conf);
    expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
        "SELECT max(testcube.alias0) as `Measure 3` ", null,
        " HAVING sum(testcube.alias1) > 10 and sum(testcube.alias1) < 100",
        "SELECT max(testcube.msr3) as `alias0`, sum(testcube.msr2) as `alias1`", null, null);
    compareQueries(hqlQuery, expected);

    hqlQuery = rewrite("select zipcode, cityid as `City ID`, msr3 as `Measure 3`, msr4, "
        + "SUM(msr2) as `Measure 2` from testCube where " + TWO_MONTHS_RANGE_UPTO_HOURS
        + " having msr4 > 10 order by cityid desc limit 5", conf);
    expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
        "SELECT testcube.alias0, testcube.alias1 as `City ID`, max(testcube.alias2) as `Measure 3`, "
            + "count(testcube.alias3), sum(testcube.alias4) as `Measure 2`",
        null,
        "group by testcube.alias0, testcube.alias1 "
            + " having count(testcube.alias3) > 10 order by testcube.alias1 desc limit 5",
        "select testcube.zipcode as `alias0`, testcube.cityid as `alias1`, "
            + "max(testcube.msr3) as `alias2`,count(testcube.msr4) as `alias3`, sum(testcube.msr2) as `alias4`",
        null, "group by testcube.zipcode, testcube.cityid ");
    compareQueries(hqlQuery, expected);

    conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, false);
    conf.setBoolean(ENABLE_SELECT_TO_GROUPBY, false);
    hqlQuery = rewrite("select cityid as `City ID`, msr3 as `Measure 3`, "
        + "SUM(msr2) as `Measure 2` from testCube" + " where " + TWO_MONTHS_RANGE_UPTO_HOURS
        + " group by zipcode having msr4 > 10 order by cityid desc limit 5", conf);
    expected = getExpectedUnionQuery(TEST_CUBE_NAME, storages, provider,
        "SELECT testcube.alias0 as `City ID`,max(testcube.alias1) as `Measure 3`,sum(testcube.alias2) as `Measure 2` ",
        null,
        "group by testcube.alias3 having count(testcube.alias4) > 10 order by testcube.alias0 desc limit 5",
        "SELECT testcube.cityid as `alias0`, max(testcube.msr3) as `alias1`, "
            + "sum(testcube.msr2) as `alias2`, testcube.zipcode as `alias3`, count(testcube .msr4) as `alias4` FROM ",
        null, "GROUP BY testcube.zipcode");
    compareQueries(hqlQuery, expected);
  } finally {
    getStorageToUpdatePeriodMap().clear();
  }
}
From source file: org.apache.lens.cube.parse.TestUnionQueries.java
License: Apache License

@Test
public void testCubeWhereQueryWithMultipleTables() throws Exception {
  Configuration conf = getConf();
  conf.setBoolean(CubeQueryConfUtil.ENABLE_STORAGES_UNION, true);
  conf.set(getValidStorageTablesKey("testfact"), "C1_testFact,C2_testFact");
  conf.set(getValidUpdatePeriodsKey("testfact", "C1"), "DAILY");
  conf.set(getValidUpdatePeriodsKey("testfact2", "C1"), "YEARLY");
  conf.set(getValidUpdatePeriodsKey("testfact", "C2"), "HOURLY");
  getStorageToUpdatePeriodMap().put("c1_testfact", Lists.newArrayList(DAILY));
  getStorageToUpdatePeriodMap().put("c2_testfact", Lists.newArrayList(HOURLY));
  StoragePartitionProvider provider = new StoragePartitionProvider() {
    @Override
    public Map<String, String> providePartitionsForStorage(String storage) {
      return getWhereForDailyAndHourly2days(TEST_CUBE_NAME, storage);
    }
  };
  try {
    // Union query
    String hqlQuery = rewrite("select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
    System.out.println("HQL:" + hqlQuery);
    String expected = getExpectedUnionQuery(TEST_CUBE_NAME, Lists.newArrayList("c1_testfact", "c2_testfact"),
        provider, "select sum(testcube.alias0) ", null, null, "select sum(testcube.msr2) as `alias0` from ",
        null, null);
    compareQueries(hqlQuery, expected);
  } finally {
    getStorageToUpdatePeriodMap().clear();
  }
}