Usage examples for the java.util.NavigableSet.contains(Object) method, collected from open-source projects.
boolean contains(Object o);
From source file:com.inclouds.hbase.rowcache.RowCache.java
/** * FIXME - optimize./* ww w. j av a 2 s . c o m*/ * * @param family * the family * @param column * the column * @return true, if successful */ private boolean shouldSkipColumn(byte[] family, byte[] column) { RequestContext context = contextTLS.get(); Map<byte[], NavigableSet<byte[]>> map = context.getFamilyMap(); NavigableSet<byte[]> cols = map.get(family); if (cols == null || cols.size() == 0) return false; return cols.contains(column) == false; }
From source file:org.apache.hadoop.hbase.master.TestRollingRestart.java
/**
 * Asserts that the cluster has exactly the expected set of online regions,
 * logging diagnostics (missing or double-assigned regions) before failing.
 *
 * @param cluster the mini cluster under test
 * @param expectedRegions the region names expected to be online
 * @throws IOException if querying the cluster fails
 */
private void assertRegionsAssigned(MiniHBaseCluster cluster, Set<String> expectedRegions) throws IOException {
    final int onlineCount = getNumberOfOnlineRegions(cluster);
    final int expectedCount = expectedRegions.size();
    if (expectedCount > onlineCount) {
        // Fewer regions online than expected: report which ones are missing.
        log("Expected to find " + expectedCount + " but only found" + " " + onlineCount);
        NavigableSet<String> onlineRegions = HBaseTestingUtility.getAllOnlineRegions(cluster);
        for (String expected : expectedRegions) {
            if (!onlineRegions.contains(expected)) {
                log("Missing region: " + expected);
            }
        }
        assertEquals(expectedCount, onlineCount);
    } else if (expectedCount < onlineCount) {
        // More regions online than expected: likely double assignments.
        int extraCount = onlineCount - expectedCount;
        log("Expected to find " + expectedCount + " but found" + " " + onlineCount + " (" + extraCount
                + " double assignments?)");
        NavigableSet<String> doubleAssigned = getDoubleAssignedRegions(cluster);
        for (String region : doubleAssigned) {
            log("Region is double assigned: " + region);
        }
        assertEquals(expectedCount, onlineCount);
    } else {
        log("Success! Found expected number of " + onlineCount + " regions");
    }
}
From source file:org.apache.hadoop.hbase.regionserver.Memcache.java
/**
 * Walks the portion of {@code set} strictly before {@code search}, collecting
 * candidate KeyValues for a "row key before" lookup into {@code candidates},
 * honoring deletes and TTL expiry.
 *
 * @param set the backing memstore set (entries may be removed when expired/deleted)
 * @param search upper bound; only keys before this are considered
 * @param candidates output set of surviving candidate KeyValues
 * @param deletes accumulated delete markers to honor
 * @param now current time in millis, used for TTL expiry checks
 */
private void getRowKeyBefore(ConcurrentSkipListSet<KeyValue> set, KeyValue search,
        NavigableSet<KeyValue> candidates, final NavigableSet<KeyValue> deletes, final long now) {
    NavigableSet<KeyValue> headSet = set.headSet(search);
    // If we tried to create a headMap and got an empty map, then there are
    // no keys at or before the search key, so we're done.
    if (headSet.isEmpty()) {
        return;
    }
    // If there aren't any candidate keys at this point, we need to search
    // backwards until we find at least one candidate or run out of headMap.
    if (candidates.isEmpty()) {
        KeyValue lastFound = null;
        // Iterate newest-to-oldest (reverse order) over everything before 'search'.
        for (Iterator<KeyValue> i = headSet.descendingIterator(); i.hasNext();) {
            KeyValue found = i.next();
            // if the last row we found a candidate key for is different than
            // the row of the current candidate, we can stop looking -- if its
            // not a delete record.
            boolean deleted = found.isDeleteType();
            if (lastFound != null && this.comparator.matchingRows(lastFound, found) && !deleted) {
                break;
            }
            // If this isn't a delete, record it as a candidate key. Also
            // take note of this candidate so that we'll know when
            // we cross the row boundary into the previous row.
            if (!deleted) {
                if (Store.notExpiredAndNotInDeletes(this.ttl, found, now, deletes)) {
                    lastFound = found;
                    candidates.add(found);
                } else {
                    // Its expired: purge it from the backing set.
                    Store.expiredOrDeleted(set, found);
                }
            } else {
                // We are encountering items in reverse. We may have just added
                // an item to candidates that this later item deletes. Check. If we
                // found something in candidates, remove it from the set.
                if (Store.handleDeletes(found, candidates, deletes)) {
                    remove(set, found);
                }
            }
        }
    } else {
        // If there are already some candidate keys, we only need to consider
        // the very last row's worth of keys in the headMap, because any
        // smaller acceptable candidate keys would have caused us to start
        // our search earlier in the list, and we wouldn't be searching here.
        SortedSet<KeyValue> rowTailMap = headSet.tailSet(headSet.last().cloneRow(HConstants.LATEST_TIMESTAMP));
        Iterator<KeyValue> i = rowTailMap.iterator();
        // rowTailMap is non-empty by construction (it contains at least headSet.last()),
        // so do/while is safe here.
        do {
            KeyValue found = i.next();
            if (found.isDeleteType()) {
                Store.handleDeletes(found, candidates, deletes);
            } else {
                // NOTE(review): this uses OR — a key that is expired but not deleted
                // (or deleted but not expired) still becomes a candidate. The sibling
                // branch above uses Store.notExpiredAndNotInDeletes (AND semantics).
                // TODO confirm whether '||' here should be '&&'.
                if (ttl == HConstants.FOREVER || now < found.getTimestamp() + ttl
                        || !deletes.contains(found)) {
                    candidates.add(found);
                } else {
                    Store.expiredOrDeleted(set, found);
                }
            }
        } while (i.hasNext());
    }
}
From source file:org.apache.hadoop.hbase.regionserver.transactional.TrxTransactionState.java
private synchronized Cell[] getAllCells(final Scan scan) { //if (LOG.isTraceEnabled()) LOG.trace("getAllCells -- ENTRY"); List<Cell> kvList = new ArrayList<Cell>(); ListIterator<WriteAction> writeOrderIter = null; for (writeOrderIter = writeOrdering.listIterator(); writeOrderIter.hasNext();) { WriteAction action = writeOrderIter.next(); byte[] row = action.getRow(); List<Cell> kvs = action.getCells(); if (scan.getStartRow() != null && !Bytes.equals(scan.getStartRow(), HConstants.EMPTY_START_ROW) && Bytes.compareTo(row, scan.getStartRow()) < 0) { continue; }/*from w w w . j ava 2 s .co m*/ if (scan.getStopRow() != null && !Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW) && Bytes.compareTo(row, scan.getStopRow()) > 0) { continue; } if (!scan.hasFamilies()) { kvList.addAll(kvs); continue; } // Pick only the Cell's that match the 'scan' specifications for (Cell lv_kv : kvs) { byte[] lv_kv_family = lv_kv.getFamilyArray(); Map<byte[], NavigableSet<byte[]>> lv_familyMap = scan.getFamilyMap(); NavigableSet<byte[]> set = lv_familyMap.get(lv_kv_family); if (set == null || set.size() == 0) { kvList.add(lv_kv); continue; } if (set.contains(lv_kv.getQualifierArray())) { kvList.add(lv_kv); } } } if (LOG.isTraceEnabled()) LOG.trace("getAllCells -- EXIT kvList size = " + kvList.size()); return kvList.toArray(new Cell[kvList.size()]); }
From source file:org.apache.hadoop.hbase.regionserver.transactional.TrxTransactionState.java
private synchronized KeyValue[] getAllKVs(final Scan scan) { //if (LOG.isTraceEnabled()) LOG.trace("getAllKVs -- ENTRY"); List<KeyValue> kvList = new ArrayList<KeyValue>(); ListIterator<WriteAction> writeOrderIter = null; for (writeOrderIter = writeOrdering.listIterator(); writeOrderIter.hasNext();) { WriteAction action = writeOrderIter.next(); byte[] row = action.getRow(); List<KeyValue> kvs = action.getKeyValues(); if (scan.getStartRow() != null && !Bytes.equals(scan.getStartRow(), HConstants.EMPTY_START_ROW) && Bytes.compareTo(row, scan.getStartRow()) < 0) { continue; }//from w w w. j a v a2 s. co m if (scan.getStopRow() != null && !Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW) && Bytes.compareTo(row, scan.getStopRow()) > 0) { continue; } if (!scan.hasFamilies()) { kvList.addAll(kvs); continue; } // Pick only the Cell's that match the 'scan' specifications Map<byte[], NavigableSet<byte[]>> lv_familyMap = scan.getFamilyMap(); for (KeyValue lv_kv : kvs) { byte[] lv_kv_family = lv_kv.getFamily(); NavigableSet<byte[]> set = lv_familyMap.get(lv_kv_family); if (set == null || set.size() == 0) { kvList.add(lv_kv); continue; } if (set.contains(lv_kv.getQualifier())) { kvList.add(lv_kv); } } } if (LOG.isTraceEnabled()) LOG.trace("getAllKVs -- EXIT kvList size = " + kvList.size()); return kvList.toArray(new KeyValue[kvList.size()]); }
From source file:org.apache.kylin.common.persistence.ResourceStoreTest.java
/**
 * Exercises the basic ResourceStore contract: put/get round-trips, overwrite,
 * optimistic-locking write conflicts, directory listing, and delete/exists.
 *
 * @param store the store implementation under test
 * @throws IOException on store access failure
 */
private static void testBasics(ResourceStore store) throws IOException {
    String dir1 = "/cube";
    String path1 = "/cube/_test.json";
    StringEntity content1 = new StringEntity("anything");
    String dir2 = "/table";
    String path2 = "/table/_test.json";
    StringEntity content2 = new StringEntity("something");

    // cleanup legacy if any
    store.deleteResource(path1);
    store.deleteResource(path2);

    StringEntity t;

    // put/get round-trip for both paths
    store.putResource(path1, content1, StringEntity.serializer);
    assertTrue(store.exists(path1));
    t = store.getResource(path1, StringEntity.class, StringEntity.serializer);
    assertEquals(content1, t);

    store.putResource(path2, content2, StringEntity.serializer);
    assertTrue(store.exists(path2));
    t = store.getResource(path2, StringEntity.class, StringEntity.serializer);
    assertEquals(content2, t);

    // overwrite succeeds when the timestamp is current
    t.str = "new string";
    store.putResource(path2, t, StringEntity.serializer);

    // a stale timestamp must be rejected (optimistic locking)
    try {
        t.setLastModified(t.getLastModified() - 1);
        store.putResource(path2, t, StringEntity.serializer);
        fail("write conflict should trigger IllegalStateException");
    } catch (IllegalStateException e) {
        // expected
    }

    // listing a directory shows only its own children
    NavigableSet<String> list;
    list = store.listResources(dir1);
    assertTrue(list.contains(path1));
    assertTrue(!list.contains(path2));
    list = store.listResources(dir2);
    assertTrue(list.contains(path2));
    assertTrue(!list.contains(path1));

    // listing the root shows directories but not nested files
    list = store.listResources("/");
    assertTrue(list.contains(dir1));
    assertTrue(list.contains(dir2));
    assertTrue(!list.contains(path1));
    assertTrue(!list.contains(path2));

    // listing a file path (not a directory) yields null
    list = store.listResources(path1);
    assertNull(list);
    list = store.listResources(path2);
    assertNull(list);

    // delete removes both existence and listing visibility
    store.deleteResource(path1);
    assertTrue(!store.exists(path1));
    list = store.listResources(dir1);
    assertTrue(list == null || !list.contains(path1));

    store.deleteResource(path2);
    assertTrue(!store.exists(path2));
    list = store.listResources(dir2);
    assertTrue(list == null || !list.contains(path2));
}