Example usage for java.util.HashMap HashMap()

Introduction

This page collects example usages of the java.util.HashMap no-argument constructor, HashMap().

Prototype

public HashMap() 

Documentation

Constructs an empty HashMap with the default initial capacity (16) and the default load factor (0.75).
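
A minimal sketch of the constructor in use (the class name and map contents are illustrative):

import java.util.HashMap;
import java.util.Map;

public class HashMapExample {
    public static void main(String[] args) {
        // Empty map with the default initial capacity (16) and load factor (0.75);
        // the table is resized once it holds more than 16 * 0.75 = 12 entries.
        Map<String, Integer> counts = new HashMap<String, Integer>();

        counts.put("apples", 3);
        counts.put("oranges", 5);

        System.out.println(counts.get("apples"));        // 3
        System.out.println(counts.containsKey("pears")); // false
        System.out.println(counts.size());               // 2
    }
}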

Usage

From source file:de.uni_rostock.goodod.checker.CheckerApp.java
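
This example builds a HashMap from ignored import IRIs to a replacement ontology IRI, then checks whether rewriting the imports changes the set of unsatisfiable classes.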

public static void main(String[] args) throws OWLOntologyCreationException {
    config = Configuration.getConfiguration(args);
    String bioTopVariantA = "biotoplite_group_A_TEST.owl";
    String bioTopVariantB = "biotoplite_group_B_TEST.owl";
    String repoRoot = config.getString("repositoryRoot");
    File commonBioTopF = new File(repoRoot + File.separator + config.getString("bioTopLiteSource"));

    String groupAFile = repoRoot + File.separator + "Results" + File.separator + "GruppeA" + File.separator
            + bioTopVariantA;
    String groupBFile = repoRoot + File.separator + "Results" + File.separator + "GruppeB" + File.separator
            + bioTopVariantB;
    String testFile = config.getString("testDescription");
    IRI bioTopIRI = IRI.create("http://purl.org/biotop/biotoplite.owl");
    SimpleIRIMapper bioTopLiteMapper = new SimpleIRIMapper(bioTopIRI, IRI.create(commonBioTopF));
    SimpleIRIMapper variantMapperA = new SimpleIRIMapper(
            IRI.create("http://purl.org/biotop/biotoplite_group_A_TEST.owl"), IRI.create(new File(groupAFile)));
    SimpleIRIMapper variantMapperB = new SimpleIRIMapper(
            IRI.create("http://purl.org/biotop/biotoplite_group_B_TEST.owl"), IRI.create(new File(groupBFile)));
    //logger.info("Loading ontology " + testFile + ".");
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    manager.addIRIMapper(variantMapperA);
    manager.addIRIMapper(variantMapperB);
    manager.addIRIMapper(bioTopLiteMapper);
    FileDocumentSource source = new FileDocumentSource(new File(testFile));
    OWLOntology ontology = null;
    try {
        ontology = manager.loadOntologyFromOntologyDocument(source);
    } catch (Throwable e) {
        logger.fatal("Loading failed", e);
        System.exit(1);
    }

    org.semanticweb.HermiT.Configuration reasonerConfig = new org.semanticweb.HermiT.Configuration();
    reasonerConfig.throwInconsistentOntologyException = false;
    //ReasonerProgressMonitor monitor = new ConsoleProgressMonitor();
    reasonerConfig.existentialStrategyType = ExistentialStrategyType.INDIVIDUAL_REUSE;
    //reasonerConfig.reasonerProgressMonitor = monitor;
    reasonerConfig.tableauMonitorType = TableauMonitorType.NONE;
    //reasonerConfig.individualTaskTimeout = 10000;
    Reasoner reasoner = new Reasoner(reasonerConfig, ontology);
    reasoner.classifyClasses();
    Set<OWLClass> before = reasoner.getUnsatisfiableClasses()
            .getEntitiesMinus(manager.getOWLDataFactory().getOWLNothing());
    //logger.info("Found " + before.size() + " inconsistent classes before import change.");
    logger.debug(before);

    reasoner.dispose();
    reasoner = null;
    manager.removeOntology(ontology);
    ontology = null;

    Map<IRI, IRI> importMap = new HashMap<IRI, IRI>();

    OWLOntologyLoaderConfiguration interimConfig = new OWLOntologyLoaderConfiguration();
    for (String str : config.getStringArray("ignoredImports")) {
        IRI ignoredIRI = IRI.create(str);
        importMap.put(ignoredIRI, bioTopIRI);

        interimConfig = interimConfig.addIgnoredImport(ignoredIRI);
    }

    interimConfig = interimConfig.setMissingImportHandlingStrategy(MissingImportHandlingStrategy.SILENT);
    try {
        ontology = manager.loadOntologyFromOntologyDocument(source, interimConfig);
    } catch (Throwable e) {
        logger.fatal("Loading failed", e);
        System.exit(1);
    }
    BasicImportingNormalizerFactory n = new BasicImportingNormalizerFactory(importMap, interimConfig);

    n.normalize(ontology);

    reasoner = new Reasoner(reasonerConfig, ontology);
    reasoner.classifyClasses();
    Set<OWLClass> after = reasoner.getUnsatisfiableClasses()
            .getEntitiesMinus(manager.getOWLDataFactory().getOWLNothing());

    //logger.info("Found " + after.size() + " inconsistent classes after import change.");
    logger.debug(after);

    /*
     * We need some tidying afterwards. The after set can contain
     * inconsistent classes that are inconsistent only because in the new
     * import, they are subclasses of a class that was already inconsistent before.
     * Hence we remove them from the after set.  
     */
    for (OWLClass c : before) {
        Set<OWLClass> subclasses = SubClassCollector.collect(c, manager.getImportsClosure(ontology));
        for (OWLClass subC : subclasses) {
            if (after.contains(subC) && !before.contains(subC)) {
                after.remove(subC);
            }
        }
    }
    int difference = before.size() - after.size();

    if (0 == difference) {
        logger.info(testFile + ": OK");
    } else {
        logger.warn(testFile + ": Import change is not neutral to inconsistencies (" + before.size() + '/'
                + after.size() + ")");
    }
}

From source file:edu.ucla.cs.scai.swim.qa.ontology.dbpedia.tipicality.Test.java
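
This example reads serialized HashMaps of category and attribute counts from counts.bin, ranks attributes per category, and uses another HashMap as a histogram of how well attribute sets predict categories.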

public static void main(String[] args) throws IOException, ClassNotFoundException {
    String path = DBpediaOntology.DBPEDIA_CSV_FOLDER;
    if (args != null && args.length > 0) {
        path = args[0];
        if (!path.endsWith("/")) {
            path = path + "/";
        }
    }

    stopAttributes.add("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
    stopAttributes.add("http://www.w3.org/2002/07/owl#sameAs");
    stopAttributes.add("http://dbpedia.org/ontology/wikiPageRevisionID");
    stopAttributes.add("http://dbpedia.org/ontology/wikiPageID");
    stopAttributes.add("http://purl.org/dc/elements/1.1/description");
    stopAttributes.add("http://dbpedia.org/ontology/thumbnail");
    stopAttributes.add("http://dbpedia.org/ontology/type");

    try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(path + "counts.bin"))) {
        categories = (HashSet<String>) ois.readObject();
        attributes = (HashSet<String>) ois.readObject();
        categoryCount = (HashMap<String, Integer>) ois.readObject();
        attributeCount = (HashMap<String, Integer>) ois.readObject();
        categoryAttributeCount = (HashMap<String, HashMap<String, Integer>>) ois.readObject();
        attributeCategoryCount = (HashMap<String, HashMap<String, Integer>>) ois.readObject();
    }

    System.out.println(categories.size() + " categories found");
    System.out.println(attributes.size() + " attributes found");

    n = 0;
    for (Map.Entry<String, Integer> e : categoryCount.entrySet()) {
        n += e.getValue();
    }

    System.out.println(n);

    HashMap<String, ArrayList<Pair>> sortedCategoryAttributes = new HashMap<>();

    for (String category : categories) {
        //System.out.println(category);
        //System.out.println("-----------");
        ArrayList<Pair> attributesRank = new ArrayList<Pair>();
        Integer c = categoryCount.get(category);
        if (c == null || c == 0) {
            continue;
        }
        HashMap<String, Integer> thisCategoryAttributeCount = categoryAttributeCount.get(category);
        for (Map.Entry<String, Integer> e : thisCategoryAttributeCount.entrySet()) {
            attributesRank.add(new Pair(e.getKey(), 1.0 * e.getValue() / c));
        }
        Collections.sort(attributesRank);
        //for (Pair p : attributesRank) {
        //    System.out.println("A:" + p.getS() + "\t" + p.getP());
        //}
        //System.out.println("===============================");
        sortedCategoryAttributes.put(category, attributesRank);
    }

    for (String attribute : attributes) {
        //System.out.println(attribute);
        //System.out.println("-----------");
        ArrayList<Pair> categoriesRank = new ArrayList<>();
        Integer a = attributeCount.get(attribute);
        if (a == null || a == 0) {
            continue;
        }
        HashMap<String, Integer> thisAttributeCategoryCount = attributeCategoryCount.get(attribute);
        for (Map.Entry<String, Integer> e : thisAttributeCategoryCount.entrySet()) {
            categoriesRank.add(new Pair(e.getKey(), 1.0 * e.getValue() / a));
        }
        Collections.sort(categoriesRank);
        //for (Pair p : categoriesRank) {
        //    System.out.println("C:" + p.getS() + "\t" + p.getP());
        //}
        //System.out.println("===============================");
    }

    HashMap<Integer, Integer> histogram = new HashMap<>();
    histogram.put(0, 0);
    histogram.put(1, 0);
    histogram.put(2, 0);
    histogram.put(Integer.MAX_VALUE, 0);

    int nTest = 0;

    if (args != null && args.length > 0) {
        path = args[0];
        if (!path.endsWith("/")) {
            path = path + "/";
        }
    }

    for (File f : new File(path).listFiles()) {
        if (f.isFile() && f.getName().endsWith(".csv")) {
            String category = f.getName().replaceFirst("\\.csv", "");
            System.out.println("Category: " + category);
            ArrayList<HashSet<String>> entities = extractEntities(f, 2);
            for (HashSet<String> attributesOfThisEntity : entities) {
                nTest++;
                ArrayList<String> rankedCategories = rankedCategories(attributesOfThisEntity);
                boolean found = false;
                for (int i = 0; i < rankedCategories.size() && !found; i++) {
                    if (rankedCategories.get(i).equals(category)) {
                        Integer count = histogram.get(i);
                        if (count == null) {
                            histogram.put(i, 1);
                        } else {
                            histogram.put(i, count + 1);
                        }
                        found = true;
                    }
                }
                if (!found) {
                    histogram.put(Integer.MAX_VALUE, histogram.get(Integer.MAX_VALUE) + 1);
                }
            }
            System.out.println("Tested entities: " + nTest);
            System.out.println("1: " + histogram.get(0));
            System.out.println("2: " + histogram.get(1));
            System.out.println("3: " + histogram.get(2));
            System.out.println("+3: " + (nTest - histogram.get(2) - histogram.get(1) - histogram.get(0)
                    - histogram.get(Integer.MAX_VALUE)));
            System.out.println("NF: " + histogram.get(Integer.MAX_VALUE));
        }
    }
}

From source file:fi.iki.elonen.SimpleWebServer.java
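
This example collects parsed command-line options into a HashMap that is handed to the web-server plugins.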

/**
 * Starts as a standalone file server and waits for Enter.
 */
public static void main(String[] args) {
    // Defaults
    int port = 8080;

    String host = null; // bind to all interfaces by default
    List<File> rootDirs = new ArrayList<File>();
    boolean quiet = false;
    String cors = null;
    Map<String, String> options = new HashMap<String, String>();

    // Parse command-line, with short and long versions of the options.
    for (int i = 0; i < args.length; ++i) {
        if ("-h".equalsIgnoreCase(args[i]) || "--host".equalsIgnoreCase(args[i])) {
            host = args[i + 1];
        } else if ("-p".equalsIgnoreCase(args[i]) || "--port".equalsIgnoreCase(args[i])) {
            if (args[i + 1].equals("public")) {
                port = PUBLIC;
            } else if (args[i + 1].equals("private")) {
                port = PRIVATE;
            } else {
                port = Integer.parseInt(args[i + 1]);
            }
        } else if ("-q".equalsIgnoreCase(args[i]) || "--quiet".equalsIgnoreCase(args[i])) {
            quiet = true;
        } else if ("-d".equalsIgnoreCase(args[i]) || "--dir".equalsIgnoreCase(args[i])) {
            rootDirs.add(new File(args[i + 1]).getAbsoluteFile());
        } else if (args[i].startsWith("--cors")) {
            cors = "*";
            int equalIdx = args[i].indexOf('=');
            if (equalIdx > 0) {
                cors = args[i].substring(equalIdx + 1);
            }
        } else if ("--licence".equalsIgnoreCase(args[i])) {
            System.out.println(SimpleWebServer.LICENCE + "\n");
        } else if (args[i].startsWith("-X:")) {
            int dot = args[i].indexOf('=');
            if (dot > 0) {
                String name = args[i].substring(0, dot);
                String value = args[i].substring(dot + 1, args[i].length());
                options.put(name, value);
            }
        }
    }

    if (rootDirs.isEmpty()) {
        rootDirs.add(new File(".").getAbsoluteFile());
    }
    options.put("host", host);
    options.put("port", "" + port);
    options.put("quiet", String.valueOf(quiet));
    StringBuilder sb = new StringBuilder();
    for (File dir : rootDirs) {
        if (sb.length() > 0) {
            sb.append(":");
        }
        try {
            sb.append(dir.getCanonicalPath());
        } catch (IOException ignored) {
        }
    }
    options.put("home", sb.toString());
    ServiceLoader<WebServerPluginInfo> serviceLoader = ServiceLoader.load(WebServerPluginInfo.class);
    for (WebServerPluginInfo info : serviceLoader) {
        String[] mimeTypes = info.getMimeTypes();
        for (String mime : mimeTypes) {
            String[] indexFiles = info.getIndexFilesForMimeType(mime);
            if (!quiet) {
                System.out.print("# Found plugin for Mime type: \"" + mime + "\"");
                if (indexFiles != null) {
                    System.out.print(" (serving index files: ");
                    for (String indexFile : indexFiles) {
                        System.out.print(indexFile + " ");
                    }
                }
                System.out.println(").");
            }
            registerPluginForMimeType(indexFiles, mime, info.getWebServerPlugin(mime), options);
        }
    }
    ServerRunner.executeInstance(new SimpleWebServer(host, port, rootDirs, quiet, cors));
}

From source file:com.linkedin.pinot.tools.data.generator.DataGenerator.java
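
This example fills HashMaps with per-column data types, field types, and cardinalities to configure a DataGeneratorSpec.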

public static void main(String[] args) throws IOException, JSONException {
    final String[] columns = { "column1", "column2", "column3", "column4", "column5" };
    final Map<String, DataType> dataTypes = new HashMap<String, DataType>();
    final Map<String, FieldType> fieldTypes = new HashMap<String, FieldType>();
    final Map<String, TimeUnit> timeUnits = new HashMap<String, TimeUnit>();

    final Map<String, Integer> cardinality = new HashMap<String, Integer>();
    final Map<String, IntRange> range = new HashMap<String, IntRange>();

    for (final String col : columns) {
        dataTypes.put(col, DataType.INT);
        fieldTypes.put(col, FieldType.DIMENSION);
        cardinality.put(col, 1000);
    }
    final DataGeneratorSpec spec = new DataGeneratorSpec(Arrays.asList(columns), cardinality, range, dataTypes,
            fieldTypes, timeUnits, FileFormat.AVRO, "/tmp/out", true);

    final DataGenerator gen = new DataGenerator();
    gen.init(spec);
    gen.generate(1000000L, 2);
}

From source file:it.eng.spagobi.behaviouralmodel.lov.service.GridMetadataContainer.java
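
This example adds small HashMaps as the fields and values of a GridMetadataContainer and prints its JSON form.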

public static void main(String[] args) {
    GridMetadataContainer gmc = new GridMetadataContainer();

    gmc.setResults(2);

    gmc.getFields().add("strNumber");
    HashMap<String, String> am = new HashMap<String, String>();
    am.put("a", "a");
    am.put("b", "b");
    gmc.getFields().add(am);
    HashMap<String, String> am2 = new HashMap<String, String>();
    am2.put("1", "1");
    am2.put("2", "2");
    gmc.getFields().add(am2);

    HashMap<String, String> am3 = new HashMap<String, String>();
    am3.put("11a", "11a");
    am3.put("11b", "11b");
    gmc.getValues().add(am3);
    HashMap<String, String> am4 = new HashMap<String, String>();
    am4.put("111", "111");
    am4.put("112", "112");
    gmc.getValues().add(am4);
    try {
        System.out.println(gmc.toJSONString());
    } catch (Exception e) {
        System.out.println("ssss");
    }

}

From source file:com.saba.CalendarDemo.java
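
This example passes report data around in a HashMap while building a styled XLSX workbook with Apache POI.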

public static void main(String[] args) throws Exception {

    Map<String, Object> data = new HashMap<String, Object>();
    prepareXLSDynamicValues(data);

    XSSFWorkbook workbook = new XSSFWorkbook();
    XSSFSheet sheet = workbook.createSheet("Awarded Bid & Contact Details");

    Map<String, CellStyle> styles = createStyles(workbook);
    sheet.setPrintGridlines(false);
    sheet.setDisplayGridlines(false);
    PrintSetup printSetup = sheet.getPrintSetup();
    printSetup.setLandscape(true);
    sheet.setFitToPage(true);
    sheet.setHorizontallyCenter(true);

    setupColumnWidthForEachFields(sheet);

    //preparing the contact & details table along with data 
    prepareContactDetailsTableAndData(data, sheet, styles);

    int contactdetrow = contactDetails.length + 2;
    //preparing the award bid & details table along with data 
    prepareAwardBidDetailsTableAndData(data, sheet, styles, contactdetrow);

    int awardDetailsRow = (contactDetails.length + awardedBidDetails.length + 4);
    //preparing the product & details table 
    prepareProductDetailsTable(workbook, sheet, styles, awardDetailsRow);
    //preparing the product & details table data 
    prepareProductDetailsTableData(data, sheet, styles, awardDetailsRow);

    FileOutputStream out = new FileOutputStream("award_bid.xlsx");
    workbook.write(out);
    out.close();

}

From source file:ml.dmlc.xgboost4j.java.example.CustomObjective.java
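
This example uses HashMaps for the XGBoost training parameters and for the watch list of evaluation matrices.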

public static void main(String[] args) throws XGBoostError {
    //load train mat (svmlight format)
    DMatrix trainMat = new DMatrix("../../demo/data/agaricus.txt.train");
    //load valid mat (svmlight format)
    DMatrix testMat = new DMatrix("../../demo/data/agaricus.txt.test");

    HashMap<String, Object> params = new HashMap<String, Object>();
    params.put("eta", 1.0);
    params.put("max_depth", 2);
    params.put("silent", 1);

    //set round
    int round = 2;

    //specify watchList
    HashMap<String, DMatrix> watches = new HashMap<String, DMatrix>();
    watches.put("train", trainMat);
    watches.put("test", testMat);

    //user-defined objective and evaluation
    IObjective obj = new LogRegObj();
    IEvaluation eval = new EvalError();

    //train a booster
    System.out.println("begin to train the booster model");
    Booster booster = XGBoost.train(trainMat, params, round, watches, obj, eval);
}

From source file:com.example.license.LicenseUtil.java
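
This example packs an encrypted license payload and its secret into a HashMap before RSA-encrypting the serialized result.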

public static void main(String[] args) {
    String key = "A1B2C3D4E5F6";
    LicenseData licenseData = new LicenseData();
    licenseData.setUserNum(50000000);
    licenseData.setSpaceSum(1000000000);
    licenseData.setEndTime(new Date());
    try {
        String source = convertToString(licenseData);
        log.info("license data  " + source);
        String secret = generateSecret(key, licenseData);
        log.info("  " + secret);
        String encryptData = DESUtil.encrypt(source, secret);
        log.info("license data?: " + encryptData);
        Map<String, String> ml = new HashMap<String, String>();
        ml.put("data", encryptData);
        ml.put("secret", secret);
        String license = convertToString(ml);
        log.info("license : " + license);

        String seed = "1";
        String encrypt_license = RSAUtil.encrypt(license, seed);
        log.info("encrypt_license : " + encrypt_license);
        String decrypt_license = RSAUtil.decrypt(encrypt_license, seed);
        log.info("decrypt_license : " + decrypt_license);

        @SuppressWarnings("unchecked")
        Map<String, String> decrypt_eicense_obj = convertToObjcet(decrypt_license, Map.class);
        String encrypt_data = decrypt_eicense_obj.get("data");
        log.info("encrypt_data : " + encrypt_data);
        String secret_ = decrypt_eicense_obj.get("secret");
        // LicenseData ld
        // =generateLicense(decrypt_license,LicenseData.class);
        // secret_+=ld.getEndTime().getTime();
        log.info(" : " + secret_);
        String decryptData = DESUtil.decrypt(encrypt_data, secret_);
        System.out.println("?: " + decryptData);

        Sigar sigar = new Sigar();
        try {
            Cpu cpu = sigar.getCpu();
            log.info(cpu.toString());
        } catch (SigarException e) {
            e.printStackTrace();
        }

        OperatingSystem os = OperatingSystem.getInstance();

        log.info(os.toString());
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.amazonaws.services.dynamodbv2.client.DynamoDBDynamicFaultInjection.java
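
This example builds a DynamoDB scan filter as a HashMap from attribute names to conditions.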

public static void main(String[] args) throws Exception {

    init();

    try {

        // Create a table with a primary key named 'name', which holds a string
        createTable();

        // Describe our new table
        describeTable();

        // Add some items
        putItem(newItem("Bill & Ted's Excellent Adventure", 1989, "****", "James", "Sara"));
        putItem(newItem("Airplane", 1980, "*****", "James", "Billy Bob"));

        // Get some items
        getItem("Airplane");
        getItem("Bill & Ted's Excellent Adventure");

        // Scan items for movies with a year attribute greater than 1985
        Map<String, Condition> scanFilter = new HashMap<String, Condition>();
        Condition condition = new Condition().withComparisonOperator(ComparisonOperator.GT.toString())
                .withAttributeValueList(new AttributeValue().withN("1985"));
        scanFilter.put("year", condition);
        ScanRequest scanRequest = new ScanRequest(TABLENAME).withScanFilter(scanFilter);
        ScanResult scanResult = dynamoDBClient.scan(scanRequest);
        logger.info("Result: " + scanResult);

    } catch (AmazonServiceException ase) {
        logger.error("Service Exception: " + ase);
    } catch (AmazonClientException ace) {
        logger.error("Client Exception: " + ace);
    }
}

From source file:com.mozilla.socorro.RawDumpSizeScan.java
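
This example uses a HashMap to remember the row keys and sizes of unusually large HBase values while accumulating summary statistics.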

public static void main(String[] args) throws ParseException {
    String startDateStr = args[0];
    String endDateStr = args[1];

    // Set both start/end time and start/stop row
    Calendar startCal = Calendar.getInstance();
    Calendar endCal = Calendar.getInstance();

    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");

    if (!StringUtils.isBlank(startDateStr)) {
        startCal.setTime(sdf.parse(startDateStr));
    }
    if (!StringUtils.isBlank(endDateStr)) {
        endCal.setTime(sdf.parse(endDateStr));
    }

    DescriptiveStatistics stats = new DescriptiveStatistics();
    long numNullRawBytes = 0L;
    HTable table = null;
    Map<String, Integer> rowValueSizeMap = new HashMap<String, Integer>();
    try {
        table = new HTable(TABLE_NAME_CRASH_REPORTS);
        Scan[] scans = generateScans(startCal, endCal);
        for (Scan s : scans) {
            ResultScanner rs = table.getScanner(s);
            Iterator<Result> iter = rs.iterator();
            while (iter.hasNext()) {
                Result r = iter.next();
                ImmutableBytesWritable rawBytes = r.getBytes();
                //length = r.getValue(RAW_DATA_BYTES, DUMP_BYTES);
                if (rawBytes != null) {
                    int length = rawBytes.getLength();
                    if (length > 20971520) {
                        rowValueSizeMap.put(new String(r.getRow()), length);
                    }
                    stats.addValue(length);
                } else {
                    numNullRawBytes++;
                }

                if (stats.getN() % 10000 == 0) {
                    System.out.println("Processed " + stats.getN());
                    System.out.println(String.format("Min: %.02f Max: %.02f Mean: %.02f", stats.getMin(),
                            stats.getMax(), stats.getMean()));
                    System.out.println(
                            String.format("1st Quartile: %.02f 2nd Quartile: %.02f 3rd Quartile: %.02f",
                                    stats.getPercentile(25.0d), stats.getPercentile(50.0d),
                                    stats.getPercentile(75.0d)));
                    System.out.println("Number of large entries: " + rowValueSizeMap.size());
                }
            }
            rs.close();
        }

        System.out.println("Finished Processing!");
        System.out.println(String.format("Min: %.02f Max: %.02f Mean: %.02f", stats.getMin(), stats.getMax(),
                stats.getMean()));
        System.out.println(String.format("1st Quartile: %.02f 2nd Quartile: %.02f 3rd Quartile: %.02f",
                stats.getPercentile(25.0d), stats.getPercentile(50.0d), stats.getPercentile(75.0d)));

        for (Map.Entry<String, Integer> entry : rowValueSizeMap.entrySet()) {
            System.out.println(String.format("RowId: %s => Length: %d", entry.getKey(), entry.getValue()));
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (table != null) {
            try {
                table.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}