Example usage for java.util HashMap clear

List of usage examples for java.util HashMap clear

Introduction

On this page you can find example usages of java.util HashMap clear().

Prototype

public void clear() 

Document

Removes all of the mappings from this map.
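
The call removes every entry while keeping the same map instance (and its capacity), which is why several of the examples below reuse one HashMap across loop iterations instead of allocating a new one. Here is a minimal, self-contained sketch of that behavior; the class and variable names are illustrative and not taken from the examples below:

import java.util.HashMap;

public class HashMapClearExample {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<String, Integer>();
        counts.put("a", 1);
        counts.put("b", 2);
        System.out.println(counts.size());    // prints 2

        // clear() drops all mappings but leaves the map object usable.
        counts.clear();
        System.out.println(counts.isEmpty()); // prints true

        // The same instance can be refilled, e.g. on the next loop iteration.
        counts.put("c", 3);
        System.out.println(counts.get("c"));  // prints 3
    }
}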

Usage

From source file:org.apache.hadoop.raid.TestRetryTaskEncoding.java

public void testRetryTask() throws Exception {
    LOG.info("Test testRetryTask started.");
    createClusters(false);
    Configuration newConf = new Configuration(conf);
    RaidNode.createChecksumStore(newConf, true);
    Path raidDir = new Path("/raidtest/1");
    HashMap<Codec, Long[]> fileCRCs = new HashMap<Codec, Long[]>();
    HashMap<Codec, Path> filePaths = new HashMap<Codec, Path>();
    HashMap<InjectionEventI, Double> specialFailProbs = new HashMap<InjectionEventI, Double>();
    PolicyInfo info = new PolicyInfo();
    info.setProperty("targetReplication", Integer.toString(targetReplication));
    info.setProperty("metaReplication", Integer.toString(metaReplication));
    try {
        createTestFiles(raidDir, filePaths, fileCRCs, null);
        LOG.info("Test testRetryTask created test files");
        for (Codec codec : Codec.getCodecs()) {
            Path filePath = filePaths.get(codec);
            FileStatus stat = fileSys.getFileStatus(filePath);
            info.setCodecId(codec.id);

            LOG.info("Codec: " + codec + ", Path: " + filePath + " Sync every task to the finalize stage, "
                    + "all partial parity files are generated");
            specialFailProbs.clear();
            specialFailProbs.put(InjectionEvent.RAID_ENCODING_FAILURE_CONCAT_FILE, 1.0);
            specialFailProbs.put(InjectionEvent.RAID_ENCODING_FAILURE_GET_SRC_STRIPES, 1.0);
            specialFailProbs.put(InjectionEvent.RAID_ENCODING_FAILURE_PUT_STRIPE, 1.0);
            TestEncodingHandler h = new TestEncodingHandler(0.0, specialFailProbs, false);
            InjectionHandler.set(h);
            assertEquals("Should succeed", true, runEncodingTasks(newConf, codec, stat, info, 1000));
            assertEquals("Only did two concats, one failed, one succeeded ", 2,
                    h.events.get(InjectionEvent.RAID_ENCODING_FAILURE_CONCAT_FILE).get());
            if (codec.isDirRaid) {
                assertEquals("Only did two getSrcStripes, one failed, two succeeded", 2,
                        h.events.get(InjectionEvent.RAID_ENCODING_FAILURE_GET_SRC_STRIPES).get());
                assertEquals("Only did two putStripes, one failed, one succeeded", 2,
                        h.events.get(InjectionEvent.RAID_ENCODING_FAILURE_PUT_STRIPE).get());
            }
            if (!codec.isDirRaid) {
                TestRaidDfs.waitForFileRaided(LOG, fileSys, filePath,
                        new Path(codec.parityDirectory, RaidNode.makeRelative(filePath.getParent())),
                        targetReplication);
            } else {
                TestRaidDfs.waitForDirRaided(LOG, fileSys, filePath,
                        new Path(codec.parityDirectory, RaidNode.makeRelative(raidDir)), targetReplication);
            }
            TestRaidDfs.waitForReplicasReduction(fileSys, filePath, targetReplication);
        }
        verifyCorrectness(raidDir, fileCRCs, null);
        LOG.info("Test testRetryTask successful.");
    } catch (Exception e) {
        LOG.info("testRetryTask Exception ", e);
        throw e;
    } finally {
        stopClusters();
    }
    LOG.info("Test testRetryTask completed.");
}

From source file:com.xandy.calendar.selectcalendars.SelectCalendarsSyncFragment.java

@Override
public void onPause() {
    final ListAdapter listAdapter = getListAdapter();
    if (listAdapter != null) {
        HashMap<Long, SelectCalendarsSyncAdapter.CalendarRow> changes = ((SelectCalendarsSyncAdapter) listAdapter)
                .getChanges();
        if (changes != null && changes.size() > 0) {
            for (SelectCalendarsSyncAdapter.CalendarRow row : changes.values()) {
                if (row.synced == row.originalSynced) {
                    continue;
                }
                long id = row.id;
                mService.cancelOperation((int) id);
                // Use the full long id in case it makes a difference
                Uri uri = ContentUris.withAppendedId(Calendars.CONTENT_URI, row.id);
                ContentValues values = new ContentValues();
                // Toggle the current setting
                int synced = row.synced ? 1 : 0;
                values.put(Calendars.SYNC_EVENTS, synced);
                values.put(Calendars.VISIBLE, synced);
                mService.startUpdate((int) id, null, uri, values, null, null, 0);
            }
            changes.clear();
        }
    }
    getActivity().getContentResolver().unregisterContentObserver(mCalendarsObserver);
    super.onPause();
}

From source file:org.eclipse.smila.lucene.internal.LuceneServiceImpl.java

/**
 * Unloads the mappings.
 */
protected void unloadMappings() {
    if (_mappings != null) {
        final Collection<HashMap<String, HashMap<String, Integer>>> collection = _mappings.values();
        for (final HashMap<String, HashMap<String, Integer>> map : collection) {
            if (map != null) {
                final Collection<HashMap<String, Integer>> values = map.values();
                for (final HashMap<String, Integer> submap : values) {
                    if (submap != null) {
                        submap.clear();
                    }
                }
                map.clear();
            }
        }
        _mappings.clear();
        _mappings = null;
    }
}

From source file:org.eclipse.smila.lucene.internal.LuceneServiceImpl.java

/**
 * Unloads the reverse mappings.
 */
private void unloadReverseMappings() {
    if (_reverseMappings != null) {
        final Collection<HashMap<String, HashMap<Integer, String>>> collection = _reverseMappings.values();
        for (final HashMap<String, HashMap<Integer, String>> map : collection) {
            if (map != null) {
                final Collection<HashMap<Integer, String>> values = map.values();
                for (final HashMap<Integer, String> submap : values) {
                    if (submap != null) {
                        submap.clear();
                    }
                }
                map.clear();
            }
        }
        _reverseMappings.clear();
        _reverseMappings = null;
    }
}

From source file:org.kuali.ext.mm.integration.service.impl.kfs.KfsPurchasingService.java

@SuppressWarnings("unchecked")
protected void populateReceivingAddress(RequisitionDocument reqs, String chartCode, String orgCode) {

    HashMap<String, Object> fieldValues = new HashMap<String, Object>();
    fieldValues.put("chartOfAccountsCode", chartCode);
    fieldValues.put("organizationCode", orgCode);
    fieldValues.put("defaultIndicator", true);
    // first check if there is a campus level bill address, if so use that instead of value from order document
    Collection recvAddresses = financialBusinessObjectService.findMatching(ReceivingAddress.class, fieldValues);
    ReceivingAddress defaultRcvAddress = null;
    if (recvAddresses != null && !recvAddresses.isEmpty()) {
        defaultRcvAddress = (ReceivingAddress) recvAddresses.iterator().next();
    } else {
        fieldValues.clear();
        fieldValues.put("chartOfAccountsCode", chartCode);
        fieldValues.put("defaultIndicator", true);
        recvAddresses = financialBusinessObjectService.findMatching(ReceivingAddress.class, fieldValues);
        if (recvAddresses != null && !recvAddresses.isEmpty()) {
            defaultRcvAddress = (ReceivingAddress) recvAddresses.iterator().next();
        }
    }
    if (defaultRcvAddress != null) {
        reqs.setReceivingName(defaultRcvAddress.getReceivingName());
        reqs.setReceivingLine1Address(defaultRcvAddress.getReceivingLine1Address());
        reqs.setReceivingLine2Address(defaultRcvAddress.getReceivingLine2Address());
        reqs.setReceivingCityName(defaultRcvAddress.getReceivingCityName());
        reqs.setReceivingStateCode(defaultRcvAddress.getReceivingStateCode());
        reqs.setReceivingPostalCode(defaultRcvAddress.getReceivingPostalCode());
        reqs.setReceivingCountryCode(defaultRcvAddress.getReceivingCountryCode());
        reqs.setAddressToVendorIndicator(defaultRcvAddress.isUseReceivingIndicator());
    } else {
        reqs.setReceivingName(null);
        reqs.setReceivingLine1Address(null);
        reqs.setReceivingLine2Address(null);
        reqs.setReceivingCityName(null);
        reqs.setReceivingStateCode(null);
        reqs.setReceivingPostalCode(null);
        reqs.setReceivingCountryCode(null);
        reqs.setAddressToVendorIndicator(false);
    }
}

From source file:com.miz.mizuu.fragments.TvShowDetailsFragment.java

private void loadSeasons(final int capacity) {
    // Show ProgressBar
    new AsyncTask<Void, Void, Void>() {
        private List<GridSeason> mSeasons = new ArrayList<GridSeason>();

        @Override
        protected Void doInBackground(Void... params) {

            HashMap<String, EpisodeCounter> seasons = MizuuApplication.getTvEpisodeDbAdapter()
                    .getSeasons(thisShow.getId());

            for (String key : seasons.keySet()) {
                File temp = FileUtils.getTvShowSeason(mContext, thisShow.getId(), key);
                mSeasons.add(new GridSeason(mContext, thisShow.getId(), Integer.valueOf(key),
                        seasons.get(key).getEpisodeCount(), seasons.get(key).getWatchedCount(),
                        temp.exists() ? temp : FileUtils.getTvShowThumb(mContext, thisShow.getId())));
            }

            seasons.clear();

            Collections.sort(mSeasons);

            return null;
        }

        @Override
        protected void onPostExecute(Void result) {
            mSeasonsLayout.loadItems(mContext, mPicasso, capacity, mImageThumbSize, mSeasons,
                    HorizontalCardLayout.SEASONS, mToolbarColor);
            mSeasonsLayout.setSeeMoreVisibility(true);
        }
    }.execute();
}

From source file:gov.nih.nci.ncicb.tcga.dcc.qclive.common.action.MafFileProcessor.java

@Override
protected File doWork(final File mafFile, final QcContext context) throws ProcessorException {
    // make sure the disease is set
    if (context.getArchive() != null) {
        DiseaseContextHolder.setDisease(context.getArchive().getTumorType());
    }

    FileReader fileReader = null;
    BufferedReader bufferedReader = null;

    try {
        // open file
        fileReader = new FileReader(mafFile);
        bufferedReader = new BufferedReader(fileReader);

        int lineNum = 0;

        // find first non-blank line not starting with #, this is the header
        String headerLine = bufferedReader.readLine();
        lineNum++;
        while (StringUtils.isEmpty(headerLine.trim())
                || StringUtils.startsWith(headerLine, COMMENT_LINE_TOKEN)) {
            headerLine = bufferedReader.readLine();
            lineNum++;
        }

        final List<String> headers = Arrays.asList(headerLine.split("\\t"));

        context.setFile(mafFile);
        final Map<String, Integer> fieldOrder = mapFieldOrder(headers);
        // need to find out the file id for this maf file
        final Long mafFileId = fileInfoQueries.getFileId(mafFile.getName(), context.getArchive().getId());
        if (mafFileId == null || mafFileId == -1) {
            context.getArchive().setDeployStatus(Archive.STATUS_IN_REVIEW);
            throw new ProcessorException(new StringBuilder().append("File '").append(mafFile.getName())
                    .append("' was not found in the database").toString());
        }
        if (isAddMafInfo(mafFileId)) {
            HashMap<String, BCRID> biospecimens = new HashMap<String, BCRID>();
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                lineNum++;
                if (!StringUtils.isBlank(line.trim()) && !StringUtils.startsWith(line, COMMENT_LINE_TOKEN)) {
                    final String[] row = line.split("\\t");

                    try {
                        processRow(row, fieldOrder, mafFileId, biospecimens, context, mafFile, lineNum);
                        //  If exceeds batch size store it in the database
                        if (biospecimens.size() >= getBatchSize()) {
                            try {
                                insertBiospecimenToFileRelationships(biospecimens, context, mafFile);
                            } catch (UUIDException ue) {
                                throw new ProcessorException(ue.getMessage(), ue);
                            }
                            biospecimens.clear();
                        }
                    } catch (DataAccessException e) {
                        // catch DB errors per line
                        context.getArchive().setDeployStatus(Archive.STATUS_IN_REVIEW);
                        context.addError(MessageFormat.format(MessagePropertyType.MAF_FILE_PROCESSING_ERROR,
                                mafFile.getName(),
                                new StringBuilder().append("Mutation information from file at line '")
                                        .append(lineNum).append("' was not successfully added. Root cause: ")
                                        .append(e.getMessage()).toString()));
                    }
                }
            }
            // process remaining biospecimens
            if (biospecimens.size() > 0) {
                try {
                    insertBiospecimenToFileRelationships(biospecimens, context, mafFile);
                } catch (UUIDException ue) {
                    context.getArchive().setDeployStatus(Archive.STATUS_IN_REVIEW);
                    throw new ProcessorException(ue.getMessage(), ue);
                } catch (DataAccessException e) {
                    context.getArchive().setDeployStatus(Archive.STATUS_IN_REVIEW);
                    throw new ProcessorException(e.getMessage(), e);
                }
                biospecimens.clear();
            }
        }
    } catch (IOException e) {
        context.getArchive().setDeployStatus(Archive.STATUS_IN_REVIEW);
        throw new ProcessorException(
                new StringBuilder().append("Error reading maf file ").append(mafFile.getName()).toString());
    } finally {
        if (bufferedReader != null) {
            try {
                bufferedReader.close();
            } catch (IOException e) {
                // ignore
            }
        }

        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }

    return mafFile;
}

From source file:org.apache.hadoop.hive.ql.QTestUtil2.java

public void createSources() throws Exception {

    startSessionState();

    // Create a bunch of tables with columns key and value
    LinkedList<String> cols = new LinkedList<String>();
    cols.add("key");
    cols.add("value");

    LinkedList<String> part_cols = new LinkedList<String>();
    part_cols.add("ds");
    part_cols.add("hr");
    db.createTable("srcpart", cols, part_cols, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);

    Path fpath;
    HashMap<String, String> part_spec = new HashMap<String, String>();
    for (String ds : new String[] { "2008-04-08", "2008-04-09" }) {
        for (String hr : new String[] { "11", "12" }) {
            part_spec.clear();
            part_spec.put("ds", ds);
            part_spec.put("hr", hr);
            // System.out.println("Loading partition with spec: " +
            // part_spec);
            // db.createPartition(srcpart, part_spec);
            fpath = new Path(testFiles, "kv1.txt");
            // db.loadPartition(fpath, srcpart.getName(), part_spec, true);
            runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
                    + "' OVERWRITE INTO TABLE srcpart PARTITION (ds='" + ds + "',hr='" + hr + "')");
        }
    }
    ArrayList<String> bucketCols = new ArrayList<String>();
    bucketCols.add("key");
    runCreateTableCmd(
            "CREATE TABLE srcbucket(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE");
    // db.createTable("srcbucket", cols, null, TextInputFormat.class,
    // IgnoreKeyTextOutputFormat.class, 2, bucketCols);
    for (String fname : new String[] { "srcbucket0.txt", "srcbucket1.txt" }) {
        fpath = new Path(testFiles, fname);
        runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE srcbucket");
    }

    runCreateTableCmd("CREATE TABLE srcbucket2(key int, value string) "
            + "CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE");
    // db.createTable("srcbucket", cols, null, TextInputFormat.class,
    // IgnoreKeyTextOutputFormat.class, 2, bucketCols);
    for (String fname : new String[] { "srcbucket20.txt", "srcbucket21.txt", "srcbucket22.txt",
            "srcbucket23.txt" }) {
        fpath = new Path(testFiles, fname);
        runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE srcbucket2");
    }

    for (String tname : new String[] { "src", "src1" }) {
        db.createTable(tname, cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
    }
    db.createTable("src_sequencefile", cols, null, SequenceFileInputFormat.class,
            SequenceFileOutputFormat.class);

    Table srcThrift = new Table(db.getCurrentDatabase(), "src_thrift");
    srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
    srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
    srcThrift.setSerializationLib(ThriftDeserializer.class.getName());
    srcThrift.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
    srcThrift.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
    db.createTable(srcThrift);

    LinkedList<String> json_cols = new LinkedList<String>();
    json_cols.add("json");
    db.createTable("src_json", json_cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);

    // load the input data into the src table
    fpath = new Path(testFiles, "kv1.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src");

    // load the input data into the src table
    fpath = new Path(testFiles, "kv3.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src1");

    // load the input data into the src_sequencefile table
    fpath = new Path(testFiles, "kv1.seq");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_sequencefile");

    // load the input data into the src_thrift table
    fpath = new Path(testFiles, "complex.seq");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_thrift");

    // load the json data into the src_json table
    fpath = new Path(testFiles, "json.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_json");

}

From source file:org.apache.hadoop.hive.sql.QTestUtil.java

public void createSources() throws Exception {

    startSessionState();
    conf.setBoolean("hive.test.init.phase", true);

    // Create a bunch of tables with columns key and value
    LinkedList<String> cols = new LinkedList<String>();
    cols.add("key");
    cols.add("value");

    LinkedList<String> part_cols = new LinkedList<String>();
    part_cols.add("ds");
    part_cols.add("hr");
    db.createTable("srcpart", cols, part_cols, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);

    Path fpath;
    HashMap<String, String> part_spec = new HashMap<String, String>();
    for (String ds : new String[] { "2008-04-08", "2008-04-09" }) {
        for (String hr : new String[] { "11", "12" }) {
            part_spec.clear();
            part_spec.put("ds", ds);
            part_spec.put("hr", hr);
            // System.out.println("Loading partition with spec: " + part_spec);
            // db.createPartition(srcpart, part_spec);
            fpath = new Path(testFiles, "kv1.txt");
            // db.loadPartition(fpath, srcpart.getName(), part_spec, true);
            runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString()
                    + "' OVERWRITE INTO TABLE srcpart PARTITION (ds='" + ds + "',hr='" + hr + "')");
        }
    }
    ArrayList<String> bucketCols = new ArrayList<String>();
    bucketCols.add("key");
    runCreateTableCmd(
            "CREATE TABLE srcbucket(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE");
    // db.createTable("srcbucket", cols, null, TextInputFormat.class,
    // IgnoreKeyTextOutputFormat.class, 2, bucketCols);
    for (String fname : new String[] { "srcbucket0.txt", "srcbucket1.txt" }) {
        fpath = new Path(testFiles, fname);
        runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE srcbucket");
    }

    runCreateTableCmd("CREATE TABLE srcbucket2(key int, value string) "
            + "CLUSTERED BY (key) INTO 4 BUCKETS STORED AS TEXTFILE");
    // db.createTable("srcbucket", cols, null, TextInputFormat.class,
    // IgnoreKeyTextOutputFormat.class, 2, bucketCols);
    for (String fname : new String[] { "srcbucket20.txt", "srcbucket21.txt", "srcbucket22.txt",
            "srcbucket23.txt" }) {
        fpath = new Path(testFiles, fname);
        runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE srcbucket2");
    }

    for (String tname : new String[] { "src", "src1" }) {
        db.createTable(tname, cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
    }
    db.createTable("src_sequencefile", cols, null, SequenceFileInputFormat.class,
            SequenceFileOutputFormat.class);

    Table srcThrift = new Table(db.getCurrentDatabase(), "src_thrift");
    srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
    srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
    srcThrift.setSerializationLib(ThriftDeserializer.class.getName());
    srcThrift.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
    srcThrift.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
    db.createTable(srcThrift);

    LinkedList<String> json_cols = new LinkedList<String>();
    json_cols.add("json");
    db.createTable("src_json", json_cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);

    // create table for sql testcases
    runCreateTableCmd(
            "CREATE TABLE src_sql_t1(a string, b string, c int, d int, e int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");
    runCreateTableCmd(
            "CREATE TABLE src_sql_t2(x string, y string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");
    runCreateTableCmd(
            "CREATE TABLE src_sql_t3(x string, y string, z int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE");

    // load the input data into the src table
    fpath = new Path(testFiles, "kv1.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src");

    // load the input data into the src table
    fpath = new Path(testFiles, "kv3.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src1");

    // load the input data into the src_sequencefile table
    fpath = new Path(testFiles, "kv1.seq");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_sequencefile");

    // load the input data into the src_thrift table
    fpath = new Path(testFiles, "complex.seq");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_thrift");

    // load the json data into the src_json table
    fpath = new Path(testFiles, "json.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_json");

    // load data for sql test cases
    fpath = new Path(testFiles, "sql_t1.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_sql_t1");

    fpath = new Path(testFiles, "sql_t2.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_sql_t2");

    fpath = new Path(testFiles, "sql_t3.txt");
    runLoadCmd("LOAD DATA LOCAL INPATH '" + fpath.toString() + "' INTO TABLE src_sql_t3");

    createOthers();// add by frank
    conf.setBoolean("hive.test.init.phase", false);
}