/*
 * Copyright 2016 Fumiharu Kinoshita
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package info.bunji.mongodb.synces;

import java.util.Set;

import org.bson.BasicBSONObject;
import org.bson.BsonTimestamp;
import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.ImmutableList;
import com.mongodb.BasicDBObject;
import com.mongodb.MongoClient;
import com.mongodb.QueryOperators;
import com.mongodb.client.FindIterable;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;

import info.bunji.asyncutil.AsyncProcess;
import info.bunji.mongodb.synces.util.DocumentUtils;

/**
 ************************************************
 * Initial collection import process.
 * @author Fumiharu Kinoshita
 ************************************************
 */
public class CollectionExtractor extends AsyncProcess<SyncOperation> {

    private Logger logger = LoggerFactory.getLogger(getClass());

    private SyncConfig config;

    private BsonTimestamp timestamp;

    /** number of processed documents between progress log lines */
    private static final long LOGGING_INTERVAL = 5000;

    /**
     ********************************************
     * @param config sync config
     * @param ts oplog timestamp to record as the last sync position once the import finishes
     ********************************************
     */
    public CollectionExtractor(SyncConfig config, BsonTimestamp ts) {
        this.config = config;
        timestamp = ts;
    }

    /*
     ********************************************
     * {@inheritDoc}
     ********************************************
     */
    @Override
    protected void execute() throws Exception {
        Set<String> includeFields = config.getIncludeFields();
        Set<String> excludeFields = config.getExcludeFields();
        String index = config.getDestDbName();
        String syncName = config.getSyncName();

        // connect to the source MongoDB
        try (MongoClient client = MongoClientService.getClient(config)) {
            logger.info("[{}] start initial import from db [{}]", syncName, config.getMongoDbName());

            // source database
            MongoDatabase db = client.getDatabase(config.getMongoDbName());

            // resume position for the _id filter (null: import each collection from the beginning)
            Object lastId = null;
            for (String collection : getTargetColectionList(db)) {
                logger.info("[{}] start initial import. [{}]", syncName, collection);
                MongoCollection<Document> conn = db.getCollection(collection);
                BasicDBObject filter = getFilterForInitialImport(new BasicDBObject(), lastId);
                long count = conn.count(filter);
                long processed = 0;

                // scan the collection in _id order and emit an INSERT operation per document
                FindIterable<Document> results = conn.find(filter).sort(new BasicDBObject("_id", 1));
                for (Document doc : results) {
                    Document filteredDoc = DocumentUtils.applyFieldFilter(doc, includeFields, excludeFields);
                    append(new SyncOperation(Operation.INSERT, index, collection, filteredDoc, null));
                    if ((++processed % LOGGING_INTERVAL) == 0) {
                        logger.info("[{}] processing initial import. [{}({}/{})]", syncName, collection, processed, count);
                    }
                }
                logger.info("[{}] initial import finished. [{}(total:{})]", syncName, collection, processed);
            }

            logger.info("[{}] finish import collection(s).", syncName);

            // switch to normal synchronization and record the resume position
            //append(DocumentUtils.makeStatusOperation(Status.RUNNING, config, timestamp));
            config.setStatus(Status.RUNNING);
            config.setLastOpTime(timestamp);
            //config.setLastSyncTime(timestamp);
            //append(DocumentUtils.makeStatusOperation(config));
            append(SyncOperation.fromConfig(config));
        } catch (Throwable t) {
            config.setStatus(Status.INITIAL_IMPORT_FAILED);
            logger.error("[{}] initial import failed.({})", syncName, t.getMessage(), t);
            throw t;
        }
    }

    /**
     **********************************
     * Build the find() filter for the initial import.
     *
     * @param filter base filter
     * @param id _id of the last imported document, or null to start from the beginning
     * @return the given filter combined with an "_id greater than id" condition
     **********************************
     */
    private BasicDBObject getFilterForInitialImport(BasicDBObject filter, Object id) {
        if (id == null) {
            return filter;
        }
        BasicDBObject idFilter = new BasicDBObject("_id", new BasicBSONObject(QueryOperators.GT, id));
        if (filter == null || filter.equals(new BasicDBObject())) {
            return idFilter;
        }
        return new BasicDBObject(QueryOperators.AND, ImmutableList.of(filter, idFilter));
    }

    /**
     **********************************
     * Get the collections to import.
     * <br>
     * If no collections are configured, every collection in the database
     * except the system collections is imported.
     * @param db source database
     * @return names of the target collections
     **********************************
     */
    private Set<String> getTargetColectionList(MongoDatabase db) {
        Set<String> collectionSet = config.getImportCollections();
        if (collectionSet.isEmpty()) {
            // nothing configured: import all collections except "system.*"
            MongoCursor<String> it = db.listCollectionNames().iterator();
            while (it.hasNext()) {
                String name = it.next();
                if (!name.startsWith("system.")) {
                    collectionSet.add(name);
                }
            }
        }
        return collectionSet;
    }

    /*
     **********************************
     * {@inheritDoc}
     **********************************
     */
    @Override
    protected void postProcess() {
        super.postProcess();
        //logger.info("[{}] extract collection finished.", config.getSyncName());
    }
}