Java tutorial
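The class below is the Pinpoint collector's HbaseMapStatisticsCallerDao, a Spring @Repository that updates caller-node statistics in HBase. For each observed caller -> callee call it builds a row key from the caller application, service type code, and time slot, and a column name from the callee application, host, and response-time/error slot. When useBulk is true (the default constructor), increments are buffered in a BulkIncrementer and written in batches by flushAll(); otherwise each call issues a single incrementColumnValue against HBase.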
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.collector.dao.hbase;

import com.navercorp.pinpoint.collector.dao.MapStatisticsCallerDao;
import com.navercorp.pinpoint.collector.dao.hbase.statistics.BulkIncrementer;
import com.navercorp.pinpoint.collector.dao.hbase.statistics.CallRowKey;
import com.navercorp.pinpoint.collector.dao.hbase.statistics.CalleeColumnName;
import com.navercorp.pinpoint.collector.dao.hbase.statistics.ColumnName;
import com.navercorp.pinpoint.collector.dao.hbase.statistics.RowKey;
import com.navercorp.pinpoint.common.hbase.HbaseColumnFamily;
import com.navercorp.pinpoint.common.hbase.HbaseOperations2;
import com.navercorp.pinpoint.common.server.util.AcceptedTimeService;
import com.navercorp.pinpoint.common.trace.ServiceType;
import com.navercorp.pinpoint.common.util.ApplicationMapStatisticsUtils;
import com.navercorp.pinpoint.common.util.TimeSlot;
import com.sematext.hbase.wd.RowKeyDistributorByHashPrefix;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Increment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Repository;

import java.util.List;
import java.util.Map;

/**
 * Update statistics of caller node
 *
 * @author netspider
 * @author emeroad
 * @author HyunGil Jeong
 */
@Repository
public class HbaseMapStatisticsCallerDao extends AbstractHbaseDao implements MapStatisticsCallerDao {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Autowired
    private HbaseOperations2 hbaseTemplate;

    @Autowired
    private AcceptedTimeService acceptedTimeService;

    @Autowired
    private TimeSlot timeSlot;

    @Autowired
    @Qualifier("callerBulkIncrementer")
    private BulkIncrementer bulkIncrementer;

    @Autowired
    @Qualifier("statisticsCallerRowKeyDistributor")
    private RowKeyDistributorByHashPrefix rowKeyDistributorByHashPrefix;

    private final boolean useBulk;

    public HbaseMapStatisticsCallerDao() {
        this(true);
    }

    public HbaseMapStatisticsCallerDao(boolean useBulk) {
        this.useBulk = useBulk;
    }

    @Override
    public void update(String callerApplicationName, ServiceType callerServiceType, String callerAgentid,
                       String calleeApplicationName, ServiceType calleeServiceType, String calleeHost,
                       int elapsed, boolean isError) {
        if (callerApplicationName == null) {
            throw new NullPointerException("callerApplicationName must not be null");
        }
        if (calleeApplicationName == null) {
            throw new NullPointerException("calleeApplicationName must not be null");
        }

        if (logger.isDebugEnabled()) {
            logger.debug("[Caller] {} ({}) {} -> {} ({})[{}]",
                    callerApplicationName, callerServiceType, callerAgentid,
                    calleeApplicationName, calleeServiceType, calleeHost);
        }

        // there may be no endpoint in case of httpclient
        calleeHost = StringUtils.defaultString(calleeHost);

        // make row key. The row key identifies the caller (this node) and the time slot.
        final long acceptedTime = acceptedTimeService.getAcceptedTime();
        final long rowTimeSlot = timeSlot.getTimeSlot(acceptedTime);
        final RowKey callerRowKey = new CallRowKey(callerApplicationName, callerServiceType.getCode(), rowTimeSlot);

        final short calleeSlotNumber = ApplicationMapStatisticsUtils.getSlotNumber(calleeServiceType, elapsed, isError);
        final ColumnName calleeColumnName = new CalleeColumnName(callerAgentid, calleeServiceType.getCode(), calleeApplicationName, calleeHost, calleeSlotNumber);

        if (useBulk) {
            // buffer the increment; it is written to HBase when flushAll() is called
            TableName mapStatisticsCalleeTableName = getTableName();
            bulkIncrementer.increment(mapStatisticsCalleeTableName, callerRowKey, calleeColumnName);
        } else {
            final byte[] rowKey = getDistributedKey(callerRowKey.getRowKey());
            // column name encodes the callee application (and the caller agent id)
            byte[] columnName = calleeColumnName.getColumnName();
            increment(rowKey, columnName, 1L);
        }
    }

    private void increment(byte[] rowKey, byte[] columnName, long increment) {
        if (rowKey == null) {
            throw new NullPointerException("rowKey must not be null");
        }
        if (columnName == null) {
            throw new NullPointerException("columnName must not be null");
        }
        TableName mapStatisticsCalleeTableName = getTableName();
        hbaseTemplate.incrementColumnValue(mapStatisticsCalleeTableName, rowKey, getColumnFamilyName(), columnName, increment);
    }

    @Override
    public void flushAll() {
        if (!useBulk) {
            throw new IllegalStateException();
        }

        // statistics are updated per row key and column for now; they may need to be updated per row key only later
        Map<TableName, List<Increment>> incrementMap = bulkIncrementer.getIncrements(rowKeyDistributorByHashPrefix);
        for (Map.Entry<TableName, List<Increment>> e : incrementMap.entrySet()) {
            TableName tableName = e.getKey();
            List<Increment> increments = e.getValue();
            if (logger.isDebugEnabled()) {
                logger.debug("flush {} to [{}] Increment:{}", this.getClass().getSimpleName(), tableName.getNameAsString(), increments.size());
            }
            hbaseTemplate.increment(tableName, increments);
        }
    }

    private byte[] getDistributedKey(byte[] rowKey) {
        return rowKeyDistributorByHashPrefix.getDistributedKey(rowKey);
    }

    @Override
    public HbaseColumnFamily getColumnFamily() {
        return HbaseColumnFamily.MAP_STATISTICS_CALLEE_VER2_COUNTER;
    }
}
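Below is a minimal usage sketch (not part of the Pinpoint source) showing how a collector-side component might call the DAO through its MapStatisticsCallerDao interface. The class name, field names, application/agent/host strings, and the scheduled flush interval are all hypothetical assumptions for illustration; in the actual collector the DAO is wired and flushed by Pinpoint's own components.

// Hypothetical example class; not part of Pinpoint. Names and values are illustrative only.
package com.navercorp.pinpoint.example;

import com.navercorp.pinpoint.collector.dao.MapStatisticsCallerDao;
import com.navercorp.pinpoint.common.trace.ServiceType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Component
public class CallerStatisticsRecorder {

    @Autowired
    private MapStatisticsCallerDao mapStatisticsCallerDao;

    // Record one caller -> callee call. With useBulk enabled (the default constructor),
    // this only buffers an increment in the BulkIncrementer; nothing is written to HBase yet.
    public void recordCall(ServiceType callerType, ServiceType calleeType, int elapsedMillis, boolean isError) {
        mapStatisticsCallerDao.update(
                "order-service", callerType, "order-agent-01",              // caller application / agent (illustrative)
                "payment-service", calleeType, "payment.example.com:8080",  // callee application / host (illustrative)
                elapsedMillis, isError);
    }

    // Periodically push the buffered increments to HBase in batches.
    // The 5-second delay is an assumption for illustration only.
    @Scheduled(fixedDelay = 5000)
    public void flush() {
        mapStatisticsCallerDao.flushAll();
    }
}

Note that flushAll() throws an IllegalStateException when the DAO was constructed with useBulk = false, so a scheduled flush like the one above only makes sense in bulk mode.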