Commit

morningman committed Aug 30, 2024
1 parent 0460d0a commit 75afcfd
Showing 5 changed files with 79 additions and 38 deletions.
ExternalMetaCacheMgr.java
@@ -36,6 +36,7 @@
 
 import com.github.benmanes.caffeine.cache.CacheLoader;
 import com.github.benmanes.caffeine.cache.RemovalListener;
+import com.github.benmanes.caffeine.cache.stats.CacheStats;
 import com.google.common.collect.Maps;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -296,4 +297,15 @@ public <T> MetaCache<T> buildMetaCache(String name,
                 maxSize, namesCacheLoader, metaObjCacheLoader, removalListener);
         return metaCache;
     }
+
+    public static Map<String, String> getCacheStats(CacheStats cacheStats, long estimatedSize) {
+        Map<String, String> stats = Maps.newHashMap();
+        stats.put("hit_ratio", String.valueOf(cacheStats.hitRate()));
+        stats.put("hit_count", String.valueOf(cacheStats.hitCount()));
+        stats.put("read_count", String.valueOf(cacheStats.hitCount() + cacheStats.missCount()));
+        stats.put("eviction_count", String.valueOf(cacheStats.evictionCount()));
+        stats.put("average_load_penalty", String.valueOf(cacheStats.averageLoadPenalty()));
+        stats.put("estimated_size", String.valueOf(estimatedSize));
+        return stats;
+    }
 }
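
The new static helper centralizes the metric map that each cache previously assembled for itself: five metrics derived from Caffeine's CacheStats, plus the estimated_size supplied by the caller. A minimal sketch of the Caffeine side, assuming nothing beyond the Caffeine library itself (the cache key and values below are illustrative only):

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.stats.CacheStats;

public class CacheStatsDemo {
    public static void main(String[] args) {
        // recordStats() must be enabled, otherwise stats() reports all zeros.
        Cache<String, String> cache = Caffeine.newBuilder()
                .maximumSize(100)
                .recordStats()
                .build();

        cache.put("db1.tbl1", "meta");
        cache.getIfPresent("db1.tbl1");  // hit
        cache.getIfPresent("db1.tbl2");  // miss

        CacheStats stats = cache.stats();
        // Mirrors ExternalMetaCacheMgr.getCacheStats(stats, cache.estimatedSize()):
        System.out.println("hit_ratio=" + stats.hitRate());                          // 0.5
        System.out.println("read_count=" + (stats.hitCount() + stats.missCount()));  // 2
        System.out.println("estimated_size=" + cache.estimatedSize());               // 1
    }
}

Note that stats() is a point-in-time snapshot; there is no direct read counter, so read_count is reconstructed as hitCount() + missCount(), exactly as the helper above does.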
HiveMetaStoreCache.java
@@ -36,6 +36,7 @@
 import org.apache.doris.common.util.LocationPath;
 import org.apache.doris.common.util.Util;
 import org.apache.doris.datasource.CacheException;
+import org.apache.doris.datasource.ExternalMetaCacheMgr;
 import org.apache.doris.datasource.hive.AcidInfo.DeleteDeltaInfo;
 import org.apache.doris.datasource.property.PropertyConverter;
 import org.apache.doris.fs.FileSystemCache;
@@ -52,7 +53,6 @@
 
 import com.github.benmanes.caffeine.cache.CacheLoader;
 import com.github.benmanes.caffeine.cache.LoadingCache;
-import com.github.benmanes.caffeine.cache.stats.CacheStats;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
@@ -1139,19 +1139,12 @@ public HivePartitionValues copy() {
      */
     public Map<String, Map<String, String>> getStats() {
         Map<String, Map<String, String>> res = Maps.newHashMap();
-        res.put("hive_partition_values_cache", getCacheStats(partitionValuesCache.stats()));
-        res.put("hive_partition_cache", getCacheStats(partitionCache.stats()));
-        res.put("hive_file_cache", getCacheStats(fileCacheRef.get().stats()));
+        res.put("hive_partition_values_cache", ExternalMetaCacheMgr.getCacheStats(partitionValuesCache.stats(),
+                partitionValuesCache.estimatedSize()));
+        res.put("hive_partition_cache",
+                ExternalMetaCacheMgr.getCacheStats(partitionCache.stats(), partitionCache.estimatedSize()));
+        res.put("hive_file_cache",
+                ExternalMetaCacheMgr.getCacheStats(fileCacheRef.get().stats(), fileCacheRef.get().estimatedSize()));
         return res;
     }
-
-    private Map<String, String> getCacheStats(CacheStats cacheStats) {
-        Map<String, String> stats = Maps.newHashMap();
-        stats.put("hit_ratio", String.valueOf(cacheStats.hitRate()));
-        stats.put("hit_count", String.valueOf(cacheStats.hitCount()));
-        stats.put("read_count", String.valueOf(cacheStats.hitCount() + cacheStats.missCount()));
-        stats.put("eviction_count", String.valueOf(cacheStats.evictionCount()));
-        stats.put("average_load_penalty", String.valueOf(cacheStats.averageLoadPenalty()));
-        return stats;
-    }
 }
HudiCachedPartitionProcessor.java
@@ -21,13 +21,15 @@
 import org.apache.doris.common.Config;
 import org.apache.doris.common.util.Util;
 import org.apache.doris.datasource.CacheException;
+import org.apache.doris.datasource.ExternalMetaCacheMgr;
 import org.apache.doris.datasource.TablePartitionValues;
 import org.apache.doris.datasource.TablePartitionValues.TablePartitionKey;
 import org.apache.doris.datasource.hive.HMSExternalCatalog;
 import org.apache.doris.datasource.hive.HMSExternalTable;
 
 import com.github.benmanes.caffeine.cache.LoadingCache;
 import com.google.common.base.Preconditions;
+import com.google.common.collect.Maps;
 import org.apache.hudi.common.table.HoodieTableMetaClient;
 import org.apache.hudi.common.table.timeline.HoodieInstant;
 import org.apache.hudi.common.table.timeline.HoodieTimeline;
@@ -37,6 +39,7 @@
 
 import java.util.Arrays;
 import java.util.List;
+import java.util.Map;
 import java.util.OptionalLong;
 import java.util.concurrent.Executor;
 import java.util.concurrent.ExecutorService;
@@ -55,7 +58,7 @@ public HudiCachedPartitionProcessor(long catalogId, ExecutorService executor) {
                 OptionalLong.of(28800L),
                 OptionalLong.of(Config.external_cache_expire_time_minutes_after_access * 60),
                 Config.max_external_table_cache_num,
-                false,
+                true,
                 null);
         this.partitionCache = partitionCacheFactory.buildCache(key -> new TablePartitionValues(), null, executor);
     }
@@ -167,4 +170,11 @@ public TablePartitionValues getPartitionValues(HMSExternalTable table, HoodieTab
             throw new CacheException("Failed to get hudi partitions: " + Util.getRootCauseMessage(e), e);
         }
     }
+
+    public Map<String, Map<String, String>> getCacheStats() {
+        Map<String, Map<String, String>> res = Maps.newHashMap();
+        res.put("hudi_partition_cache", ExternalMetaCacheMgr.getCacheStats(partitionCache.stats(),
+                partitionCache.estimatedSize()));
+        return res;
+    }
 }
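
Aside from the new getCacheStats() accessor, the only change in this file is the fourth CacheFactory argument flipping from false to true. CacheFactory is Doris-internal, but that position is consistent with an enable-stats flag: a Caffeine cache built without recordStats() reports an all-zero CacheStats, which would leave the new statistics permanently empty. A small sketch of that Caffeine behavior (not of CacheFactory itself):

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

import java.util.List;

public class RecordStatsDemo {
    public static void main(String[] args) {
        Cache<String, String> silent = Caffeine.newBuilder().build();
        Cache<String, String> tracked = Caffeine.newBuilder().recordStats().build();

        for (Cache<String, String> c : List.of(silent, tracked)) {
            c.put("k", "v");
            c.getIfPresent("k");   // hit
            c.getIfPresent("x");   // miss
        }

        // Without recordStats() every counter stays at zero.
        System.out.println(silent.stats().hitCount());   // 0
        System.out.println(tracked.stats().hitCount());  // 1
    }
}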
IcebergMetadataCache.java
@@ -22,6 +22,7 @@
 import org.apache.doris.common.Config;
 import org.apache.doris.common.UserException;
 import org.apache.doris.datasource.CatalogIf;
+import org.apache.doris.datasource.ExternalMetaCacheMgr;
 import org.apache.doris.datasource.hive.HMSExternalCatalog;
 import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper;
 import org.apache.doris.datasource.property.constants.HMSProperties;
@@ -31,6 +32,7 @@
 import com.github.benmanes.caffeine.cache.LoadingCache;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.iceberg.ManifestFiles;
 import org.apache.iceberg.SerializableTable;
@@ -58,15 +60,15 @@ public IcebergMetadataCache(ExecutorService executor) {
                 OptionalLong.of(28800L),
                 OptionalLong.of(Config.external_cache_expire_time_minutes_after_access * 60),
                 Config.max_external_table_cache_num,
-                false,
+                true,
                 null);
         this.snapshotListCache = snapshotListCacheFactory.buildCache(key -> loadSnapshots(key), null, executor);
 
         CacheFactory tableCacheFactory = new CacheFactory(
                 OptionalLong.of(28800L),
                 OptionalLong.of(Config.external_cache_expire_time_minutes_after_access * 60),
                 Config.max_external_table_cache_num,
-                false,
+                true,
                 null);
         this.tableCache = tableCacheFactory.buildCache(key -> loadTable(key), null, executor);
     }
@@ -246,4 +248,13 @@ public int hashCode() {
             return Objects.hash(catalog.getId(), dbName, tableName);
         }
     }
+
+    public Map<String, Map<String, String>> getCacheStats() {
+        Map<String, Map<String, String>> res = Maps.newHashMap();
+        res.put("iceberg_snapshot_cache", ExternalMetaCacheMgr.getCacheStats(snapshotListCache.stats(),
+                snapshotListCache.estimatedSize()));
+        res.put("iceberg_table_cache", ExternalMetaCacheMgr.getCacheStats(tableCache.stats(),
+                tableCache.estimatedSize()));
+        return res;
+    }
 }
MetadataGenerator.java
@@ -41,9 +41,12 @@
 import org.apache.doris.common.util.TimeUtils;
 import org.apache.doris.datasource.CatalogIf;
 import org.apache.doris.datasource.ExternalCatalog;
+import org.apache.doris.datasource.ExternalMetaCacheMgr;
 import org.apache.doris.datasource.InternalCatalog;
 import org.apache.doris.datasource.hive.HMSExternalCatalog;
 import org.apache.doris.datasource.hive.HiveMetaStoreCache;
+import org.apache.doris.datasource.hudi.source.HudiCachedPartitionProcessor;
+import org.apache.doris.datasource.iceberg.IcebergExternalCatalog;
 import org.apache.doris.datasource.iceberg.IcebergMetadataCache;
 import org.apache.doris.datasource.maxcompute.MaxComputeExternalCatalog;
 import org.apache.doris.job.common.JobType;
@@ -1268,34 +1271,46 @@ private static TFetchSchemaTableDataResult tablePropertiesMetadataResult(TSchema
     private static TFetchSchemaTableDataResult metaCacheStatsMetadataResult(TSchemaTableRequestParams params) {
         List<TRow> dataBatch = Lists.newArrayList();
         TFetchSchemaTableDataResult result = new TFetchSchemaTableDataResult();
+        ExternalMetaCacheMgr mgr = Env.getCurrentEnv().getExtMetaCacheMgr();
         for (CatalogIf catalogIf : Env.getCurrentEnv().getCatalogMgr().getCopyOfCatalog()) {
-            if (!(catalogIf instanceof HMSExternalCatalog)) {
-                continue;
-            }
-            HMSExternalCatalog catalog = (HMSExternalCatalog) catalogIf;
-            HiveMetaStoreCache cache = Env.getCurrentEnv().getExtMetaCacheMgr()
-                    .getMetaStoreCache(catalog);
-            if (cache != null) {
-                Map<String, Map<String, String>> stats = cache.getStats();
-                for (Map.Entry<String, Map<String, String>> entry : stats.entrySet()) {
-                    String cacheName = entry.getKey();
-                    Map<String, String> cacheStats = entry.getValue();
-                    for (Map.Entry<String, String> cacheStatsEntry : cacheStats.entrySet()) {
-                        String metricName = cacheStatsEntry.getKey();
-                        String metricValue = cacheStatsEntry.getValue();
-                        TRow trow = new TRow();
-                        trow.addToColumnValue(new TCell().setStringVal(catalog.getName())); // CATALOG_NAME
-                        trow.addToColumnValue(new TCell().setStringVal(cacheName)); // CACHE_NAME
-                        trow.addToColumnValue(new TCell().setStringVal(metricName)); // METRIC_NAME
-                        trow.addToColumnValue(new TCell().setStringVal(metricValue)); // METRIC_VALUE
-                        dataBatch.add(trow);
-                    }
+            if (catalogIf instanceof HMSExternalCatalog) {
+                HMSExternalCatalog catalog = (HMSExternalCatalog) catalogIf;
+                // 1. hive metastore cache
+                HiveMetaStoreCache cache = mgr.getMetaStoreCache(catalog);
+                if (cache != null) {
+                    fillBatch(dataBatch, cache.getStats(), catalog.getName());
                 }
+                // 2. hudi cache
+                HudiCachedPartitionProcessor processor
+                        = (HudiCachedPartitionProcessor) mgr.getHudiPartitionProcess(catalog);
+                fillBatch(dataBatch, processor.getCacheStats(), catalog.getName());
+            } else if (catalogIf instanceof IcebergExternalCatalog) {
+                // 3. iceberg cache
+                IcebergMetadataCache icebergCache = mgr.getIcebergMetadataCache();
+                fillBatch(dataBatch, icebergCache.getCacheStats(), catalogIf.getName());
             }
         }
 
         result.setDataBatch(dataBatch);
         result.setStatus(new TStatus(TStatusCode.OK));
         return result;
     }
+
+    private static void fillBatch(List<TRow> dataBatch, Map<String, Map<String, String>> stats,
+            String catalogName) {
+        for (Map.Entry<String, Map<String, String>> entry : stats.entrySet()) {
+            String cacheName = entry.getKey();
+            Map<String, String> cacheStats = entry.getValue();
+            for (Map.Entry<String, String> cacheStatsEntry : cacheStats.entrySet()) {
+                String metricName = cacheStatsEntry.getKey();
+                String metricValue = cacheStatsEntry.getValue();
+                TRow trow = new TRow();
+                trow.addToColumnValue(new TCell().setStringVal(catalogName)); // CATALOG_NAME
+                trow.addToColumnValue(new TCell().setStringVal(cacheName)); // CACHE_NAME
+                trow.addToColumnValue(new TCell().setStringVal(metricName)); // METRIC_NAME
+                trow.addToColumnValue(new TCell().setStringVal(metricValue)); // METRIC_VALUE
+                dataBatch.add(trow);
+            }
+        }
+    }
 }
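
The extracted fillBatch flattens the two-level map produced by the getStats()/getCacheStats() methods above into one four-column row per metric (CATALOG_NAME, CACHE_NAME, METRIC_NAME, METRIC_VALUE). A self-contained sketch of the same flattening with plain strings in place of TRow/TCell; the catalog name and metric values here are made up:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class FillBatchDemo {
    public static void main(String[] args) {
        // cache name -> (metric name -> value), as returned by getCacheStats()
        Map<String, Map<String, String>> stats = new LinkedHashMap<>();
        stats.put("hive_partition_cache", Map.of("hit_ratio", "0.93", "estimated_size", "1024"));

        String catalogName = "my_hive_catalog";  // hypothetical catalog name
        List<String[]> dataBatch = new ArrayList<>();
        for (Map.Entry<String, Map<String, String>> entry : stats.entrySet()) {
            for (Map.Entry<String, String> metric : entry.getValue().entrySet()) {
                // one row per metric: CATALOG_NAME, CACHE_NAME, METRIC_NAME, METRIC_VALUE
                dataBatch.add(new String[] {catalogName, entry.getKey(), metric.getKey(), metric.getValue()});
            }
        }
        dataBatch.forEach(row -> System.out.println(String.join(" | ", row)));
    }
}

Because every metric becomes its own row, one schema table can serve the hive, hudi, and iceberg caches uniformly, whatever metrics each one reports.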
