From 933f46d3f2565e69b54bf300c9162ab31c8338da Mon Sep 17 00:00:00 2001 From: Puneet Saraswat <61435908+saraswatpuneet@users.noreply.github.com> Date: Wed, 7 Aug 2024 14:40:58 -0500 Subject: [PATCH 1/3] Bug 340: Content hash is out of spec in announcements (#343) # Problem Content watcher bug #340: the announcement `contentHash` was calculated out of spec. ## Details - [x] Updated content-watcher to calculate the correct contentHash - [x] Updated content-watcher - [x] Update app template (if needed): Note: the social app template simply uses contentHash as a key and does not do any hash validation, so no updates are necessary. Closes: #340 # Solution Fixed by following the DSNP spec and hashing the bytes of the content. --------- Co-authored-by: Joe Caputo Co-authored-by: Wil Wade --- .../dsnp.announcement.processor.ts | 10 ++++- .../libs/common/src/utils/ipfs.client.spec.ts | 43 +++++++++++++++++++ .../libs/common/src/utils/ipfs.client.ts | 14 +++--- .../libs/common/src/ipfs/ipfs.dsnp.ts | 12 +++--- .../libs/common/src/utils/ipfs.client.ts | 14 +++--- 5 files changed, 71 insertions(+), 22 deletions(-) create mode 100644 services/content-publishing/libs/common/src/utils/ipfs.client.spec.ts diff --git a/services/content-publishing/apps/worker/src/request_processor/dsnp.announcement.processor.ts b/services/content-publishing/apps/worker/src/request_processor/dsnp.announcement.processor.ts index f7a4064b..3baecd2b 100644 --- a/services/content-publishing/apps/worker/src/request_processor/dsnp.announcement.processor.ts +++ b/services/content-publishing/apps/worker/src/request_processor/dsnp.announcement.processor.ts @@ -179,7 +179,10 @@ export class DsnpAnnouncementProcessor { attachment: attachments, }); const noteString = JSON.stringify(note); - const [cid, hash] = await this.pinBufferToIPFS(Buffer.from(noteString)); + const toUint8Array = new TextEncoder(); + const encoded = toUint8Array.encode(noteString); + + const [cid, hash] = await this.pinBufferToIPFS(Buffer.from(encoded)); const ipfsUrl = this.formIpfsUrl(cid); return [cid, ipfsUrl, hash]; } @@ -397,8 +400,11 @@ export class DsnpAnnouncementProcessor { icon: attachments, tag: this.prepareTags(content.profile.tag), }; + const toUint8Array = new TextEncoder(); const profileString = JSON.stringify(profileActivity); - const [cid, hash] = await this.pinBufferToIPFS(Buffer.from(profileString)); + const profileEncoded = toUint8Array.encode(profileString); + + const [cid, hash] = await this.pinBufferToIPFS(Buffer.from(profileEncoded)); return createProfile(dsnpUserId, this.formIpfsUrl(cid), hash); } diff --git a/services/content-publishing/libs/common/src/utils/ipfs.client.spec.ts b/services/content-publishing/libs/common/src/utils/ipfs.client.spec.ts new file mode 100644 index 00000000..22138a51 --- /dev/null +++ b/services/content-publishing/libs/common/src/utils/ipfs.client.spec.ts @@ -0,0 +1,43 @@ +import { Test, TestingModule } from '@nestjs/testing'; +import { IpfsService } from './ipfs.client'; +import { ConfigService } from '#libs/config'; +import { Logger } from '@nestjs/common'; + +jest.mock('axios'); + +describe('IpfsService Tests', () => { + let service: IpfsService; + let configService: ConfigService; + let logger: Logger; + + beforeEach(async () => { + const module: TestingModule = await Test.createTestingModule({ + providers: [ + IpfsService, + { + provide: ConfigService, + useValue: { + ipfsEndpoint: 'http://localhost:5001', + ipfsBasicAuthUser: '', + ipfsBasicAuthSecret: '', + ipfsGatewayUrl: 
'http://localhost:8080/ipfs/[CID]', + }, + }, + ], + }).compile(); + + service = module.get(IpfsService); + configService = module.get(ConfigService); + logger = new Logger(IpfsService.name); + }); + + + it('should be defined', () => { + expect(service).toBeDefined(); + }); + + it("hashes blake2b correctly ABC", async () => { + const mb = await service.ipfsHashBuffer(Buffer.from("abc")); + expect(mb).toMatch("bciqlu6awx6hqdt7kifaubxs5vyrchmadmgrzmf32ts2bb73b6iablli"); + }); +}); \ No newline at end of file diff --git a/services/content-publishing/libs/common/src/utils/ipfs.client.ts b/services/content-publishing/libs/common/src/utils/ipfs.client.ts index 65fd2d0f..7630c562 100644 --- a/services/content-publishing/libs/common/src/utils/ipfs.client.ts +++ b/services/content-publishing/libs/common/src/utils/ipfs.client.ts @@ -5,10 +5,9 @@ import axios from 'axios'; import FormData from 'form-data'; import { extension as getExtension } from 'mime-types'; import { CID } from 'multiformats/cid'; -import { blake2b256 as hasher } from '@multiformats/blake2/blake2b'; -import { create } from 'multiformats/hashes/digest'; +import { sha256 } from "multiformats/hashes/sha2"; import { randomUUID } from 'crypto'; -import { base58btc } from 'multiformats/bases/base58'; +import { base32 } from 'multiformats/bases/base32'; import { ConfigService } from '#libs/config'; export interface FilePin { @@ -127,11 +126,12 @@ export class IpfsService { return response && response.data && JSON.stringify(response.data).indexOf(v0Cid) >= 0; } - private async ipfsHashBuffer(fileBuffer: Buffer): Promise { + public async ipfsHashBuffer(fileBuffer: Buffer): Promise { + // Hash with sha256 + // Encode with base32 this.logger.debug(`Hashing file buffer with length: ${fileBuffer.length}`); - const hashed = await hasher.digest(fileBuffer); - const hash = create(hasher.code, hashed.bytes); - return base58btc.encode(hash.bytes); + const hash = await sha256.digest(fileBuffer); + return base32.encode(hash.bytes); } public ipfsUrl(cid: string): string { diff --git a/services/content-watcher/libs/common/src/ipfs/ipfs.dsnp.ts b/services/content-watcher/libs/common/src/ipfs/ipfs.dsnp.ts index 6b2f4987..47316c89 100644 --- a/services/content-watcher/libs/common/src/ipfs/ipfs.dsnp.ts +++ b/services/content-watcher/libs/common/src/ipfs/ipfs.dsnp.ts @@ -98,7 +98,7 @@ export class IPFSContentProcessor extends BaseConsumer { if (isBroadcast(mapRecord)) { announcementResponse.announcement = { fromId: mapRecord.fromId, - contentHash: bases.base16.encode(mapRecord.contentHash as never), + contentHash: mapRecord.contentHash, url: mapRecord.url, announcementType: mapRecord.announcementType, }; @@ -108,7 +108,7 @@ export class IPFSContentProcessor extends BaseConsumer { announcementResponse.announcement = { fromId: mapRecord.fromId, targetAnnouncementType: mapRecord.targetAnnouncementType, - targetContentHash: bases.base58btc.encode(mapRecord.targetContentHash as any), + targetContentHash: mapRecord.targetContentHash, announcementType: mapRecord.announcementType, }; queue = this.tombstoneQueue; @@ -129,7 +129,7 @@ export class IPFSContentProcessor extends BaseConsumer { announcementType: mapRecord.announcementType, url: mapRecord.url, inReplyTo: mapRecord.inReplyTo, - contentHash: bases.base58btc.encode(mapRecord.contentHash as any), + contentHash: mapRecord.contentHash, }; queue = this.replyQueue; typeName = 'Reply'; @@ -138,7 +138,7 @@ export class IPFSContentProcessor extends BaseConsumer { fromId: mapRecord.fromId, announcementType: 
mapRecord.announcementType, url: mapRecord.url, - contentHash: bases.base58btc.encode(mapRecord.contentHash as any), + contentHash: mapRecord.contentHash, }; queue = this.profileQueue; typeName = 'Profile'; @@ -147,9 +147,9 @@ export class IPFSContentProcessor extends BaseConsumer { fromId: mapRecord.fromId, announcementType: mapRecord.announcementType, url: mapRecord.url, - contentHash: bases.base58btc.encode(mapRecord.contentHash as any), + contentHash: mapRecord.contentHash, targetAnnouncementType: mapRecord.targetAnnouncementType, - targetContentHash: bases.base58btc.encode(mapRecord.targetContentHash as any), + targetContentHash: mapRecord.targetContentHash, }; queue = this.updateQueue; typeName = 'Update'; diff --git a/services/content-watcher/libs/common/src/utils/ipfs.client.ts b/services/content-watcher/libs/common/src/utils/ipfs.client.ts index 76bba229..613888ee 100644 --- a/services/content-watcher/libs/common/src/utils/ipfs.client.ts +++ b/services/content-watcher/libs/common/src/utils/ipfs.client.ts @@ -5,11 +5,10 @@ import axios from 'axios'; import FormData from 'form-data'; import { extension as getExtension } from 'mime-types'; import { CID } from 'multiformats/cid'; -import { blake2b256 as hasher } from '@multiformats/blake2/blake2b'; -import { create } from 'multiformats/hashes/digest'; +import { sha256 } from 'multiformats/hashes/sha2'; import { randomUUID } from 'crypto'; -import { base58btc } from 'multiformats/bases/base58'; import { AppConfigService } from '../config/config.service'; +import { base32 } from 'multiformats/bases/base32'; export interface FilePin { cid: string; @@ -127,11 +126,12 @@ export class IpfsService { return response && response.data && JSON.stringify(response.data).indexOf(v0Cid) >= 0; } - private async ipfsHashBuffer(fileBuffer: Buffer): Promise { + public async ipfsHashBuffer(fileBuffer: Buffer): Promise { + // Hash with sha256 + // Encode with base32 this.logger.debug(`Hashing file buffer with length: ${fileBuffer.length}`); - const hashed = await hasher.digest(fileBuffer); - const hash = create(hasher.code, hashed.bytes); - return base58btc.encode(hash.bytes); + const hash = await sha256.digest(fileBuffer); + return base32.encode(hash.bytes); } public ipfsUrl(cid: string): string { From 669e73ab1b727b6254e24e7047337bae74cb0bd5 Mon Sep 17 00:00:00 2001 From: Matthew Orris <1466844+mattheworris@users.noreply.github.com> Date: Wed, 7 Aug 2024 17:36:22 -0400 Subject: [PATCH 2/3] bug: Make sure blockCount is not longer than the current chain length (#348) # Problem When running a local development frequency node, the chain length will be quite short. This results in trying to scan a blocklist that includes negative numbers, which causes the scan to fail and no previous content to be found. # Solution ```javascript // Make sure blockCount is not longer than the current chain length if (job.data.blockCount >= startBlock) { job.data.blockCount = startBlock; } ``` ## Steps to Verify: 1. Run SAT with a local frequency node and verify that the initial scan completes correctly. 
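A minimal sketch of the clamp's effect (the standalone `buildBlockList` helper and the sample numbers are illustrative only; in the service the logic lives inline in `crawler.service.ts`):

```typescript
// Illustrative helper mirroring the crawler's block-list construction.
function buildBlockList(startBlock: number, blockCount: number): number[] {
  // Make sure blockCount is not longer than the current chain length
  if (blockCount >= startBlock) {
    blockCount = startBlock;
  }
  const blockList = new Array(blockCount).fill(0).map((_v, index) => startBlock - index);
  blockList.reverse();
  return blockList;
}

// On a short local chain (say 5 blocks) with a requested blockCount of 100,
// the unclamped list would run 5, 4, ..., -94; clamped it is [1, 2, 3, 4, 5].
console.log(buildBlockList(5, 100)); // [1, 2, 3, 4, 5]
```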
--- .../libs/common/src/crawler/crawler.service.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/services/content-watcher/libs/common/src/crawler/crawler.service.ts b/services/content-watcher/libs/common/src/crawler/crawler.service.ts index 6bec8225..9e180cc6 100644 --- a/services/content-watcher/libs/common/src/crawler/crawler.service.ts +++ b/services/content-watcher/libs/common/src/crawler/crawler.service.ts @@ -37,6 +37,10 @@ export class CrawlerService extends BaseConsumer { job.data.startBlock = startBlock; this.logger.debug(`No starting block specified; starting from end of chain at block ${startBlock}`); } + // Make sure blockCount is not longer than the current chain length + if (job.data.blockCount >= startBlock) { + job.data.blockCount = startBlock; + } let blockList = new Array(job.data.blockCount).fill(0).map((_v, index) => startBlock - index); blockList.reverse(); From c4372bfb5be748e7fc07511a7c3917eb43fe2584 Mon Sep 17 00:00:00 2001 From: Matthew Orris <1466844+mattheworris@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:03:14 -0400 Subject: [PATCH 3/3] Namespace Redis Connections (#349) We want to allow for all services to use the same Redis. - Make sure each service has a different Redis connection namespace for non-bull redis connections. - Make sure all Bull Queues have unique names via prefixing the queues - Note this as a BREAKING change Services: - [x] Graph Service - [x] Account Service - [x] Content Publishing Service - [x] Content Watcher Service Closes #189 --- services/account/ENVIRONMENT.md | 1 + services/account/apps/api/src/api.module.ts | 1 + services/account/apps/worker/src/worker.module.ts | 1 + services/account/env.template | 3 +++ .../account/libs/common/src/config/config.service.spec.ts | 5 +++++ services/account/libs/common/src/config/config.service.ts | 5 +++++ services/account/libs/common/src/config/env.config.ts | 1 + services/account/libs/common/src/queues/queues.module.ts | 1 + services/content-publishing/ENVIRONMENT.md | 1 + services/content-publishing/apps/api/src/api.module.ts | 5 ++++- services/content-publishing/apps/worker/src/worker.module.ts | 2 +- services/content-publishing/env.template | 3 +++ .../libs/common/src/config/config.service.spec.ts | 5 +++++ .../libs/common/src/config/config.service.ts | 5 +++++ .../content-publishing/libs/common/src/config/env.config.ts | 1 + .../libs/common/src/queues/queues.module.ts | 1 + services/graph/ENVIRONMENT.md | 2 +- services/graph/libs/common/src/config/env.config.ts | 2 +- 18 files changed, 41 insertions(+), 4 deletions(-) diff --git a/services/account/ENVIRONMENT.md b/services/account/ENVIRONMENT.md index 511b7914..4022caf5 100644 --- a/services/account/ENVIRONMENT.md +++ b/services/account/ENVIRONMENT.md @@ -7,6 +7,7 @@ This application recognizes the following environment variables: | `API_PORT` | HTTP port that the application listens on | 1025 - 65535 | | 3000 | | `BLOCKCHAIN_SCAN_INTERVAL_SECONDS` | How many seconds to delay between successive scans of the chain for new content (after end of chain is reached) | > 0 | | 12 | | `TRUST_UNFINALIZED_BLOCKS` | Whether to examine blocks that have not been finalized when tracking extrinsic completion | boolean | | false | +| `CACHE_KEY_PREFIX` | Prefix to use for Redis cache keys | string | | account: | | `CAPACITY_LIMIT` | Maximum amount of provider capacity this app is allowed to use (per epoch) type: 'percentage' 'amount' value: number (may be percentage, ie '80', or absolute amount of capacity) | JSON [(example)](./env.template) | Y 
| | | `FREQUENCY_URL` | Blockchain node address | http(s): or ws(s): URL | Y | | | `FREQUENCY_HTTP_URL` | Blockchain node address resolvable from the client browser | http(s): URL | Y | | diff --git a/services/account/apps/api/src/api.module.ts b/services/account/apps/api/src/api.module.ts index c93de1c7..8da7c8ec 100644 --- a/services/account/apps/api/src/api.module.ts +++ b/services/account/apps/api/src/api.module.ts @@ -50,6 +50,7 @@ import { ApiService, AccountsService, HandlesService, DelegationService, KeysSer config: [ { url: configService.redisUrl.toString(), + keyPrefix: configService.cacheKeyPrefix, maxRetriesPerRequest: null, onClientCreated(client) { redisEventsToEventEmitter(client, eventEmitter); diff --git a/services/account/apps/worker/src/worker.module.ts b/services/account/apps/worker/src/worker.module.ts index b296d4a2..6c314ab0 100644 --- a/services/account/apps/worker/src/worker.module.ts +++ b/services/account/apps/worker/src/worker.module.ts @@ -41,6 +41,7 @@ import { TransactionPublisherModule } from './transaction_publisher/publisher.mo config: [ { url: configService.redisUrl.toString(), + keyPrefix: configService.cacheKeyPrefix, maxRetriesPerRequest: null, onClientCreated(client) { redisEventsToEventEmitter(client, eventEmitter); diff --git a/services/account/env.template b/services/account/env.template index cee27b7d..dcd328ac 100644 --- a/services/account/env.template +++ b/services/account/env.template @@ -66,3 +66,6 @@ SIWF_DOMAIN=localhost # Enable debug mode for development DEBUG=true + +# Prefix to use for Redis cache keys +CACHE_KEY_PREFIX=account: diff --git a/services/account/libs/common/src/config/config.service.spec.ts b/services/account/libs/common/src/config/config.service.spec.ts index f30650b1..154ae774 100644 --- a/services/account/libs/common/src/config/config.service.spec.ts +++ b/services/account/libs/common/src/config/config.service.spec.ts @@ -54,6 +54,7 @@ describe('AccountSericeConfig', () => { HEALTH_CHECK_MAX_RETRY_INTERVAL_SECONDS: undefined, HEALTH_CHECK_MAX_RETRIES: undefined, CAPACITY_LIMIT: undefined, + CACHE_KEY_PREFIX: undefined, }; beforeAll(() => { @@ -226,5 +227,9 @@ describe('AccountSericeConfig', () => { it('should get capacity limit', () => { expect(accountServiceConfig.capacityLimit).toStrictEqual(JSON.parse(ALL_ENV.CAPACITY_LIMIT!)); }); + + it('should get cache key prefix', () => { + expect(accountServiceConfig.cacheKeyPrefix).toStrictEqual(ALL_ENV.CACHE_KEY_PREFIX?.toString()); + }); }); }); diff --git a/services/account/libs/common/src/config/config.service.ts b/services/account/libs/common/src/config/config.service.ts index 5017cafd..59eea745 100644 --- a/services/account/libs/common/src/config/config.service.ts +++ b/services/account/libs/common/src/config/config.service.ts @@ -21,6 +21,7 @@ export interface ConfigEnvironmentVariables { HEALTH_CHECK_MAX_RETRY_INTERVAL_SECONDS: number; HEALTH_CHECK_MAX_RETRIES: number; CAPACITY_LIMIT: string; + CACHE_KEY_PREFIX: string; } /// Config service to get global app and provider-specific config values. @@ -32,6 +33,10 @@ export class ConfigService { this.capacityLimitObj = JSON.parse(this.nestConfigService.get('CAPACITY_LIMIT')!); } + public get cacheKeyPrefix(): string { + return this.nestConfigService.get('CACHE_KEY_PREFIX')!; + } + public get blockchainScanIntervalSeconds(): number { return this.nestConfigService.get('BLOCKCHAIN_SCAN_INTERVAL_SECONDS') ?? 
12; } diff --git a/services/account/libs/common/src/config/env.config.ts b/services/account/libs/common/src/config/env.config.ts index 4465afbc..8b9f8cb2 100644 --- a/services/account/libs/common/src/config/env.config.ts +++ b/services/account/libs/common/src/config/env.config.ts @@ -4,6 +4,7 @@ import { ConfigModuleOptions } from '@nestjs/config'; export const configModuleOptions: ConfigModuleOptions = { isGlobal: true, validationSchema: Joi.object({ + CACHE_KEY_PREFIX: Joi.string().default('account:'), BLOCKCHAIN_SCAN_INTERVAL_SECONDS: Joi.number().min(1).default(12), TRUST_UNFINALIZED_BLOCKS: Joi.bool().default(false), REDIS_URL: Joi.string().uri().required(), diff --git a/services/account/libs/common/src/queues/queues.module.ts b/services/account/libs/common/src/queues/queues.module.ts index 4eacfa32..c2811bef 100644 --- a/services/account/libs/common/src/queues/queues.module.ts +++ b/services/account/libs/common/src/queues/queues.module.ts @@ -28,6 +28,7 @@ export class QueueModule { imports: [ConfigModule], useFactory: (configService: ConfigService) => ({ connection: new Redis(configService.redisUrl.toString(), redisOptions || {}), + prefix: `${configService.cacheKeyPrefix}:bull`, }), inject: [ConfigService], }), diff --git a/services/content-publishing/ENVIRONMENT.md b/services/content-publishing/ENVIRONMENT.md index 32e20363..9c4c598d 100644 --- a/services/content-publishing/ENVIRONMENT.md +++ b/services/content-publishing/ENVIRONMENT.md @@ -5,6 +5,7 @@ This application recognizes the following environment variables: | Name | Description | Range/Type | Required? | Default | | ----------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :--------------------------------: | :----------: | :-----: | | `API_PORT` | HTTP port that the application listens on | 1025 - 65535 | | 3000 | +| `CACHE_KEY_PREFIX` | Prefix to use for Redis cache keys | string | | content-publishing-service: | | `CAPACITY_LIMIT` | Maximum amount of provider capacity this app is allowed to use (per epoch) type: 'percentage' 'amount' value: number (may be percentage, ie '80', or absolute amount of capacity) | JSON [(example)](./env.template) | Y | | | `FREQUENCY_URL` | Blockchain node address | http(s): or ws(s): URL | Y | | | `PROVIDER_ACCOUNT_SEED_PHRASE` | Seed phrase for provider MSA control key | string | Y | | diff --git a/services/content-publishing/apps/api/src/api.module.ts b/services/content-publishing/apps/api/src/api.module.ts index 3df00f97..7080ff83 100644 --- a/services/content-publishing/apps/api/src/api.module.ts +++ b/services/content-publishing/apps/api/src/api.module.ts @@ -21,7 +21,10 @@ import { AssetControllerV1, ContentControllerV1, ProfileControllerV1 } from './c { imports: [ConfigModule], useFactory: (configService: ConfigService) => ({ - config: [{ url: configService.redisUrl.toString() }], + config: [{ + url: configService.redisUrl.toString(), + keyPrefix: configService.cacheKeyPrefix, + }], }), inject: [ConfigService], }, diff --git a/services/content-publishing/apps/worker/src/worker.module.ts b/services/content-publishing/apps/worker/src/worker.module.ts index f7fd8da7..3197fe85 100644 --- a/services/content-publishing/apps/worker/src/worker.module.ts +++ b/services/content-publishing/apps/worker/src/worker.module.ts @@ -19,7 +19,7 @@ import { RequestProcessorModule } from 
'./request_processor/request.processor.mo { imports: [ConfigModule], useFactory: (configService: ConfigService) => ({ - config: [{ url: configService.redisUrl.toString() }], + config: [{ url: configService.redisUrl.toString(), keyPrefix: configService.cacheKeyPrefix }], }), inject: [ConfigService], }, diff --git a/services/content-publishing/env.template b/services/content-publishing/env.template index 02bc678b..cad8d851 100644 --- a/services/content-publishing/env.template +++ b/services/content-publishing/env.template @@ -49,3 +49,6 @@ BATCH_MAX_COUNT=1000 # Base delay in seconds used for exponential backoff while waiting for # uploaded assets to be verified available before publishing a content notice. ASSET_UPLOAD_VERIFICATION_DELAY_SECONDS=5 + +# Prefix to use for Redis cache keys +CACHE_KEY_PREFIX=content-publishing-service: diff --git a/services/content-publishing/libs/common/src/config/config.service.spec.ts b/services/content-publishing/libs/common/src/config/config.service.spec.ts index 9be0f138..ed058d65 100644 --- a/services/content-publishing/libs/common/src/config/config.service.spec.ts +++ b/services/content-publishing/libs/common/src/config/config.service.spec.ts @@ -51,6 +51,7 @@ describe('ContentPublishingConfigService', () => { BATCH_INTERVAL_SECONDS: undefined, BATCH_MAX_COUNT: undefined, ASSET_UPLOAD_VERIFICATION_DELAY_SECONDS: undefined, + CACHE_KEY_PREFIX: undefined, }; beforeAll(() => { @@ -198,5 +199,9 @@ describe('ContentPublishingConfigService', () => { it('should get batch max count', () => { expect(contentPublishingConfigService.batchMaxCount).toStrictEqual(parseInt(ALL_ENV.BATCH_MAX_COUNT as string, 10)); }); + + it('should get cache key prefix', () => { + expect(contentPublishingConfigService.cacheKeyPrefix).toStrictEqual(ALL_ENV.CACHE_KEY_PREFIX?.toString()); + }); }); }); diff --git a/services/content-publishing/libs/common/src/config/config.service.ts b/services/content-publishing/libs/common/src/config/config.service.ts index bb626254..686a9cff 100644 --- a/services/content-publishing/libs/common/src/config/config.service.ts +++ b/services/content-publishing/libs/common/src/config/config.service.ts @@ -20,6 +20,7 @@ export interface ConfigEnvironmentVariables { BATCH_INTERVAL_SECONDS: number; BATCH_MAX_COUNT: number; ASSET_UPLOAD_VERIFICATION_DELAY_SECONDS: number; + CACHE_KEY_PREFIX: string; } /// Config service to get global app and provider-specific config values. 
@@ -31,6 +32,10 @@ export class ConfigService { this.capacityLimit = JSON.parse(nestConfigService.get('CAPACITY_LIMIT')!); } + public get cacheKeyPrefix(): string { + return this.nestConfigService.get('CACHE_KEY_PREFIX')!; + } + public get environment(): ChainEnvironment { return this.nestConfigService.get('CHAIN_ENVIRONMENT')!; } diff --git a/services/content-publishing/libs/common/src/config/env.config.ts b/services/content-publishing/libs/common/src/config/env.config.ts index b22d3f4c..b7634d8e 100644 --- a/services/content-publishing/libs/common/src/config/env.config.ts +++ b/services/content-publishing/libs/common/src/config/env.config.ts @@ -59,5 +59,6 @@ export const configModuleOptions: ConfigModuleOptions = { return value; }) .required(), + CACHE_KEY_PREFIX: Joi.string().default('content-publishing:'), }), }; diff --git a/services/content-publishing/libs/common/src/queues/queues.module.ts b/services/content-publishing/libs/common/src/queues/queues.module.ts index e34ec268..0778b2a4 100644 --- a/services/content-publishing/libs/common/src/queues/queues.module.ts +++ b/services/content-publishing/libs/common/src/queues/queues.module.ts @@ -24,6 +24,7 @@ import * as QueueConstants from './queue.constants'; password: password || undefined, db: pathname?.length > 1 ? Number(pathname.slice(1)) : undefined, }, + prefix: `${configService.cacheKeyPrefix}:bull`, }; }, inject: [ConfigService], diff --git a/services/graph/ENVIRONMENT.md b/services/graph/ENVIRONMENT.md index 12b31ba5..6e2baf3a 100644 --- a/services/graph/ENVIRONMENT.md +++ b/services/graph/ENVIRONMENT.md @@ -5,7 +5,7 @@ This application recognizes the following environment variables: | Name | Description | Range/Type | Required? | Default | | ----------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | :--------------------------------: | :----------: | :-----: | | `API_PORT` | HTTP port that the application listens on | 1025 - 65535 | | 3000 | -| `CACHE_KEY_PREFIX` | Prefix to use for Redis cache keys | string | | content-watcher: | +| `CACHE_KEY_PREFIX` | Prefix to use for Redis cache keys | string | | graph: | | `CAPACITY_LIMIT` | Maximum amount of provider capacity this app is allowed to use (per epoch) type: 'percentage' 'amount' value: number (may be percentage, ie '80', or absolute amount of capacity) | JSON [(example)](./env.template) | Y | | |`DEBOUNCE_SECONDS`|Number of seconds to retain pending graph updates in the Redis cache to avoid redundant fetches from the chain|>= 0||| | > 0 | | 100 | | `FREQUENCY_URL` | Blockchain node address | http(s): or ws(s): URL | Y | | diff --git a/services/graph/libs/common/src/config/env.config.ts b/services/graph/libs/common/src/config/env.config.ts index 7856d6fa..12dc333e 100644 --- a/services/graph/libs/common/src/config/env.config.ts +++ b/services/graph/libs/common/src/config/env.config.ts @@ -4,7 +4,7 @@ import { ConfigModuleOptions } from '@nestjs/config'; export const configModuleOptions: ConfigModuleOptions = { isGlobal: true, validationSchema: Joi.object({ - CACHE_KEY_PREFIX: Joi.string().default('graph-service:'), + CACHE_KEY_PREFIX: Joi.string().default('graph:'), REDIS_URL: Joi.string().uri().required(), FREQUENCY_URL: Joi.string().uri().required(), QUEUE_HIGH_WATER: Joi.number().min(100).default(1000),
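For reference, a rough sketch of what the namespacing amounts to (the standalone client and queue below, and the queue name, are illustrative only, not the services' actual NestJS module wiring; the prefix value is the account-service default from the env template above):

```typescript
import Redis from 'ioredis';
import { Queue } from 'bullmq';

// Non-Bull Redis connections take the per-service CACHE_KEY_PREFIX, so every
// cache key this service writes is stored as 'account:<key>'.
const redis = new Redis('redis://localhost:6379', { keyPrefix: 'account:' });

// Bull queues are namespaced separately with a queue prefix built as
// `${CACHE_KEY_PREFIX}:bull`, so queue keys from different services no longer
// collide under BullMQ's default 'bull' namespace in a shared Redis.
const queue = new Queue('exampleQueue', {
  connection: { host: 'localhost', port: 6379 },
  prefix: 'account::bull', // 'account:' + ':bull'
});
```

Because the Bull prefix moves queue data to a new location in Redis, this is a breaking change for any jobs already enqueued under the old prefix.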