diff --git a/packages/dapi-grpc/clients/platform/v0/nodejs/PlatformPromiseClient.js b/packages/dapi-grpc/clients/platform/v0/nodejs/PlatformPromiseClient.js index b2a6d25bb2..a6190b4ac7 100644 --- a/packages/dapi-grpc/clients/platform/v0/nodejs/PlatformPromiseClient.js +++ b/packages/dapi-grpc/clients/platform/v0/nodejs/PlatformPromiseClient.js @@ -62,6 +62,8 @@ const { GetTotalCreditsInPlatformResponse: PBJSGetTotalCreditsInPlatformResponse, GetStatusRequest: PBJSGetStatusRequest, GetStatusResponse: PBJSGetStatusResponse, + GetIdentityBalanceRequest: PBJSGetIdentityBalanceRequest, + GetIdentityBalanceResponse: PBJSGetIdentityBalanceResponse, }, }, }, @@ -88,6 +90,7 @@ const { GetIdentityKeysResponse: ProtocGetIdentityKeysResponse, GetTotalCreditsInPlatformResponse: ProtocGetTotalCreditsInPlatformResponse, GetStatusResponse: ProtocGetStatusResponse, + GetIdentityBalanceResponse: ProtocGetIdentityBalanceResponse, } = require('./platform_protoc'); const getPlatformDefinition = require('../../../../lib/getPlatformDefinition'); @@ -186,6 +189,10 @@ class PlatformPromiseClient { this.client.getStatus.bind(this.client), ); + this.client.getIdentityBalance = promisify( + this.client.getIdentityBalance.bind(this.client), + ); + this.protocolVersion = undefined; } @@ -762,6 +769,35 @@ class PlatformPromiseClient { ); } + getIdentityBalance( + getIdentityBalanceRequest, + metadata = {}, + options = {}, + ) { + if (!isObject(metadata)) { + throw new Error('metadata must be an object'); + } + + return this.client.getIdentityBalance( + getIdentityBalanceRequest, + convertObjectToMetadata(metadata), + { + interceptors: [ + jsonToProtobufInterceptorFactory( + jsonToProtobufFactory( + ProtocGetIdentityBalanceResponse, + PBJSGetIdentityBalanceResponse, + ), + protobufToJsonFactory( + PBJSGetIdentityBalanceRequest, + ), + ), + ], + ...options, + }, + ); + } + /** * @param {string} protocolVersion */ diff --git a/packages/dapi-grpc/test/unit/clients/platform/v0/nodejs/PlatformPromiseClient.spec.js b/packages/dapi-grpc/test/unit/clients/platform/v0/nodejs/PlatformPromiseClient.spec.js index 5987cbfd0f..fc3eb575fa 100644 --- a/packages/dapi-grpc/test/unit/clients/platform/v0/nodejs/PlatformPromiseClient.spec.js +++ b/packages/dapi-grpc/test/unit/clients/platform/v0/nodejs/PlatformPromiseClient.spec.js @@ -22,6 +22,7 @@ describe('PlatformPromiseClient', () => { getIdentityContractNonce: this.sinon.stub().resolves(response), getIdentityNonce: this.sinon.stub().resolves(response), getIdentityKeys: this.sinon.stub().resolves(response), + getIdentityBalance: this.sinon.stub().resolves(response), }; }); @@ -170,4 +171,14 @@ describe('PlatformPromiseClient', () => { .to.be.calledOnceWith(request); }); }); + + describe('#getIdentityBalance', () => { + it('should get identity balance', async () => { + const result = await platformPromiseClient.getIdentityBalance(request); + + expect(result).to.equal(response); + expect(platformPromiseClient.client.getIdentityBalance) + .to.be.calledOnceWith(request); + }); + }); }); diff --git a/packages/dashmate/README.md b/packages/dashmate/README.md index 85d8106697..96f04dc39b 100644 --- a/packages/dashmate/README.md +++ b/packages/dashmate/README.md @@ -19,6 +19,7 @@ Distribution package for Dash node installation - [Restart node](#restart-node) - [Show node status](#show-node-status) - [Execute Core CLI command](#execute-core-cli-command) + - [Doctor](#doctor) - [Reset node data](#reset-node-data) - [Full node](#full-node) - [Node groups](#node-groups) @@ -277,6 +278,55 @@ $ 
dashmate core cli "getblockcount"
 1337
 ```
+### Doctor
+
+The `doctor` command analyzes the node configuration and state to provide a list of potential problems and solutions.
+
+```
+Dashmate node diagnostic. Bring your node to the doctor
+
+USAGE
+  $ dashmate doctor [--config ] [-v] [-s ]
+
+FLAGS
+  -s, --samples=  path to the samples archive
+  -v, --verbose   use verbose mode for output
+  --config=       configuration name to use
+
+DESCRIPTION
+  Dashmate node diagnostic. Bring your node to the doctor
+
+COMMANDS
+  doctor report  Dashmate node diagnostic report
+```
+
+The `doctor report` command collects useful debugging information into a `.tar.gz` archive in your current working directory.
+
+The archive will include:
+
+- System information
+- The node configuration
+- Service logs, metrics and status
+
+Collected data does not contain any private information that is not already publicly available.
+All sensitive data, such as private keys and passwords, is obfuscated.
+
+```
+Dashmate node diagnostic report
+
+USAGE
+  $ dashmate doctor report [--config ] [-v]
+
+FLAGS
+  -v, --verbose   use verbose mode for output
+  --config=       configuration name to use
+
+DESCRIPTION
+  Dashmate node diagnostic report
+
+  The command collects diagnostic information and creates an obfuscated archive for further investigation
+```
+
 ### Reset node data
 
 The `reset` command removes all data corresponding to the specified config and allows you to start a node from scratch.
@@ -340,29 +390,6 @@ DESCRIPTION
   Reindex Core data
 ```
 
-### Full node
-It is also possible to start a full node instead of a masternode. Modify the config setting as follows:
-```bash
-dashmate config set core.masternode.enable false
-```
-### Doctor
-
-The `doctor` command collects all useful debugging info into a .tar archive in your current working directory.
-
-Archive will contain all core and platform debugging data and logs for each running service.
-
-```
-USAGE
-  $ dashmate doctor [--config ] [-v]
-
-FLAGS
-  -v, --verbose    use verbose mode for output
-  --config=        configuration name to use
-
-DESCRIPTION
-  Generate a report about masternode
-```
-
 ### Full node
 It is also possible to start a full node instead of a masternode. Modify the config setting as follows:
 ```bash
diff --git a/packages/dashmate/configs/defaults/getBaseConfigFactory.js b/packages/dashmate/configs/defaults/getBaseConfigFactory.js
index 3d689e1b78..534dcffb7f 100644
--- a/packages/dashmate/configs/defaults/getBaseConfigFactory.js
+++ b/packages/dashmate/configs/defaults/getBaseConfigFactory.js
@@ -14,7 +14,7 @@ const { version } = JSON.parse(fs.readFileSync(path.join(PACKAGE_ROOT_DIR, 'pack
  * @param {HomeDir} homeDir
  * @returns {getBaseConfig}
  */
-export default function getBaseConfigFactory(homeDir) {
+export default function getBaseConfigFactory() {
   const prereleaseTag = semver.prerelease(version) === null ? 
'' : `-${semver.prerelease(version)[0]}`; const dockerImageVersion = `${semver.major(version)}${prereleaseTag}`; @@ -134,12 +134,17 @@ export default function getBaseConfigFactory(homeDir) { }, }, log: { - file: { - categories: [], - path: homeDir.joinPath('logs', 'base', 'core.log'), + filePath: null, + debug: { + enabled: false, + ips: false, + sourceLocations: false, + threadNames: false, + timeMicros: false, + includeOnly: [], + exclude: [], }, }, - logIps: 0, indexes: [], }, platform: { diff --git a/packages/dashmate/configs/defaults/getMainnetConfigFactory.js b/packages/dashmate/configs/defaults/getMainnetConfigFactory.js index f4ed9aa785..0f83c4a7a1 100644 --- a/packages/dashmate/configs/defaults/getMainnetConfigFactory.js +++ b/packages/dashmate/configs/defaults/getMainnetConfigFactory.js @@ -26,13 +26,6 @@ export default function getMainnetConfigFactory(homeDir, getBaseConfig) { subnet: '172.26.24.0/24', }, }, - core: { - log: { - file: { - path: homeDir.joinPath('logs', 'mainnet', 'core.log'), - }, - }, - }, network: NETWORK_MAINNET, platform: { enable: false, diff --git a/packages/dashmate/configs/defaults/getTestnetConfigFactory.js b/packages/dashmate/configs/defaults/getTestnetConfigFactory.js index a818991202..a8aec0152b 100644 --- a/packages/dashmate/configs/defaults/getTestnetConfigFactory.js +++ b/packages/dashmate/configs/defaults/getTestnetConfigFactory.js @@ -30,11 +30,6 @@ export default function getTestnetConfigFactory(homeDir, getBaseConfig) { rpc: { port: 19998, }, - log: { - file: { - path: homeDir.joinPath('logs', 'testnet', 'core.log'), - }, - }, spork: { address: 'yjPtiKh2uwk3bDutTEA2q9mCtXyiZRWn55', }, diff --git a/packages/dashmate/configs/getConfigFileMigrationsFactory.js b/packages/dashmate/configs/getConfigFileMigrationsFactory.js index a3bcf67897..6953fbfa07 100644 --- a/packages/dashmate/configs/getConfigFileMigrationsFactory.js +++ b/packages/dashmate/configs/getConfigFileMigrationsFactory.js @@ -853,11 +853,36 @@ export default function getConfigFileMigrationsFactory(homeDir, defaultConfigs) }); return configFile; }, - '1.3.0-dev.2': (configFile) => { + '1.3.0-dev.3': (configFile) => { Object.entries(configFile.configs) .forEach(([, options]) => { options.platform.drive.abci.docker.image = 'dashpay/drive:1-dev'; options.platform.dapi.api.docker.image = 'dashpay/dapi:1-dev'; + + // Update core log settings + options.core.log.filePath = null; + options.core.log.debug = { + enabled: false, + ips: !!options.core.logIps, + sourceLocations: false, + threadNames: false, + timeMicros: false, + includeOnly: [], + exclude: [], + }; + + // If debug log was enabled + if (options.core.log.file.categories.length > 0) { + options.core.log.filePath = options.core.log.file.path; + options.core.log.debug.enabled = true; + + if (!options.core.log.file.categories.includes('all')) { + options.core.log.debug.includeOnly = options.core.log.file.categories; + } + } + + delete options.core.log.file; + delete options.core.logIps; }); return configFile; }, diff --git a/packages/dashmate/docker-compose.yml b/packages/dashmate/docker-compose.yml index 16d02a739e..67aae0bfa9 100644 --- a/packages/dashmate/docker-compose.yml +++ b/packages/dashmate/docker-compose.yml @@ -38,7 +38,6 @@ services: volumes: - core_data:/home/dash - ${DASHMATE_HOME_DIR:?err}/${CONFIG_NAME:?err}/core/dash.conf:/home/dash/.dashcore/dash.conf:ro - - ${CORE_LOG_DIRECTORY_PATH:?err}:/var/log/dash command: - dashd stop_grace_period: 30s @@ -123,7 +122,6 @@ services: volumes: - drive_tenderdash:/tenderdash - 
${DASHMATE_HOME_DIR:?err}/${CONFIG_NAME:?err}/platform/drive/tenderdash:/tenderdash/config:ro - - ${PLATFORM_DRIVE_TENDERDASH_LOG_DIRECTORY_PATH:?err}:/var/log/tenderdash stop_grace_period: 10s profiles: - platform diff --git a/packages/dashmate/package.json b/packages/dashmate/package.json index d7b6bb4c98..0d382c15e8 100644 --- a/packages/dashmate/package.json +++ b/packages/dashmate/package.json @@ -159,6 +159,9 @@ }, "wallet": { "description": "Wallet related commands" + }, + "doctor": { + "description": "Node diagnostics and reporting" } }, "topicSeparator": " " diff --git a/packages/dashmate/src/commands/doctor.js b/packages/dashmate/src/commands/doctor.js deleted file mode 100644 index c66e18c754..0000000000 --- a/packages/dashmate/src/commands/doctor.js +++ /dev/null @@ -1,281 +0,0 @@ -import process from 'process'; -import { Flags } from '@oclif/core'; -import { Listr } from 'listr2'; -import chalk from 'chalk'; -import ConfigBaseCommand from '../oclif/command/ConfigBaseCommand.js'; -import Report from '../doctor/report.js'; -import { DASHMATE_VERSION } from '../constants.js'; -import obfuscateConfig from '../config/obfuscateConfig.js'; -import MuteOneLineError from '../oclif/errors/MuteOneLineError.js'; -import hideString from '../util/hideString.js'; -import obfuscateObjectRecursive from '../util/obfuscateObjectRecursive.js'; - -/** - * - * @param {string} url - * @return {Promise} - */ -async function fetchTextOrError(url) { - try { - const response = await fetch(url); - - return await response.text(); - } catch (e) { - return e.toString(); - } -} - -export default class DoctorCommand extends ConfigBaseCommand { - static description = 'Dashmate node diagnostic. Bring your node to a doctor'; - - static flags = { - ...ConfigBaseCommand.flags, - verbose: Flags.boolean({ char: 'v', description: 'use verbose mode for output', default: false }), - }; - - /** - * @param {Object} args - * @param {Object} flags - * @param createRpcClient - * @param {DockerCompose} dockerCompose - * @param {getConnectionHost} getConnectionHost - * @param {Config} config - * @param createTenderdashRpcClient - * @param getServiceList - * @param getOperatingSystemInfo - * @return {Promise} - */ - async runWithDependencies( - args, - { verbose: isVerbose }, - createRpcClient, - dockerCompose, - getConnectionHost, - config, - createTenderdashRpcClient, - getServiceList, - getOperatingSystemInfo, - ) { - const tasks = new Listr( - [ - { - task: async (ctx, task) => { - const agreement = await task.prompt({ - type: 'toggle', - name: 'confirm', - header: chalk` Dashmate is going to collect all necessary debug data from the node to create a report, including: - - - System information - - The node configuration - - Service logs, metrics and status - - Collected data will contain only anonymous information. All sensitive data like private keys or passwords is obfuscated. 
- - The report will be created as an TAR archive in {bold.cyanBright ${process.cwd()}} - You can use the report to analyze your node condition yourself or send it to the Dash Core Group ({underline.cyanBright support@dash.org}) in case you need help.\n`, - message: 'Create a report?', - enabled: 'Yes', - disabled: 'No', - }); - - if (!agreement) { - throw new Error('Operation is cancelled'); - } - - ctx.report = new Report(); - }, - }, - { - title: 'System information', - task: async (ctx) => { - const osInfo = await getOperatingSystemInfo(); - - ctx.report.setSystemInfo(osInfo); - }, - }, - { - title: 'The node configuration', - task: async (ctx) => { - ctx.report.setDashmateVersion(DASHMATE_VERSION); - ctx.report.setDashmateConfig(obfuscateConfig(config)); - }, - }, - { - title: 'Core status', - task: async (ctx) => { - const rpcClient = createRpcClient({ - port: config.get('core.rpc.port'), - user: 'dashmate', - pass: config.get('core.rpc.users.dashmate.password'), - host: await getConnectionHost(config, 'core', 'core.rpc.host'), - }); - - const coreCalls = [ - rpcClient.getBestChainLock(), - rpcClient.quorum('listextended'), - rpcClient.getBlockchainInfo(), - rpcClient.getPeerInfo(), - ]; - - if (config.get('core.masternode.enable')) { - coreCalls.push(rpcClient.masternode('status')); - } - - const [ - getBestChainLock, - quorums, - getBlockchainInfo, - getPeerInfo, - masternodeStatus, - ] = (await Promise.allSettled(coreCalls)).map((e) => e.value?.result || e.reason); - - ctx.report.setServiceInfo('core', 'bestChainLock', getBestChainLock); - ctx.report.setServiceInfo('core', 'quorums', quorums); - ctx.report.setServiceInfo('core', 'blockchainInfo', getBlockchainInfo); - ctx.report.setServiceInfo('core', 'peerInfo', getPeerInfo); - ctx.report.setServiceInfo('core', 'masternodeStatus', masternodeStatus); - }, - }, - { - title: 'Tenderdash status', - enabled: () => config.get('platform.enable'), - task: async (ctx) => { - const tenderdashRPCClient = createTenderdashRpcClient({ - host: config.get('platform.drive.tenderdash.rpc.host'), - port: config.get('platform.drive.tenderdash.rpc.port'), - }); - - // Tenderdash requires to pass all params, so we use basic fetch - async function fetchValidators() { - const url = `http://${config.get('platform.drive.tenderdash.rpc.host')}:${config.get('platform.drive.tenderdash.rpc.port')}/validators?request_quorum_info=true`; - const response = await fetch(url, 'GET'); - return response.json(); - } - - const [ - status, - genesis, - peers, - abciInfo, - consensusState, - validators, - ] = await Promise.allSettled([ - tenderdashRPCClient.request('status', []), - tenderdashRPCClient.request('genesis', []), - tenderdashRPCClient.request('net_info', []), - tenderdashRPCClient.request('abci_info', []), - tenderdashRPCClient.request('dump_consensus_state', []), - fetchValidators(), - ]); - - ctx.report.setServiceInfo('drive_tenderdash', 'status', status); - ctx.report.setServiceInfo('drive_tenderdash', 'validators', validators); - ctx.report.setServiceInfo('drive_tenderdash', 'genesis', genesis); - ctx.report.setServiceInfo('drive_tenderdash', 'peers', peers); - ctx.report.setServiceInfo('drive_tenderdash', 'abciInfo', abciInfo); - ctx.report.setServiceInfo('drive_tenderdash', 'consensusState', consensusState); - }, - }, - { - title: 'Metrics', - enabled: () => config.get('platform.enable'), - task: async (ctx, task) => { - if (config.get('platform.drive.tenderdash.metrics.enabled')) { - // eslint-disable-next-line no-param-reassign - task.output = 'Reading 
Tenderdash metrics'; - - const url = `http://${config.get('platform.drive.tenderdash.rpc.host')}:${config.get('platform.drive.tenderdash.rpc.port')}/metrics`; - - const result = fetchTextOrError(url); - - ctx.report.setServiceInfo('drive_tenderdash', 'metrics', result); - } - - if (config.get('platform.drive.abci.metrics.enabled')) { - // eslint-disable-next-line no-param-reassign - task.output = 'Reading Drive metrics'; - - const url = `http://${config.get('platform.drive.abci.rpc.host')}:${config.get('platform.drive.abci.rpc.port')}/metrics`; - - const result = fetchTextOrError(url); - - ctx.report.setServiceInfo('drive_abci', 'metrics', result); - } - - if (config.get('platform.gateway.metrics.enabled')) { - // eslint-disable-next-line no-param-reassign - task.output = 'Reading Gateway metrics'; - - const url = `http://${config.get('platform.gateway.metrics.host')}:${config.get('platform.gateway.metrics.port')}/metrics`; - - const result = fetchTextOrError(url); - - ctx.report.setServiceInfo('gateway', 'metrics', result); - } - }, - }, - { - title: 'Logs', - task: async (ctx, task) => { - const services = await getServiceList(config); - - // eslint-disable-next-line no-param-reassign - task.output = `Pulling logs from ${services.map((e) => e.name)}`; - - await Promise.all( - services.map(async (service) => { - const [inspect, logs] = (await Promise.allSettled([ - dockerCompose.inspectService(config, service.name), - dockerCompose.logs(config, [service.name]), - ])).map((e) => e.value || e.reason); - - // Hide username & external ip from logs - logs.out = logs.out.replaceAll(process.env.USER, hideString(process.env.USER)); - logs.err = logs.err.replaceAll(process.env.USER, hideString(process.env.USER)); - - // Hide username & external ip from inspect - obfuscateObjectRecursive(inspect, (_field, value) => (typeof value === 'string' - ? value.replaceAll(process.env.USER, hideString(process.env.USER)) : value)); - - ctx.report.setServiceInfo(service.name, 'stdOut', logs.out); - ctx.report.setServiceInfo(service.name, 'stdErr', logs.err); - ctx.report.setServiceInfo(service.name, 'dockerInspect', inspect); - }), - ); - }, - }, - { - title: 'Create an archive', - task: async (ctx, task) => { - const archivePath = process.cwd(); - - await ctx.report.archive(archivePath); - - // eslint-disable-next-line no-param-reassign - task.output = chalk`Saved to {bold.cyanBright ${archivePath}/dashmate-report-${ctx.report.date.toISOString()}.tar.gz}`; - }, - options: { - persistentOutput: true, - }, - }, - ], - { - renderer: isVerbose ? 
'verbose' : 'default', - rendererOptions: { - clearOutput: false, - showTimer: isVerbose, - bottomBar: true, - removeEmptyLines: false, - }, - }, - ); - - try { - await tasks.run({ - isVerbose, - }); - } catch (e) { - throw new MuteOneLineError(e); - } - } -} diff --git a/packages/dashmate/src/commands/doctor/index.js b/packages/dashmate/src/commands/doctor/index.js new file mode 100644 index 0000000000..f4ecd4dba8 --- /dev/null +++ b/packages/dashmate/src/commands/doctor/index.js @@ -0,0 +1,146 @@ +import process from 'process'; +import { Flags } from '@oclif/core'; +import { Listr } from 'listr2'; +import chalk from 'chalk'; +import { SEVERITY } from '../../doctor/Prescription.js'; +import ConfigBaseCommand from '../../oclif/command/ConfigBaseCommand.js'; +import Samples from '../../doctor/Samples.js'; +import MuteOneLineError from '../../oclif/errors/MuteOneLineError.js'; + +export default class DoctorCommand extends ConfigBaseCommand { + static description = 'Dashmate node diagnostics. Bring your node to the doctor'; + + static flags = { + ...ConfigBaseCommand.flags, + verbose: Flags.boolean({ char: 'v', description: 'use verbose mode for output', default: false }), + samples: Flags.string({ char: 's', description: 'path to the samples archive', default: '' }), + }; + + /** + * @param {Object} args + * @param {Object} flags + * @param {Config} config + * @param {analyseSamples} analyseSamples + * @param {collectSamplesTask} collectSamplesTask + * @param {unarchiveSamples} unarchiveSamples + * @return {Promise} + */ + async runWithDependencies( + args, + { + verbose: isVerbose, + samples: samplesFile, + }, + config, + analyseSamples, + collectSamplesTask, + unarchiveSamples, + ) { + const tasks = new Listr( + [ + { + title: 'Collecting samples', + enabled: () => !samplesFile, + task: async () => collectSamplesTask(config), + }, + { + title: 'Analyzing samples', + task: async (ctx) => { + ctx.prescription = analyseSamples(ctx.samples); + }, + }, + ], + { + renderer: isVerbose ? 'verbose' : 'default', + rendererOptions: { + clearOutput: false, + showTimer: isVerbose, + removeEmptyLines: false, + collapse: false, + }, + }, + ); + + let samples; + if (samplesFile) { + samples = await unarchiveSamples(samplesFile); + } else { + samples = new Samples(); + } + + let ctx; + try { + ctx = await tasks.run({ + isVerbose, + samples, + }); + } catch (e) { + throw new MuteOneLineError(e); + } + + const problems = ctx.prescription.getOrderedProblems(); + if (problems.length === 0) { + // eslint-disable-next-line no-console + console.log(chalk`\n The doctor didn't find any problems with your node. + + If issues still persist, please use {bold.cyanBright dashmate doctor report} to create an archive + of the already collected data for further investigation. + + You can use it to analyze the node's condition yourself or send it to the Dash Core Group support team ({underline.cyanBright support@dash.org}) for help.`); + + return; + } + + const problemsString = problems.map((problem, index) => { + let numberedDescription = `${index + 1}. 
${problem.getDescription()}`; + if (problem.getSeverity() === SEVERITY.HIGH) { + numberedDescription = chalk.red(numberedDescription); + } else if (problem.getSeverity() === SEVERITY.MEDIUM) { + numberedDescription = chalk.yellow(numberedDescription); + } + + const indentedDescription = numberedDescription.split('\n') + .map((line, i) => { + let size = 5; + if (i === 0) { + size = 3; + } + + return ' '.repeat(size) + line; + }).join('\n'); + + const indentedSolution = problem.getSolution().split('\n') + .map((line) => ' '.repeat(6) + line).join('\n'); + + return `${indentedDescription}\n\n${indentedSolution}`; + }).join('\n\n'); + + const plural = problems.length > 1 ? 's' : ''; + + const severity = ctx.prescription.getSeverity(); + + let problemsCount = `${problems.length} problem${plural}`; + if (severity === SEVERITY.HIGH) { + problemsCount = chalk.red(problemsCount); + } else if (severity === SEVERITY.MEDIUM) { + problemsCount = chalk.yellow(problemsCount); + } + + const prescriptionString = chalk`\n ${problemsCount} found: + +${problemsString} + + + Use {bold.cyanBright dashmate doctor report} to create an archive + of the already collected data for further investigation. + + You can use it to analyze the node's condition yourself or send it to the Dash Core Group support team ({underline.cyanBright support@dash.org}) for help.`; + + // eslint-disable-next-line no-console + console.log(prescriptionString); + + if (severity === SEVERITY.HIGH) { + process.exitCode = 1; + } + } +} diff --git a/packages/dashmate/src/commands/doctor/report.js b/packages/dashmate/src/commands/doctor/report.js new file mode 100644 index 0000000000..c826dc8ad7 --- /dev/null +++ b/packages/dashmate/src/commands/doctor/report.js @@ -0,0 +1,105 @@ +import process from 'process'; +import { Flags } from '@oclif/core'; +import { Listr } from 'listr2'; +import chalk from 'chalk'; +import Samples from '../../doctor/Samples.js'; +import ConfigBaseCommand from '../../oclif/command/ConfigBaseCommand.js'; +import MuteOneLineError from '../../oclif/errors/MuteOneLineError.js'; + +export default class ReportCommand extends ConfigBaseCommand { + static description = `Dashmate node diagnostic report + +The command collects diagnostic information and creates an obfuscated archive for further investigation`; + + static flags = { + ...ConfigBaseCommand.flags, + verbose: Flags.boolean({ char: 'v', description: 'use verbose mode for output', default: false }), + }; + + /** + * @param {Object} args + * @param {Object} flags + * @param {Config} config + * @param {collectSamplesTask} collectSamplesTask + * @param {archiveSamples} archiveSamples + * @return {Promise} + */ + async runWithDependencies( + args, + { + verbose: isVerbose, + }, + config, + collectSamplesTask, + archiveSamples, + ) { + const tasks = new Listr( + [ + { + task: async (ctx, task) => { + const agreement = await task.prompt({ + type: 'toggle', + name: 'confirm', + header: chalk` Do you want to create an archive of diagnostic information to help with debugging? + + The archive will include: + + - System information + - The node configuration + - Service logs, metrics and status + + Collected data will not contain any information which is not already publicly known. + All sensitive data like private keys or passwords is obfuscated. 
+ + The archive will be compressed with TAR/GZIP and placed in {bold.cyanBright ${process.cwd()}} + You can use it to analyze the node's condition yourself or send it to the Dash Core Group support team ({underline.cyanBright support@dash.org}) for help.\n`, + message: 'Create an archive?', + enabled: 'Yes', + disabled: 'No', + }); + + if (!agreement) { + throw new Error('Archive creation was declined'); + } + }, + }, + { + title: 'Collecting samples', + task: async () => collectSamplesTask(config), + }, + { + title: 'Creating archive', + task: async (ctx, task) => { + const archivePath = process.cwd(); + + await archiveSamples(ctx.samples, archivePath); + + // eslint-disable-next-line no-param-reassign + task.output = chalk`Saved to {bold.cyanBright ${archivePath}/dashmate-report-${ctx.samples.date.toISOString()}.tar.gz}`; + }, + options: { + persistentOutput: true, + }, + }, + ], + { + renderer: isVerbose ? 'verbose' : 'default', + rendererOptions: { + clearOutput: false, + showTimer: isVerbose, + removeEmptyLines: false, + collapse: false, + }, + }, + ); + + try { + await tasks.run({ + isVerbose, + samples: new Samples(), + }); + } catch (e) { + throw new MuteOneLineError(e); + } + } +} diff --git a/packages/dashmate/src/config/configJsonSchema.js b/packages/dashmate/src/config/configJsonSchema.js index 0e9dc266d1..8da6b4ce48 100644 --- a/packages/dashmate/src/config/configJsonSchema.js +++ b/packages/dashmate/src/config/configJsonSchema.js @@ -401,36 +401,59 @@ export default { log: { type: 'object', properties: { - file: { + filePath: { + type: ['null', 'string'], + minLength: 1, + }, + debug: { type: 'object', properties: { - categories: { + enabled: { + type: 'boolean', + }, + ips: { + type: 'boolean', + }, + sourceLocations: { + type: 'boolean', + }, + threadNames: { + type: 'boolean', + }, + timeMicros: { + type: 'boolean', + }, + includeOnly: { type: 'array', uniqueItems: true, items: { type: 'string', - enum: ['all', 'net', 'tor', 'mempool', 'http', 'bench', 'zmq', 'walletdb', 'rpc', 'estimatefee', + enum: ['net', 'tor', 'mempool', 'http', 'bench', 'zmq', 'walletdb', 'rpc', 'estimatefee', 'addrman', 'selectcoins', 'reindex', 'cmpctblock', 'rand', 'prune', 'proxy', 'mempoolrej', 'libevent', 'coindb', 'qt', 'leveldb', 'chainlocks', 'gobject', 'instantsend', 'llmq', 'llmq-dkg', 'llmq-sigs', 'mnpayments', 'mnsync', 'coinjoin', 'spork', 'netconn', ], }, }, - path: { - type: 'string', - minLength: 1, + exclude: { + type: 'array', + uniqueItems: true, + items: { + type: 'string', + enum: ['net', 'tor', 'mempool', 'http', 'bench', 'zmq', 'walletdb', 'rpc', 'estimatefee', + 'addrman', 'selectcoins', 'reindex', 'cmpctblock', 'rand', 'prune', 'proxy', 'mempoolrej', + 'libevent', 'coindb', 'qt', 'leveldb', 'chainlocks', 'gobject', 'instantsend', 'llmq', + 'llmq-dkg', 'llmq-sigs', 'mnpayments', 'mnsync', 'coinjoin', 'spork', 'netconn', + ], + }, }, }, additionalProperties: false, - required: ['categories', 'path'], + required: ['enabled', 'ips', 'sourceLocations', 'threadNames', 'timeMicros', 'includeOnly', 'exclude'], }, }, additionalProperties: false, - required: ['file'], - }, - logIps: { - type: 'integer', - enum: [0, 1], + required: ['filePath', 'debug'], }, indexes: { type: ['array'], @@ -444,7 +467,7 @@ export default { }, }, required: ['docker', 'p2p', 'rpc', 'spork', 'masternode', 'miner', 'devnet', 'log', - 'logIps', 'indexes', 'insight'], + 'indexes', 'insight'], additionalProperties: false, }, platform: { diff --git a/packages/dashmate/src/config/generateEnvsFactory.js 
b/packages/dashmate/src/config/generateEnvsFactory.js index fcb0491ecf..781ef7bdaa 100644 --- a/packages/dashmate/src/config/generateEnvsFactory.js +++ b/packages/dashmate/src/config/generateEnvsFactory.js @@ -1,4 +1,3 @@ -import path from 'path'; import os from 'os'; import convertObjectToEnvs from './convertObjectToEnvs.js'; import { DASHMATE_HELPER_DOCKER_IMAGE } from '../constants.js'; @@ -20,7 +19,6 @@ export default function generateEnvsFactory(configFile, homeDir, getConfigProfil * COMPOSE_PROJECT_NAME: string, * COMPOSE_FILE: string, * COMPOSE_PATH_SEPARATOR: string, - * CORE_LOG_DIRECTORY_PATH: string * }} */ function generateEnvs(config) { @@ -69,13 +67,6 @@ export default function generateEnvsFactory(configFile, homeDir, getConfigProfil const { uid, gid } = os.userInfo(); - // Determine logs directory to mount into tenderdash container - let tenderdashLogDirectoryPath = homeDir.joinPath('logs', config.get('network')); - const tenderdashLogFilePath = config.get('platform.drive.tenderdash.log.path'); - if (tenderdashLogFilePath !== null) { - tenderdashLogDirectoryPath = path.dirname(tenderdashLogFilePath); - } - let driveAbciMetricsUrl = ''; if (config.get('platform.drive.abci.metrics.enabled')) { driveAbciMetricsUrl = 'http://0.0.0.0:29090'; @@ -92,11 +83,7 @@ export default function generateEnvsFactory(configFile, homeDir, getConfigProfil COMPOSE_PATH_SEPARATOR: ':', DOCKER_BUILDKIT: 1, COMPOSE_DOCKER_CLI_BUILD: 1, - CORE_LOG_DIRECTORY_PATH: path.dirname( - config.get('core.log.file.path'), - ), DASHMATE_HELPER_DOCKER_IMAGE, - PLATFORM_DRIVE_TENDERDASH_LOG_DIRECTORY_PATH: tenderdashLogDirectoryPath, PLATFORM_GATEWAY_RATE_LIMITER_METRICS_DISABLED: !config.get('platform.gateway.rateLimiter.metrics.enabled'), PLATFORM_DRIVE_ABCI_METRICS_URL: driveAbciMetricsUrl, ...convertObjectToEnvs(config.getOptions()), diff --git a/packages/dashmate/src/config/obfuscateConfig.js b/packages/dashmate/src/config/obfuscateConfig.js index 1a9e8db044..99982ab387 100644 --- a/packages/dashmate/src/config/obfuscateConfig.js +++ b/packages/dashmate/src/config/obfuscateConfig.js @@ -1,25 +1,30 @@ import lodash from 'lodash'; import obfuscateObjectRecursive from '../util/obfuscateObjectRecursive.js'; +import Config from './Config.js'; import hideString from '../util/hideString.js'; +/** + * @param {Config} config + * @return {Config} + */ export default function obfuscateConfig( config, ) { const username = process.env.USER; - const cloned = lodash.cloneDeep(config); + const clonedOptions = lodash.cloneDeep(config.getOptions()); // sanitize [password, apiKey, privateKey, externalIp] fields in the dashmate config - obfuscateObjectRecursive(cloned, (field, value) => (typeof value === 'string' && field === 'password' ? hideString(value) : value)); - obfuscateObjectRecursive(cloned, (field, value) => (typeof value === 'string' && field === 'key' ? hideString(value) : value)); - obfuscateObjectRecursive(cloned, (field, value) => (typeof value === 'string' && field === 'apiKey' ? hideString(value) : value)); - obfuscateObjectRecursive(cloned, (field, value) => (typeof value === 'string' && field === 'privateKey' ? hideString(value) : value)); + obfuscateObjectRecursive(clonedOptions, (field, value) => (typeof value === 'string' && field === 'password' ? hideString(value) : value)); + obfuscateObjectRecursive(clonedOptions, (field, value) => (typeof value === 'string' && field === 'key' ? 
hideString(value) : value)); + obfuscateObjectRecursive(clonedOptions, (field, value) => (typeof value === 'string' && field === 'apiKey' ? hideString(value) : value)); + obfuscateObjectRecursive(clonedOptions, (field, value) => (typeof value === 'string' && field === 'privateKey' ? hideString(value) : value)); // sanitize also usernames & external ip from the rest of the fields values - obfuscateObjectRecursive(cloned, (_field, value) => (typeof value === 'string' ? value.replaceAll( + obfuscateObjectRecursive(clonedOptions, (_field, value) => (typeof value === 'string' ? value.replaceAll( username, hideString(username), ) : value)); - return cloned; + return new Config(config.getName(), clonedOptions); } diff --git a/packages/dashmate/src/core/startCoreFactory.js b/packages/dashmate/src/core/startCoreFactory.js index 86401ee6df..3da7e54b8d 100644 --- a/packages/dashmate/src/core/startCoreFactory.js +++ b/packages/dashmate/src/core/startCoreFactory.js @@ -59,8 +59,10 @@ export default function startCoreFactory( coreCommand.push('--disablewallet=1'); } - const logFilePath = config.get('core.log.file.path'); - ensureFileMountExists(logFilePath, 0o666); + const logFilePath = config.get('core.log.filePath'); + if (logFilePath !== null) { + ensureFileMountExists(logFilePath, 0o666); + } const coreContainer = await dockerCompose.runService( config, diff --git a/packages/dashmate/src/createDIContainer.js b/packages/dashmate/src/createDIContainer.js index e478ffdf1d..9f3b861623 100644 --- a/packages/dashmate/src/createDIContainer.js +++ b/packages/dashmate/src/createDIContainer.js @@ -15,6 +15,14 @@ import ConfigFileJsonRepository from './config/configFile/ConfigFileJsonReposito import createConfigFileFactory from './config/configFile/createConfigFileFactory.js'; import migrateConfigFileFactory from './config/configFile/migrateConfigFileFactory.js'; import DefaultConfigs from './config/DefaultConfigs.js'; +import analyseConfigFactory from './doctor/analyse/analyseConfigFactory.js'; +import analyseCoreFactory from './doctor/analyse/analyseCoreFactory.js'; +import analysePlatformFactory from './doctor/analyse/analysePlatformFactory.js'; +import analyseServiceContainersFactory from './doctor/analyse/analyseServiceContainersFactory.js'; +import analyseSystemResourcesFactory from './doctor/analyse/analyseSystemResourcesFactory.js'; +import analyseSamplesFactory from './doctor/analyseSamplesFactory.js'; +import archiveSamples from './doctor/archiveSamples.js'; +import unarchiveSamplesFactory from './doctor/unarchiveSamplesFactory.js'; import renderTemplateFactory from './templates/renderTemplateFactory.js'; import renderServiceTemplatesFactory from './templates/renderServiceTemplatesFactory.js'; @@ -113,6 +121,9 @@ import writeConfigTemplatesFactory from './templates/writeConfigTemplatesFactory import importCoreDataTaskFactory from './listr/tasks/setup/regular/importCoreDataTaskFactory.js'; import verifySystemRequirementsTaskFactory from './listr/tasks/setup/regular/verifySystemRequirementsTaskFactory.js'; +import collectSamplesTaskFactory from './listr/tasks/doctor/collectSamplesTaskFactory.js'; +import verifySystemRequirementsFactory from './doctor/verifySystemRequirementsFactory.js'; +import validateZeroSslCertificateFactory from './ssl/zerossl/validateZeroSslCertificateFactory.js'; /** * @param {Object} [options] @@ -306,6 +317,30 @@ export default async function createDIContainer(options = {}) { importCoreDataTask: asFunction(importCoreDataTaskFactory).singleton(), verifySystemRequirementsTask: 
asFunction(verifySystemRequirementsTaskFactory) .singleton(), + collectSamplesTask: asFunction(collectSamplesTaskFactory).singleton(), + }); + + /** + * SSL + */ + container.register({ + validateZeroSslCertificate: asFunction(validateZeroSslCertificateFactory).singleton(), + getCertificate: asValue(getCertificate), + }); + + /** + * Doctor + */ + container.register({ + verifySystemRequirements: asFunction(verifySystemRequirementsFactory), + analyseSamples: asFunction(analyseSamplesFactory).singleton(), + analyseSystemResources: asFunction(analyseSystemResourcesFactory).singleton(), + analyseServiceContainers: asFunction(analyseServiceContainersFactory).singleton(), + analyseConfig: asFunction(analyseConfigFactory).singleton(), + analyseCore: asFunction(analyseCoreFactory).singleton(), + analysePlatform: asFunction(analysePlatformFactory).singleton(), + unarchiveSamples: asFunction(unarchiveSamplesFactory).singleton(), + archiveSamples: asValue(archiveSamples), }); /** diff --git a/packages/dashmate/src/docker/DockerCompose.js b/packages/dashmate/src/docker/DockerCompose.js index 7826b32593..c9231128be 100644 --- a/packages/dashmate/src/docker/DockerCompose.js +++ b/packages/dashmate/src/docker/DockerCompose.js @@ -41,6 +41,11 @@ export default class DockerCompose { */ #isDockerSetupVerified = false; + /** + * @type {Error} + */ + #dockerVerifiicationError; + /** * @type {HomeDir} */ @@ -499,14 +504,24 @@ export default class DockerCompose { */ async throwErrorIfNotInstalled() { if (this.#isDockerSetupVerified) { - return; + if (this.#dockerVerifiicationError) { + throw this.#dockerVerifiicationError; + } else { + return; + } } - this.#isDockerSetupVerified = true; + try { + await this.throwErrorIfDockerIsNotInstalled(); - await this.throwErrorIfDockerIsNotInstalled(); + await this.throwErrorIfDockerComposeIsNotInstalled(); + } catch (e) { + this.#dockerVerifiicationError = e; - await this.throwErrorIfDockerComposeIsNotInstalled(); + throw e; + } finally { + this.#isDockerSetupVerified = true; + } } /** diff --git a/packages/dashmate/src/docker/getServiceListFactory.js b/packages/dashmate/src/docker/getServiceListFactory.js index 7fee42f5fc..eff5fac757 100644 --- a/packages/dashmate/src/docker/getServiceListFactory.js +++ b/packages/dashmate/src/docker/getServiceListFactory.js @@ -13,7 +13,7 @@ export default function getServiceListFactory(generateEnvs, getConfigProfiles) { /** * Returns list of services and corresponding docker images from the config * - * @typedef {getServiceList} + * @typedef {function} getServiceList * @param {Config} config * @return {Object[]} */ diff --git a/packages/dashmate/src/doctor/Prescription.js b/packages/dashmate/src/doctor/Prescription.js new file mode 100644 index 0000000000..07bd6cd634 --- /dev/null +++ b/packages/dashmate/src/doctor/Prescription.js @@ -0,0 +1,40 @@ +export const SEVERITY = { + LOW: 1, + MEDIUM: 2, + HIGH: 3, +}; + +export class Prescription { + /** + * @type {Problem[]} + */ + #orderedProblems; + + /** + * @param {Problem[]} problems + */ + constructor(problems) { + const orderedProblems = [...problems]; + orderedProblems.sort((a, b) => b.getSeverity() - a.getSeverity()); + this.#orderedProblems = orderedProblems; + } + + /** + * @return {number} - Severity level + */ + getSeverity() { + return this.#orderedProblems + .reduce((severity, problem) => ( + Math.max(severity, problem.getSeverity()) + ), SEVERITY.LOW); + } + + /** + * Get problems ordered by severity level + * + * @return {Problem[]} + */ + getOrderedProblems() { + return 
this.#orderedProblems; + } +} diff --git a/packages/dashmate/src/doctor/Problem.js b/packages/dashmate/src/doctor/Problem.js new file mode 100644 index 0000000000..64cff07c31 --- /dev/null +++ b/packages/dashmate/src/doctor/Problem.js @@ -0,0 +1,48 @@ +export default class Problem { + /** + * @type {string} + */ + #description; + + /** + * @type {string} + */ + #solution; + + /** + * @type {number} + */ + #severity; + + /** + * @param {string} description + * @param {string} solution + * @param {number} severity + */ + constructor(description, solution, severity) { + this.#description = description; + this.#solution = solution; + this.#severity = severity; + } + + /** + * @return {string} + */ + getDescription() { + return this.#description; + } + + /** + * @return {string} + */ + getSolution() { + return this.#solution; + } + + /** + * @return {number} + */ + getSeverity() { + return this.#severity; + } +} diff --git a/packages/dashmate/src/doctor/Samples.js b/packages/dashmate/src/doctor/Samples.js new file mode 100644 index 0000000000..5d2c2b80d6 --- /dev/null +++ b/packages/dashmate/src/doctor/Samples.js @@ -0,0 +1,101 @@ +export default class Samples { + /** + * @type {Date} + */ + date; + + /** + * @type {Object} + */ + systemInfo = {}; + + /** + * @type {string} + */ + #dockerError; + + /** + * @type {string} + */ + #dashmateVersion; + + /** + * @type {Config} + */ + #dashmateConfig; + + /** + * @type {Object} + */ + #services = {}; + + constructor() { + this.date = new Date(); + } + + setSystemInfo(systemInfo) { + this.systemInfo = systemInfo; + } + + getSystemInfo() { + return this.systemInfo; + } + + /** + * @param {Error} error + */ + setDockerError(error) { + this.#dockerError = error.toString(); + } + + /** + * @param {string} errorString + */ + setStringifiedDockerError(errorString) { + this.#dockerError = errorString; + } + + /** + * @return {string} + */ + getStringifiedDockerError() { + return this.#dockerError; + } + + setDashmateVersion(version) { + this.#dashmateVersion = version; + } + + getDashmateVersion() { + return this.#dashmateVersion; + } + + /** + * @param {Config} config + */ + setDashmateConfig(config) { + this.#dashmateConfig = config; + } + + /** + * @return {Config} + */ + getDashmateConfig() { + return this.#dashmateConfig; + } + + setServiceInfo(service, key, data) { + this.#services[service] = { + ...(this.#services[service] ?? 
{}), + [key]: data, + }; + } + + getServices() { + return this.#services; + } + + getServiceInfo(service, key) { + return this.#services[service]?.[key]; + } +} diff --git a/packages/dashmate/src/doctor/analyse/analyseConfigFactory.js b/packages/dashmate/src/doctor/analyse/analyseConfigFactory.js new file mode 100644 index 0000000000..7396668d43 --- /dev/null +++ b/packages/dashmate/src/doctor/analyse/analyseConfigFactory.js @@ -0,0 +1,214 @@ +import chalk from 'chalk'; +import { NETWORK_LOCAL, NETWORK_MAINNET } from '../../constants.js'; +import { ERRORS } from '../../ssl/zerossl/validateZeroSslCertificateFactory.js'; +import { SEVERITY } from '../Prescription.js'; +import Problem from '../Problem.js'; + +export default function analyseConfigFactory() { + /** + * @typedef analyseConfig + * @param {Samples} samples + * @return {Problem[]} + */ + function analyseConfig(samples) { + const config = samples.getDashmateConfig(); + + const problems = []; + + if (config?.get('platform.enable')) { + // Gateway admin is disabled while metrics are enabled + if (config.get('platform.gateway.metrics.enabled') && !config.get('platform.gateway.admin.enabled')) { + const problem = new Problem( + 'Gateway admin is disabled while metrics are enabled', + chalk`Please enable gateway admin: {bold.cyanBright dashmate config set platform.gateway.admin.enabled true}`, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + + // Platform Node ID + const masternodeStatus = samples.getServiceInfo('core', 'masternodeStatus'); + const platformNodeId = masternodeStatus?.dmnState?.platformNodeId; + if (platformNodeId && config.get('platform.drive.tenderdash.node.id') !== platformNodeId) { + const problem = new Problem( + 'Platform Node ID doesn\'t match the one found in the ProReg transaction', + chalk`Please set the correct Node ID and Node Key: + {bold.cyanBright dashmate config set platform.drive.tenderdash.node.id ID + dashmate config set platform.drive.tenderdash.node.key KEY} + Or update the Node ID in the masternode list using a ProServUp transaction`, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + + // SSL certificate + const ssl = samples.getServiceInfo('gateway', 'ssl'); + if (ssl?.error) { + switch (ssl.error) { + case 'disabled': + if (config.get('network') !== NETWORK_LOCAL) { + const problem = new Problem( + 'SSL certificates are disabled. Clients won\'t be able to connect securely', + chalk`Please enable and set up SSL certificates {bold.cyanBright https://docs.dash.org/en/stable/masternodes/dashmate.html#ssl-certificate}`, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + break; + case 'self-signed': + if (config.get('network') === NETWORK_MAINNET) { + const problem = new Problem( + 'Self-signed SSL certificate is used on mainnet. 
Clients won\'t be able to connect securely', + chalk`Please use valid SSL certificates {bold.cyanBright https://docs.dash.org/en/stable/masternodes/dashmate.html#ssl-certificate}`, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + break; + default: { + const { + description, + solution, + } = { + // File provider error + 'not-valid': { + description: 'SSL certificate files are not valid', + solution: chalk`Please make sure the certificate chain contains the actual server certificate at the top of the file, and it corresponds to the private key + +Certificate chain file path: {bold.cyanBright ${ssl?.data?.chainFilePath}} +Private key file path: {bold.cyanBright ${ssl?.data?.privateFilePath}}`, + }, + // File provider error + 'not-exist': { + description: 'SSL certificate files are not found', + solution: chalk`Please get an SSL certificate and place the certificate files in the correct location. + +Certificate chain file path: {bold.cyanBright ${ssl?.data?.chainFilePath}} +Private key file path: {bold.cyanBright ${ssl?.data?.privateFilePath}} + +Or use ZeroSSL https://docs.dash.org/en/stable/masternodes/dashmate.html#ssl-certificate`, + }, + // ZeroSSL validation errors + [ERRORS.API_KEY_IS_NOT_SET]: { + description: 'ZeroSSL API key is not set.', + solution: chalk`Please obtain your API key from {underline.cyanBright https://app.zerossl.com/developer} +And then update your configuration with {block.cyanBright dashmate config set platform.gateway.ssl.providerConfigs.zerossl.apiKey [KEY]}`, + }, + [ERRORS.EXTERNAL_IP_IS_NOT_SET]: { + description: 'External IP is not set.', + solution: chalk`Please update your configuration to include your external IP using {block.cyanBright dashmate config set externalIp [IP]}`, + }, + [ERRORS.CERTIFICATE_ID_IS_NOT_SET]: { + description: 'ZeroSSL certificate is not configured', + solution: chalk`Please run {bold.cyanBright dashmate ssl obtain} to get a new certificate`, + }, + [ERRORS.PRIVATE_KEY_IS_NOT_PRESENT]: { + description: chalk`ZeroSSL private key file not found in ${ssl?.data?.privateKeyFilePath}.`, + solution: chalk`Please regenerate the certificate using {bold.cyanBright dashmate ssl obtain --force} +and revoke the previous certificate in the ZeroSSL dashboard`, + }, + [ERRORS.EXTERNAL_IP_MISMATCH]: { + description: chalk`ZeroSSL IP ${ssl?.data?.certificate.common_name} does not match external IP ${ssl?.data?.externalIp}.`, + solution: chalk`Please regenerate the certificate using {bold.cyanBright dashmate ssl obtain --force} + and revoke the previous certificate in the ZeroSSL dashboard`, + }, + [ERRORS.CSR_FILE_IS_NOT_PRESENT]: { + description: chalk`ZeroSSL certificate request file not found in ${ssl?.data?.csrFilePath}. 
+This makes auto-renewal impossible.`, + solution: chalk`If you need auto renew, please regenerate the certificate using {bold.cyanBright dashmate ssl obtain --force} +and revoke the previous certificate in the ZeroSSL dashboard`, + }, + [ERRORS.CERTIFICATE_EXPIRES_SOON]: { + description: chalk`ZeroSSL certificate expires at ${ssl?.data?.certificate.expires}.`, + solution: chalk`Please run {bold.cyanBright dashmate ssl obtain} to get a new one`, + }, + [ERRORS.CERTIFICATE_IS_NOT_VALIDATED]: { + description: chalk`ZeroSSL certificate is not approved.`, + solution: chalk`Please run {bold.cyanBright dashmate ssl obtain} to confirm certificate`, + }, + [ERRORS.CERTIFICATE_IS_NOT_VALID]: { + description: chalk`ZeroSSL certificate is not valid.`, + solution: chalk`Please run {bold.cyanBright dashmate ssl zerossl obtain} to get a new one.`, + }, + }[ssl.error] ?? {}; + + if (description) { + const problem = new Problem( + description, + solution, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + break; + } + } + } + + if (samples?.getDashmateConfig()?.get('network') !== NETWORK_LOCAL) { + // Core P2P port + const coreP2pPort = samples.getServiceInfo('core', 'p2pPort'); + if (coreP2pPort && coreP2pPort !== 'OPEN') { + const port = config.get('core.p2p.port'); + const externalIp = config.get('externalIp'); + const problem = new Problem( + 'Core P2P port is unavailable for incoming connections.', + chalk`Please ensure that port ${port} on your public IP address ${externalIp} is open +for incoming connections. You may need to configure your firewall to +ensure this port is accessible from the public internet. If you are using +Network Address Translation (NAT), please enable port forwarding for port 80 +and all Dash service ports listed above.`, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + + // Gateway HTTP port + const gatewayHttpPort = samples.getServiceInfo('gateway', 'httpPort'); + if (gatewayHttpPort && gatewayHttpPort !== 'OPEN') { + const port = config.get('platform.gateway.listeners.dapiAndDrive.port'); + const externalIp = config.get('externalIp'); + const problem = new Problem( + 'Gateway HTTP port is unavailable for incoming connections.', + chalk`Please ensure that port ${port} on your public IP address ${externalIp} is open +for incoming connections. You may need to configure your firewall to +ensure this port is accessible from the public internet. If you are using +Network Address Translation (NAT), please enable port forwarding for port 80 +and all Dash service ports listed above.`, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + + // Tenderdash P2P port + const tenderdashP2pPort = samples.getServiceInfo('drive_tenderdash', 'p2pPort'); + if (tenderdashP2pPort && tenderdashP2pPort !== 'OPEN') { + const port = config.get('platform.drive.tenderdash.p2p.port'); + const externalIp = config.get('externalIp'); + const problem = new Problem( + 'Tenderdash P2P port is unavailable for incoming connections.', + chalk`Please ensure that port ${port} on your public IP address ${externalIp} is open +for incoming connections. You may need to configure your firewall to +ensure this port is accessible from the public internet. 
If you are using +Network Address Translation (NAT), please enable port forwarding for port 80 +and all Dash service ports listed above.`, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + } + } + + return problems; + } + + return analyseConfig; +} diff --git a/packages/dashmate/src/doctor/analyse/analyseCoreFactory.js b/packages/dashmate/src/doctor/analyse/analyseCoreFactory.js new file mode 100644 index 0000000000..eb947e8a6e --- /dev/null +++ b/packages/dashmate/src/doctor/analyse/analyseCoreFactory.js @@ -0,0 +1,92 @@ +import chalk from 'chalk'; +import { SEVERITY } from '../Prescription.js'; +import Problem from '../Problem.js'; + +/** + * @return {analyseCore} + */ +export default function analyseCoreFactory() { + /** + * @typedef {Function} analyseCore + * @param {Samples} samples + * @return {Problem[]} + */ + function analyseCore(samples) { + const problems = []; + + // Core is synced + const masternodeSyncStatus = samples.getServiceInfo('core', 'masternodeSyncStatus'); + + if (masternodeSyncStatus?.IsSynced === false) { + const blockchainInfo = samples.getServiceInfo('core', 'blockchainInfo'); + const verificationProgress = blockchainInfo?.verificationprogress ?? 0; + + const problem = new Problem( + 'Core is syncing blockchain data. Some node services may be temporarily unresponsive', + chalk`Sync is ${(verificationProgress * 100).toFixed(1)}% complete. Please wait until Core is fully synced`, + SEVERITY.MEDIUM, + ); + + problems.push(problem); + } + + // PoSe + if (samples?.getDashmateConfig()?.get('core.masternode.enable')) { + const masternodeStatus = samples.getServiceInfo('core', 'masternodeStatus'); + + const { description, solution, severity } = { + WAITING_FOR_PROTX: { + description: 'The masternode is waiting for ProTx registration confirmation', + solution: chalk`Ensure the ProRegTx transaction has been sent and is confirmed on the network.`, + severity: SEVERITY.HIGH, + }, + POSE_BANNED: { + description: 'The masternode has been banned due to failing Proof-of-Service checks.', + solution: chalk`Submit a ProUpServTx transaction to unban your masternode and ensure +it meets all network requirements.`, + severity: SEVERITY.HIGH, + }, + REMOVED: { + description: 'The masternode has been removed from the network\'s masternode list.', + solution: chalk`Re-register the masternode with a new ProRegTx transaction.`, + severity: SEVERITY.HIGH, + }, + OPERATOR_KEY_CHANGED: { + description: 'The operator key for the masternode has been changed.', + solution: chalk`Update the masternode configuration with the new operator key +using {bold.cyanBright dashmate config set core.masternode.operatorKey }.`, + severity: SEVERITY.HIGH, + }, + PROTX_IP_CHANGED: { + description: 'The IP address registered in the ProTx has changed.', + solution: chalk`Update your masternode\'s configuration with the new IP address.`, + severity: SEVERITY.HIGH, + }, + ERROR: { + description: 'An unknown error has occurred with the masternode.', + solution: chalk`Check the Core logs for detailed error information and troubleshoot accordingly.`, + severity: SEVERITY.HIGH, + }, + UNKNOWN: { + description: 'The masternode status cannot be determined.', + solution: chalk`Check the Core logs for detailed error information and troubleshoot accordingly.`, + severity: SEVERITY.HIGH, + }, + }[masternodeStatus?.state] || {}; + + if (description) { + const problem = new Problem( + description, + solution, + severity, + ); + + problems.push(problem); + } + } + + return problems; + } + + return analyseCore; +} diff 
--git a/packages/dashmate/src/doctor/analyse/analysePlatformFactory.js b/packages/dashmate/src/doctor/analyse/analysePlatformFactory.js new file mode 100644 index 0000000000..d36b71147e --- /dev/null +++ b/packages/dashmate/src/doctor/analyse/analysePlatformFactory.js @@ -0,0 +1,35 @@ +import { SEVERITY } from '../Prescription.js'; +import Problem from '../Problem.js'; + +/** + * @return {analysePlatform} + */ +export default function analysePlatformFactory() { + /** + * @typedef {Function} analysePlatform + * @param {Samples} samples + * @return {Problem[]} + */ + function analysePlatform(samples) { + const problems = []; + + // Tenderdash is synced + if (samples?.getDashmateConfig()?.get('platform.enable')) { + const status = samples.getServiceInfo('drive_tenderdash', 'status'); + + if (status?.sync_info?.catching_up) { + const problem = new Problem( + 'Drive is syncing blockchain data. Some node services may temporarily be unresponsive.', + 'Please wait until Drive is fully synced', + SEVERITY.MEDIUM, + ); + + problems.push(problem); + } + } + + return problems; + } + + return analysePlatform; +} diff --git a/packages/dashmate/src/doctor/analyse/analyseServiceContainersFactory.js b/packages/dashmate/src/doctor/analyse/analyseServiceContainersFactory.js new file mode 100644 index 0000000000..7cf6b6c696 --- /dev/null +++ b/packages/dashmate/src/doctor/analyse/analyseServiceContainersFactory.js @@ -0,0 +1,110 @@ +import chalk from 'chalk'; +import { SEVERITY } from '../Prescription.js'; +import Problem from '../Problem.js'; + +/** + * @param {getServiceList} getServiceList + * @return {analyseServiceContainers} + */ +export default function analyseServiceContainersFactory( + getServiceList, +) { + /** + * @typedef {analyseServiceContainers} + * @param {Samples} samples + * @return {Problem[]} + */ + function analyseServiceContainers(samples) { + const services = getServiceList(samples.getDashmateConfig()); + + const servicesNotStarted = []; + const servicesFailed = []; + const servicesOOMKilled = []; + + for (const service of services) { + const dockerInspect = samples.getServiceInfo(service.name, 'dockerInspect'); + + if (!dockerInspect) { + continue; + } + + if (dockerInspect.message) { + servicesNotStarted.push({ + service, + message: dockerInspect.message, + }); + } else if ( + dockerInspect.State?.Restarting === true + && dockerInspect.State?.ExitCode !== 0 + ) { + servicesFailed.push({ + service, + message: dockerInspect.State.Error, + code: dockerInspect.State.ExitCode, + }); + } else if (dockerInspect.State?.OOMKilled === true) { + servicesOOMKilled.push({ + service, + }); + } + } + + const problems = []; + + if (servicesNotStarted.length > 0) { + let description; + if (servicesNotStarted.length === 1) { + description = `Service ${servicesNotStarted[0].service.title} isn't started.`; + } else { + description = `Services ${servicesNotStarted.map((e) => e.service.title).join(', ')} aren't started.`; + } + + const problem = new Problem( + description, + chalk`Try {bold.cyanBright dashmate start --force} to make sure all services are started`, + SEVERITY.HIGH, + ); + + problems.push(problem); + } + + for (const failedService of servicesFailed) { + let description = `Service ${failedService.service.title} failed with an error code ${failedService.code}`; + + if (failedService.message?.length > 0) { + description += `\nand message: ${failedService.message}`; + } + + description += '.'; + + const problem = new Problem( + description, + 'Please check service logs or share them with Dash 
Core Group',
+        SEVERITY.HIGH,
+      );
+
+      problems.push(problem);
+    }
+
+    if (servicesOOMKilled.length > 0) {
+      let description;
+      if (servicesOOMKilled.length === 1) {
+        description = chalk`Service ${servicesOOMKilled[0].service.title} was killed due to a lack of memory.`;
+      } else {
+        description = chalk`Services ${servicesOOMKilled.map((e) => e.service.title).join(', ')} were killed due to a lack of memory.`;
+      }
+
+      const problem = new Problem(
+        description,
+        'Make sure you have enough memory to run the node.',
+        SEVERITY.HIGH,
+      );
+
+      problems.push(problem);
+    }
+
+    return problems;
+  }
+
+  return analyseServiceContainers;
+}
diff --git a/packages/dashmate/src/doctor/analyse/analyseSystemResourcesFactory.js b/packages/dashmate/src/doctor/analyse/analyseSystemResourcesFactory.js
new file mode 100644
index 0000000000..a5728a4b41
--- /dev/null
+++ b/packages/dashmate/src/doctor/analyse/analyseSystemResourcesFactory.js
@@ -0,0 +1,104 @@
+import { SEVERITY } from '../Prescription.js';
+import Problem from '../Problem.js';
+
+/**
+ * @param {verifySystemRequirements} verifySystemRequirements
+ * @return {analyseSystemResources}
+ */
+export default function analyseSystemResourcesFactory(verifySystemRequirements) {
+  /**
+   * @typedef {Function} analyseSystemResources
+   * @param {Samples} samples
+   * @returns {Problem[]}
+   */
+  function analyseSystemResources(samples) {
+    const {
+      cpu,
+      dockerSystemInfo,
+      currentLoad,
+      diskSpace,
+      fsOpenFiles,
+      memory,
+      diskIO,
+    } = samples.getSystemInfo();
+
+    // System requirements
+    const problems = verifySystemRequirements(
+      {
+        dockerSystemInfo,
+        cpu,
+        memory,
+        diskSpace,
+      },
+      samples.getDashmateConfig().get('platform.enable'),
+      {
+        diskSpace: 5,
+      },
+    );
+
+    // Current CPU load
+    const cpuCores = dockerSystemInfo?.NCPU ??
cpu?.cores; + if (cpuCores && currentLoad && (currentLoad.avgLoad / cpuCores) > 0.8) { + const problem = new Problem( + `Average system load ${currentLoad.avgLoad.toFixed(2)} is higher than normal`, + 'Consider upgrading CPUs', + SEVERITY.LOW, + ); + + problems.push(problem); + } + + // Free memory + if (memory && Number.isInteger(memory.free) && memory.free > 0) { + const memoryGb = memory.free / (1024 ** 3); + if (memoryGb < 0.5) { + const problem = new Problem( + `Only ${memoryGb.toFixed(1)}GB RAM is available`, + 'Consider adding RAM', + SEVERITY.LOW, + ); + + problems.push(problem); + } + } + + // Open file descriptors + if (fsOpenFiles?.allocated && fsOpenFiles?.max) { + const available = fsOpenFiles.max - fsOpenFiles.allocated; + if (available < 1000) { + const problem = new Problem( + `${available} available file descriptors left`, + 'Please increase the maximum open file descriptor limit or stop unnecessary processes.', + SEVERITY.HIGH, + ); + + problems.push(problem); + } + } + + // IO wait time + if (diskIO?.tWaitPercent) { + const THRESHOLD = 40; + + const maxDiskIOWaitPercent = Math.max( + diskIO.rWaitPercent, + diskIO.wWaitPercent, + diskIO.tWaitPercent, + ) * 100; + + if (maxDiskIOWaitPercent > THRESHOLD) { + const problem = new Problem( + `Disk IO wait time is ${maxDiskIOWaitPercent.toFixed(0)}%`, + 'Consider upgrading to faster storage', + SEVERITY.LOW, + ); + + problems.push(problem); + } + } + + return problems; + } + + return analyseSystemResources; +} diff --git a/packages/dashmate/src/doctor/analyseSamplesFactory.js b/packages/dashmate/src/doctor/analyseSamplesFactory.js new file mode 100644 index 0000000000..f3aae62e8b --- /dev/null +++ b/packages/dashmate/src/doctor/analyseSamplesFactory.js @@ -0,0 +1,52 @@ +import { Prescription, SEVERITY } from './Prescription.js'; +import Problem from './Problem.js'; + +/** + * @param {analyseSystemResources} analyseSystemResources + * @param {analyseServiceContainers} analyseServiceContainers + * @param {analyseConfig} analyseConfig + * @param {analyseCore} analyseCore + * @param {analysePlatform} analysePlatform + * @return {analyseSamples} + */ +export default function analyseSamplesFactory( + analyseSystemResources, + analyseServiceContainers, + analyseConfig, + analyseCore, + analysePlatform, +) { + /** + * @typedef {Function} analyseSamples + * @param {Samples} samples + * @return {Prescription} + */ + function analyseSamples(samples) { + const problems = []; + + // System resources + problems.push(...analyseSystemResources(samples)); + + // Docker + const dockerError = samples.getStringifiedDockerError(); + if (dockerError) { + problems.push(new Problem( + 'Docker installation error', + dockerError, + SEVERITY.HIGH, + )); + } + + problems.push(...analyseServiceContainers(samples)); + + problems.push(...analyseConfig(samples)); + + problems.push(...analyseCore(samples)); + + problems.push(...analysePlatform(samples)); + + return new Prescription(problems); + } + + return analyseSamples; +} diff --git a/packages/dashmate/src/doctor/archiveSamples.js b/packages/dashmate/src/doctor/archiveSamples.js new file mode 100644 index 0000000000..4f02eb49d8 --- /dev/null +++ b/packages/dashmate/src/doctor/archiveSamples.js @@ -0,0 +1,64 @@ +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import { create } from 'tar'; + +function writeSampleFile(archiveDir, service, filename, data) { + if (data === undefined || data === null) { + return; + } + + const serviceDir = path.join(archiveDir, service ?? 
''); + + let buffer; + let filetype; + + const dataType = typeof data; + + if (dataType === 'string') { + buffer = data; + filetype = '.txt'; + } else { + buffer = JSON.stringify(data, null, 2); + filetype = '.json'; + } + + if (!fs.existsSync(serviceDir)) { + fs.mkdirSync(serviceDir); + } + + fs.writeFileSync(path.join(serviceDir, `${filename}${filetype}`), buffer, 'utf8'); +} + +/** + * @param {Samples} samples + * @param {string} folderPath + */ +export default async function archiveSamples(samples, folderPath) { + const tempDir = os.tmpdir(); + const archiveName = `dashmate-report-${samples.date.toISOString()}`; + const archiveDir = path.join(tempDir, archiveName); + + writeSampleFile(archiveDir, null, 'date', samples.date.toISOString()); + writeSampleFile(archiveDir, null, 'systemInfo', samples.getSystemInfo()); + writeSampleFile(archiveDir, null, 'dockerError', samples.getStringifiedDockerError()); + writeSampleFile(archiveDir, null, 'dashmateConfig', samples.getDashmateConfig()); + writeSampleFile(archiveDir, null, 'dashmateVersion', samples.getDashmateVersion()); + + for (const [serviceName, service] of Object.entries(samples.getServices())) { + for (const [key, data] of Object.entries(service)) { + if (data !== undefined && data !== null) { + writeSampleFile(archiveDir, serviceName, key, data); + } + } + } + + await create( + { + cwd: archiveDir, + gzip: true, + file: path.join(folderPath, `${archiveName}.tar.gz`), + }, + ['.'], + ); +} diff --git a/packages/dashmate/src/doctor/report.js b/packages/dashmate/src/doctor/report.js deleted file mode 100644 index ea5618d42e..0000000000 --- a/packages/dashmate/src/doctor/report.js +++ /dev/null @@ -1,91 +0,0 @@ -import os from 'os'; -import path from 'path'; -import fs from 'fs'; -import { create } from 'tar'; - -export default class Report { - date; - - #systemInfo = {}; - - #dashmateVersion = null; - - #dashmateConfig = null; - - #services = {}; - - constructor() { - this.date = new Date(); - } - - setSystemInfo(systemInfo) { - this.#systemInfo = systemInfo; - } - - setDashmateVersion(version) { - this.#dashmateVersion = version; - } - - setDashmateConfig(config) { - this.#dashmateConfig = config; - } - - setServiceInfo(service, key, data) { - this.#services[service] = { - ...(this.#services[service] ?? {}), - [key]: data, - }; - } - - #writeReportFile(reportDir, service, filename, data) { - const serviceDir = path.join(reportDir, service ?? 
''); - - let buffer; - let filetype; - - const dataType = typeof data; - - if (dataType === 'string') { - buffer = data; - filetype = '.txt'; - } else { - buffer = JSON.stringify(data, null, 2); - filetype = '.json'; - } - - if (!fs.existsSync(serviceDir)) { - fs.mkdirSync(serviceDir); - } - - fs.writeFileSync(path.join(serviceDir, `${filename}${filetype}`), buffer, 'utf8'); - } - - async archive(folderPath) { - const tempDir = os.tmpdir(); - const reportName = `dashmate-report-${this.date.toISOString()}`; - const reportDir = path.join(tempDir, reportName); - - this.#writeReportFile(reportDir, null, 'systemInfo', this.#systemInfo); - this.#writeReportFile(reportDir, null, 'dashmateConfig', this.#dashmateConfig); - this.#writeReportFile(reportDir, null, 'dashmateVersion', this.#dashmateVersion); - - for (const service of Object.keys(this.#services)) { - for (const dataKey of Object.keys(this.#services[service])) { - const data = this.#services[service][dataKey]; - - if (data !== undefined && data !== null) { - this.#writeReportFile(reportDir, service, dataKey, data); - } - } - } - - await create( - { - cwd: reportDir, - gzip: true, - file: path.join(folderPath, `${reportName}.tar.gz`), - }, - ['.'], - ); - } -} diff --git a/packages/dashmate/src/doctor/unarchiveSamplesFactory.js b/packages/dashmate/src/doctor/unarchiveSamplesFactory.js new file mode 100644 index 0000000000..1eb53c4b30 --- /dev/null +++ b/packages/dashmate/src/doctor/unarchiveSamplesFactory.js @@ -0,0 +1,115 @@ +import os from 'os'; +import path from 'path'; +import fs from 'fs'; +import { extract } from 'tar'; +import Samples from './Samples.js'; +import Config from '../config/Config.js'; + +function readSampleFile(filePath) { + const data = fs.readFileSync(filePath, 'utf8'); + const ext = path.extname(filePath); + + if (ext === '.json') { + return JSON.parse(data); + } + + return data; +} + +/** + * @param {getServiceList} getServiceList + * @returns {unarchiveSamples} + */ +export default function unarchiveSamplesFactory(getServiceList) { + /** + * @typedef {Function} unarchiveSamples + * @param {string} archiveFilePath + * @returns {Promise} + */ + async function unarchiveSamples(archiveFilePath) { + if (!fs.existsSync(archiveFilePath)) { + throw new Error(`Archive file with logged data not found: ${archiveFilePath}`); + } + + const samples = new Samples(); + + const tempDir = os.tmpdir(); + const archiveFileName = path.basename(archiveFilePath, '.tar.gz'); + const extractDir = path.join(tempDir, archiveFileName); + fs.mkdirSync(extractDir, { recursive: true }); + + await extract({ + file: archiveFilePath, + cwd: extractDir, + }); + + if (process.env.DEBUG) { + // eslint-disable-next-line no-console + console.debug(`Extracted logged data to: ${extractDir}`); + } + + const dateFilePath = path.join(extractDir, 'date.txt'); + if (fs.existsSync(dateFilePath)) { + samples.date = readSampleFile(dateFilePath); + } + + const systemInfoFilePath = path.join(extractDir, 'systemInfo.json'); + if (fs.existsSync(systemInfoFilePath)) { + samples.setSystemInfo(readSampleFile(systemInfoFilePath)); + } + + const dockerErrorFilePath = path.join(extractDir, 'dockerError.txt'); + if (fs.existsSync(dockerErrorFilePath)) { + samples.setStringifiedDockerError(readSampleFile(dockerErrorFilePath)); + } + + const dashmateConfigFilePath = path.join(extractDir, 'dashmateConfig.json'); + if (fs.existsSync(dashmateConfigFilePath)) { + const configProperties = readSampleFile(dashmateConfigFilePath); + if (configProperties?.options) { + const config = 
new Config(configProperties.name, configProperties.options); + samples.setDashmateConfig(config); + } + } + + const dashmateVersionFilePath = path.join(extractDir, 'dashmateVersion.txt'); + if (fs.existsSync(dashmateVersionFilePath)) { + samples.setDashmateVersion(readSampleFile(dashmateVersionFilePath)); + } + + const serviceNames = getServiceList(samples.getDashmateConfig()) + .map((service) => service.name); + + for (const serviceName of serviceNames) { + const serviceDir = path.join(extractDir, serviceName); + + if (!fs.statSync(serviceDir) + .isDirectory()) { + continue; + } + + const files = fs.readdirSync(serviceDir); + + for (const file of files) { + const filePath = path.join(serviceDir, file); + + const ext = path.extname(file); + if (ext !== '.txt' && ext !== '.json' && !fs.statSync(filePath) + .isDirectory()) { + continue; + } + + const data = readSampleFile(filePath); + const key = path.basename(file, ext); + samples.setServiceInfo(serviceName, key, data); + } + } + + if (!process.env.DEBUG) { + fs.rmSync(extractDir, { recursive: true }); + } + + return samples; + } + return unarchiveSamples; +} diff --git a/packages/dashmate/src/doctor/verifySystemRequirementsFactory.js b/packages/dashmate/src/doctor/verifySystemRequirementsFactory.js new file mode 100644 index 0000000000..66f7355d47 --- /dev/null +++ b/packages/dashmate/src/doctor/verifySystemRequirementsFactory.js @@ -0,0 +1,130 @@ +import { SEVERITY } from './Prescription.js'; +import Problem from './Problem.js'; + +/** + * @return {verifySystemRequirements} + */ +export default function verifySystemRequirementsFactory() { + /** + * @typedef {Function} verifySystemRequirements + * @param {Object} systemInfo + * @param {Object} systemInfo.dockerSystemInfo + * @param {Object} systemInfo.cpu + * @param {Object} systemInfo.memory + * @param {Object} systemInfo.diskSpace + * @param {boolean} isHP + * @param {Object} [overrideRequirements] + * @param {Number} [overrideRequirements.diskSpace] + * @returns {Problem[]} + */ + function verifySystemRequirements( + { + dockerSystemInfo, + cpu, + memory, + diskSpace, + }, + isHP, + overrideRequirements = {}, + ) { + const MINIMUM_CPU_CORES = isHP ? 4 : 2; + const MINIMUM_CPU_FREQUENCY = 2.4; // GHz + const MINIMUM_RAM = isHP ? 8 : 4; // GB + const MINIMUM_DISK_SPACE = overrideRequirements.diskSpace ?? (isHP ? 200 : 100); // GB + + const problems = []; + + // CPU cores + const cpuCores = dockerSystemInfo?.NCPU ?? cpu?.cores; + + if (Number.isInteger(cpuCores)) { + if (cpuCores < MINIMUM_CPU_CORES) { + const problem = new Problem( + `${cpuCores} CPU cores detected. At least ${MINIMUM_CPU_CORES} are required`, + `Consider upgrading CPUs to make sure the node can provide timely responses +for required network services and avoid Proof-of-Service bans`, + SEVERITY.MEDIUM, + ); + + problems.push(problem); + } + } else if (process.env.DEBUG) { + // eslint-disable-next-line no-console + console.warn('Can\'t get CPU core information'); + } + + // Memory + const totalMemory = dockerSystemInfo?.MemTotal ?? memory?.total; + + if (Number.isInteger(totalMemory)) { + const totalMemoryGb = totalMemory / (1024 ** 3); // Convert to GB + + if (totalMemoryGb < MINIMUM_RAM) { + const problem = new Problem( + `${totalMemoryGb.toFixed(2)}GB RAM detected. 
At least ${MINIMUM_RAM}GB is required`, + `Consider upgrading RAM to make sure the node can provide timely responses +for required network services and avoid Proof-of-Service bans`, + SEVERITY.MEDIUM, + ); + + problems.push(problem); + } + } else if (process.env.DEBUG) { + // eslint-disable-next-line no-console + console.warn('Can\'t get memory information'); + } + + // CPU speed + if (cpu && Number.isFinite(cpu.speed) && cpu.speed !== 0) { + if (cpu.speed < MINIMUM_CPU_FREQUENCY) { + const problem = new Problem( + `${cpu.speed.toFixed(1)}GHz CPU frequency detected. At least ${MINIMUM_CPU_FREQUENCY}GHz is required`, + `Consider upgrading CPUs to make sure the node can provide timely responses +for required network services and avoid Proof-of-Service bans`, + SEVERITY.MEDIUM, + ); + + problems.push(problem); + } + } else if (process.env.DEBUG) { + // eslint-disable-next-line no-console + console.warn('Can\'t get CPU frequency'); + } + + // Check swap information + if (memory && Number.isInteger(memory.swaptotal)) { + const swapTotalGb = (memory.swaptotal / (1024 ** 3)); // Convert bytes to GB + + if (swapTotalGb < 2) { + const problem = new Problem( + `Swap space is ${swapTotalGb.toFixed(2)}GB. 2GB is recommended`, + `Consider enabling SWAP to make sure the node can provide timely responses +for required network services and avoid Proof-of-Service bans`, + SEVERITY.LOW, + ); + + problems.push(problem); + } + } + + // Get disk usage info + if (diskSpace) { + const availableDiskSpace = diskSpace.available / (1024 ** 3); // Convert to GB + + if (availableDiskSpace < MINIMUM_DISK_SPACE) { + const problem = new Problem( + `${availableDiskSpace.toFixed(2)}GB of available disk space detected. At least ${MINIMUM_DISK_SPACE}GB is required`, + `Consider increasing disk space to make sure the node can provide timely responses +for required network services and avoid Proof-of-Service bans`, + MINIMUM_DISK_SPACE - availableDiskSpace < 5 ? 
SEVERITY.HIGH : SEVERITY.MEDIUM, + ); + + problems.push(problem); + } + } + + return problems; + } + + return verifySystemRequirements; +} diff --git a/packages/dashmate/src/listr/prompts/validators/validateSslCertificateFiles.js b/packages/dashmate/src/listr/prompts/validators/validateSslCertificateFiles.js new file mode 100644 index 0000000000..8c23a84069 --- /dev/null +++ b/packages/dashmate/src/listr/prompts/validators/validateSslCertificateFiles.js @@ -0,0 +1,33 @@ +import crypto from 'node:crypto'; +import fs from 'node:fs'; + +/** + * @param {string} chainFilePath + * @param {string} privateFilePath + * @return {boolean} + */ +export default function validateSslCertificateFiles(chainFilePath, privateFilePath) { + const bundlePem = fs.readFileSync(chainFilePath, 'utf8'); + const privateKeyPem = fs.readFileSync(privateFilePath, 'utf8'); + + // Step 2: Create a signature using the private key + const data = 'This is a test message'; + const sign = crypto.createSign('SHA256'); + sign.update(data); + sign.end(); + + const signature = sign.sign(privateKeyPem, 'hex'); + + // Verify the signature using the public key from the certificate + const verify = crypto.createVerify('SHA256'); + verify.update(data); + verify.end(); + + // Extract the public key from the first certificate in the bundle + const certificate = crypto.createPublicKey({ + key: bundlePem, + format: 'pem', + }); + + return verify.verify(certificate, signature, 'hex'); +} diff --git a/packages/dashmate/src/listr/tasks/doctor/collectSamplesTaskFactory.js b/packages/dashmate/src/listr/tasks/doctor/collectSamplesTaskFactory.js new file mode 100644 index 0000000000..1c4d7540be --- /dev/null +++ b/packages/dashmate/src/listr/tasks/doctor/collectSamplesTaskFactory.js @@ -0,0 +1,366 @@ +import fs from 'fs'; +import { Listr } from 'listr2'; +import path from 'path'; +import process from 'process'; +import obfuscateConfig from '../../../config/obfuscateConfig.js'; +import { DASHMATE_VERSION } from '../../../constants.js'; +import Certificate from '../../../ssl/zerossl/Certificate.js'; +import providers from '../../../status/providers.js'; +import hideString from '../../../util/hideString.js'; +import obfuscateObjectRecursive from '../../../util/obfuscateObjectRecursive.js'; +import validateSslCertificateFiles from '../../prompts/validators/validateSslCertificateFiles.js'; + +/** + * + * @param {string} url + * @return {Promise} + */ +async function fetchTextOrError(url) { + try { + const response = await fetch(url); + + return await response.text(); + } catch (e) { + return e.toString(); + } +} + +/** + * @param {DockerCompose} dockerCompose + * @param {createRpcClient} createRpcClient + * @param {getConnectionHost} getConnectionHost + * @param {createTenderdashRpcClient} createTenderdashRpcClient + * @param {getServiceList} getServiceList + * @param {getOperatingSystemInfo} getOperatingSystemInfo + * @param {HomeDir} homeDir + * @param {validateZeroSslCertificate} validateZeroSslCertificate + * @return {collectSamplesTask} + */ +export default function collectSamplesTaskFactory( + dockerCompose, + createRpcClient, + getConnectionHost, + createTenderdashRpcClient, + getServiceList, + getOperatingSystemInfo, + homeDir, + validateZeroSslCertificate, +) { + /** + * @typedef {function} collectSamplesTask + * @param config + * @return {Listr} + */ + function collectSamplesTask(config) { + return new Listr( + [ + { + title: 'System information', + task: async (ctx) => { + // Sample docker installation errors + try { + await 
dockerCompose.throwErrorIfNotInstalled(); + } catch (e) { + ctx.samples.setDockerError(e); + } + + // Operating system info + const osInfo = await getOperatingSystemInfo(); + + ctx.samples.setSystemInfo(osInfo); + }, + }, + { + title: 'Configuration', + task: async (ctx) => { + ctx.samples.setDashmateVersion(DASHMATE_VERSION); + ctx.samples.setDashmateConfig(obfuscateConfig(config)); + + return new Listr([ + { + enabled: () => config.get('platform.enable'), + title: 'Gateway SSL certificates', + task: async () => { + if (!config.get('platform.gateway.ssl.enabled')) { + ctx.samples.setServiceInfo('gateway', 'ssl', { + error: 'disabled', + }); + + return; + } + + switch (config.get('platform.gateway.ssl.provider')) { + case 'self-signed': { + ctx.samples.setServiceInfo('gateway', 'ssl', { + error: 'self-signed', + }); + + return; + } + case 'zerossl': { + const { + error, + data, + } = validateZeroSslCertificate(config, Certificate.EXPIRATION_LIMIT_DAYS); + + obfuscateObjectRecursive(data, (_field, value) => (typeof value === 'string' ? value.replaceAll( + process.env.USER, + hideString(process.env.USER), + ) : value)); + + ctx.samples.setServiceInfo('gateway', 'ssl', { + error, + data, + }); + + return; + } + case 'file': { + // SSL certificate + const certificatesDir = homeDir.joinPath( + config.getName(), + 'platform', + 'gateway', + 'ssl', + ); + + const chainFilePath = path.join(certificatesDir, 'bundle.crt'); + const privateFilePath = path.join(certificatesDir, 'private.key'); + + const data = { + chainFilePath, + privateFilePath, + }; + + obfuscateObjectRecursive(data, (_field, value) => (typeof value === 'string' ? value.replaceAll( + process.env.USER, + hideString(process.env.USER), + ) : value)); + + if (!fs.existsSync(chainFilePath) || !fs.existsSync(privateFilePath)) { + ctx.samples.setServiceInfo('gateway', 'ssl', { + error: 'not-exist', + data, + }); + + return; + } + + const isValid = validateSslCertificateFiles(chainFilePath, privateFilePath); + + if (!isValid) { + ctx.samples.setServiceInfo('gateway', 'ssl', { + error: 'not-valid', + data, + }); + } + + return; + } + default: + throw new Error('Unknown SSL provider'); + } + }, + }, + { + title: 'Core P2P port', + task: async () => { + const port = config.get('core.p2p.port'); + const response = await providers.mnowatch.checkPortStatus(port); + + ctx.samples.setServiceInfo('core', 'p2pPort', response); + }, + }, + { + title: 'Gateway HTTP port', + enabled: () => config.get('platform.enable'), + task: async () => { + const port = config.get('platform.gateway.listeners.dapiAndDrive.port'); + const response = await providers.mnowatch.checkPortStatus(port); + + ctx.samples.setServiceInfo('gateway', 'httpPort', response); + }, + }, + { + title: 'Tenderdash P2P port', + task: async () => { + const port = config.get('platform.drive.tenderdash.p2p.port'); + const response = await providers.mnowatch.checkPortStatus(port); + + ctx.samples.setServiceInfo('drive_tenderdash', 'p2pPort', response); + }, + }, + ]); + }, + }, + { + title: 'Core status', + task: async (ctx) => { + const rpcClient = createRpcClient({ + port: config.get('core.rpc.port'), + user: 'dashmate', + pass: config.get('core.rpc.users.dashmate.password'), + host: await getConnectionHost(config, 'core', 'core.rpc.host'), + }); + + const coreCalls = [ + rpcClient.getBestChainLock(), + rpcClient.quorum('listextended'), + rpcClient.getBlockchainInfo(), + rpcClient.getPeerInfo(), + rpcClient.mnsync('status'), + ]; + + if (config.get('core.masternode.enable')) { + 
coreCalls.push(rpcClient.masternode('status'));
+            }
+
+            const [
+              getBestChainLock,
+              quorums,
+              getBlockchainInfo,
+              getPeerInfo,
+              masternodeSyncStatus,
+              masternodeStatus,
+            ] = (await Promise.allSettled(coreCalls))
+              .map((e) => e.value?.result || e.reason);
+
+            ctx.samples.setServiceInfo('core', 'bestChainLock', getBestChainLock);
+            ctx.samples.setServiceInfo('core', 'quorums', quorums);
+            ctx.samples.setServiceInfo('core', 'blockchainInfo', getBlockchainInfo);
+            ctx.samples.setServiceInfo('core', 'peerInfo', getPeerInfo);
+            ctx.samples.setServiceInfo('core', 'masternodeStatus', masternodeStatus);
+            ctx.samples.setServiceInfo('core', 'masternodeSyncStatus', masternodeSyncStatus);
+          },
+        },
+        {
+          title: 'Tenderdash status',
+          enabled: () => config.get('platform.enable'),
+          task: async (ctx) => {
+            const tenderdashRPCClient = createTenderdashRpcClient({
+              host: config.get('platform.drive.tenderdash.rpc.host'),
+              port: config.get('platform.drive.tenderdash.rpc.port'),
+            });
+
+            // Tenderdash requires all params to be passed, so we use basic fetch
+            async function fetchValidators() {
+              const url = `http://${config.get('platform.drive.tenderdash.rpc.host')}:${config.get('platform.drive.tenderdash.rpc.port')}/validators?request_quorum_info=true`;
+              const response = await fetch(url);
+              return response.json();
+            }
+
+            const [
+              status,
+              genesis,
+              peers,
+              abciInfo,
+              consensusState,
+              validators,
+            ] = await Promise.allSettled([
+              tenderdashRPCClient.request('status', []),
+              tenderdashRPCClient.request('genesis', []),
+              tenderdashRPCClient.request('net_info', []),
+              tenderdashRPCClient.request('abci_info', []),
+              tenderdashRPCClient.request('dump_consensus_state', []),
+              fetchValidators(),
+            ]);
+
+            ctx.samples.setServiceInfo('drive_tenderdash', 'status', status);
+            ctx.samples.setServiceInfo('drive_tenderdash', 'validators', validators);
+            ctx.samples.setServiceInfo('drive_tenderdash', 'genesis', genesis);
+            ctx.samples.setServiceInfo('drive_tenderdash', 'peers', peers);
+            ctx.samples.setServiceInfo('drive_tenderdash', 'abciInfo', abciInfo);
+            ctx.samples.setServiceInfo('drive_tenderdash', 'consensusState', consensusState);
+          },
+        },
+        {
+          title: 'Metrics',
+          enabled: () => config.get('platform.enable'),
+          task: async (ctx, task) => {
+            if (config.get('platform.drive.tenderdash.metrics.enabled')) {
+              // eslint-disable-next-line no-param-reassign
+              task.output = 'Reading Tenderdash metrics';
+
+              const url = `http://${config.get('platform.drive.tenderdash.rpc.host')}:${config.get('platform.drive.tenderdash.rpc.port')}/metrics`;
+
+              const result = await fetchTextOrError(url);
+
+              ctx.samples.setServiceInfo('drive_tenderdash', 'metrics', result);
+            }
+
+            if (config.get('platform.drive.abci.metrics.enabled')) {
+              // eslint-disable-next-line no-param-reassign
+              task.output = 'Reading Drive metrics';
+
+              const url = `http://${config.get('platform.drive.abci.rpc.host')}:${config.get('platform.drive.abci.rpc.port')}/metrics`;
+
+              const result = await fetchTextOrError(url);
+
+              ctx.samples.setServiceInfo('drive_abci', 'metrics', result);
+            }
+
+            if (config.get('platform.gateway.metrics.enabled')) {
+              // eslint-disable-next-line no-param-reassign
+              task.output = 'Reading Gateway metrics';
+
+              const url = `http://${config.get('platform.gateway.metrics.host')}:${config.get('platform.gateway.metrics.port')}/metrics`;
+
+              const result = await fetchTextOrError(url);
+
+              ctx.samples.setServiceInfo('gateway', 'metrics', result);
+            }
+          },
+        },
+        {
+          title: 'Logs',
+          task: async (ctx, task) => {
+            const services = await
getServiceList(config); + + // eslint-disable-next-line no-param-reassign + task.output = `Pulling logs from ${services.map((e) => e.name)}`; + + await Promise.all( + services.map(async (service) => { + const [inspect, logs] = (await Promise.allSettled([ + dockerCompose.inspectService(config, service.name), + dockerCompose.logs(config, [service.name]), + ])).map((e) => e.value || e.reason); + + if (logs?.out) { + // Hide username & external ip from logs + logs.out = logs.out.replaceAll( + process.env.USER, + hideString(process.env.USER), + ); + } + + if (logs?.err) { + logs.err = logs.err.replaceAll( + process.env.USER, + hideString(process.env.USER), + ); + } + + // Hide username & external ip from inspect + obfuscateObjectRecursive(inspect, (_field, value) => ( + typeof value === 'string' + ? value.replaceAll( + process.env.USER, + hideString(process.env.USER), + ) + : value + )); + + ctx.samples.setServiceInfo(service.name, 'stdOut', logs?.out); + ctx.samples.setServiceInfo(service.name, 'stdErr', logs?.err); + ctx.samples.setServiceInfo(service.name, 'dockerInspect', inspect); + }), + ); + }, + }, + ], + ); + } + + return collectSamplesTask; +} diff --git a/packages/dashmate/src/listr/tasks/setup/regular/configureSSLCertificateTaskFactory.js b/packages/dashmate/src/listr/tasks/setup/regular/configureSSLCertificateTaskFactory.js index ef6082771c..50b5f5f050 100644 --- a/packages/dashmate/src/listr/tasks/setup/regular/configureSSLCertificateTaskFactory.js +++ b/packages/dashmate/src/listr/tasks/setup/regular/configureSSLCertificateTaskFactory.js @@ -1,12 +1,14 @@ import fs from 'fs'; import { Listr } from 'listr2'; -import crypto from 'node:crypto'; + +import validateSslCertificateFiles from '../../../prompts/validators/validateSslCertificateFiles.js'; import { PRESET_MAINNET, SSL_PROVIDERS, NODE_TYPE_FULLNODE, } from '../../../../constants.js'; + import validateFileExists from '../../../prompts/validators/validateFileExists.js'; import listCertificates from '../../../../ssl/zerossl/listCertificates.js'; @@ -66,29 +68,7 @@ export default function configureSSLCertificateTaskFactory( return 'the same path for both files'; } - const bundlePem = fs.readFileSync(chainFilePath, 'utf8'); - const privateKeyPem = fs.readFileSync(privateFilePath, 'utf8'); - - // Step 2: Create a signature using the private key - const data = 'This is a test message'; - const sign = crypto.createSign('SHA256'); - sign.update(data); - sign.end(); - - const signature = sign.sign(privateKeyPem, 'hex'); - - // Verify the signature using the public key from the certificate - const verify = crypto.createVerify('SHA256'); - verify.update(data); - verify.end(); - - // Extract the public key from the first certificate in the bundle - const certificate = crypto.createPublicKey({ - key: bundlePem, - format: 'pem', - }); - - const isValid = verify.verify(certificate, signature, 'hex'); + const isValid = validateSslCertificateFiles(chainFilePath, privateFilePath); if (!isValid) { return 'The certificate and private key do not match'; diff --git a/packages/dashmate/src/listr/tasks/setup/regular/verifySystemRequirementsTaskFactory.js b/packages/dashmate/src/listr/tasks/setup/regular/verifySystemRequirementsTaskFactory.js index 3e142af416..7e97b44511 100644 --- a/packages/dashmate/src/listr/tasks/setup/regular/verifySystemRequirementsTaskFactory.js +++ b/packages/dashmate/src/listr/tasks/setup/regular/verifySystemRequirementsTaskFactory.js @@ -6,12 +6,14 @@ import { Listr } from 'listr2'; * @param {Docker} docker * @param 
{DockerCompose} dockerCompose * @param {getOperatingSystemInfo} getOperatingSystemInfo + * @param {verifySystemRequirements} verifySystemRequirements * @return {verifySystemRequirementsTask} */ export default function verifySystemRequirementsTaskFactory( docker, dockerCompose, getOperatingSystemInfo, + verifySystemRequirements, ) { /** * @typedef {function} verifySystemRequirementsTask @@ -24,72 +26,9 @@ export default function verifySystemRequirementsTaskFactory( task: async (ctx, task) => { await dockerCompose.throwErrorIfNotInstalled(); - const MINIMUM_CPU_CORES = ctx.isHP ? 4 : 2; - const MINIMUM_CPU_FREQUENCY = 2.4; // GHz - const MINIMUM_RAM = ctx.isHP ? 8 : 4; // GB - const MINIMUM_DISK_SPACE = ctx.isHP ? 200 : 100; // GB + const systemInfo = await getOperatingSystemInfo(); - const warnings = []; - - const { - dockerSystemInfo, cpu, memory, diskSpace, - } = await getOperatingSystemInfo(); - - if (dockerSystemInfo) { - if (Number.isInteger(dockerSystemInfo.NCPU)) { - // Check CPU cores - const cpuCores = dockerSystemInfo.NCPU; - - if (cpuCores < MINIMUM_CPU_CORES) { - warnings.push(`${cpuCores} CPU cores detected. At least ${MINIMUM_CPU_CORES} are required`); - } - } else { - // eslint-disable-next-line no-console - console.warn('Can\'t get NCPU from docker info'); - } - - // Check RAM - if (Number.isInteger(dockerSystemInfo.MemTotal)) { - const memoryGb = dockerSystemInfo.MemTotal / (1024 ** 3); // Convert to GB - - if (memoryGb < MINIMUM_RAM) { - warnings.push(`${memoryGb.toFixed(2)}GB RAM detected. At least ${MINIMUM_RAM}GB is required`); - } - } else { - // eslint-disable-next-line no-console - console.warn('Can\'t get MemTotal from docker info'); - } - } - - // Check CPU frequency - if (cpu) { - if (cpu.speed === 0) { - if (process.env.DEBUG) { - // eslint-disable-next-line no-console - console.warn('Can\'t get CPU frequency'); - } - } else if (cpu.speed < MINIMUM_CPU_FREQUENCY) { - warnings.push(`${cpu.speed.toFixed(1)}GHz CPU frequency detected. At least ${MINIMUM_CPU_FREQUENCY}GHz is required`); - } - } - - // Check swap information - if (memory) { - const swapTotalGb = (memory.swaptotal / (1024 ** 3)); // Convert bytes to GB - - if (swapTotalGb < 2) { - warnings.push(`Swap space is ${swapTotalGb.toFixed(2)}GB. 2GB is recommended`); - } - } - - // Get disk usage info - if (diskSpace) { - const availableDiskSpace = diskSpace.available / (1024 ** 3); // Convert to GB - - if (availableDiskSpace < MINIMUM_DISK_SPACE) { - warnings.push(`${availableDiskSpace.toFixed(2)}GB available disk space detected. 
At least ${MINIMUM_DISK_SPACE}GB is required`); - } - } + const problems = verifySystemRequirements(systemInfo, ctx.isHP); let message = ''; if (ctx.isHP) { @@ -106,12 +45,13 @@ export default function verifySystemRequirementsTaskFactory( Upgrading system resources is recommended before proceeding.`; } - if (warnings.length > 0) { - const warningsText = warnings.map((warning) => ` - ${warning}`).join('\n'); + if (problems.length > 0) { + const problemsText = problems + .map((p) => ` - ${p.getDescription()}`).join('\n'); const header = chalk` Minimum requirements have not been met: -{red ${warningsText}} +{red ${problemsText}} ${message}\n`; diff --git a/packages/dashmate/src/listr/tasks/setup/setupLocalPresetTaskFactory.js b/packages/dashmate/src/listr/tasks/setup/setupLocalPresetTaskFactory.js index 5338eb4178..242f65582a 100644 --- a/packages/dashmate/src/listr/tasks/setup/setupLocalPresetTaskFactory.js +++ b/packages/dashmate/src/listr/tasks/setup/setupLocalPresetTaskFactory.js @@ -139,11 +139,8 @@ export default function setupLocalPresetTaskFactory( config.set('docker.network.subnet', subnet.join('.')); // Setup Core debug logs - const coreLogFilePath = homeDir.joinPath('logs', config.getName(), 'core.log'); - config.set('core.log.file.path', coreLogFilePath); - if (ctx.debugLogs) { - config.set('core.log.file.categories', ['all']); + config.set('core.log.debug.enabled', true); } // Although not all nodes are miners, all nodes should be aware of diff --git a/packages/dashmate/src/listr/tasks/ssl/zerossl/obtainZeroSSLCertificateTaskFactory.js b/packages/dashmate/src/listr/tasks/ssl/zerossl/obtainZeroSSLCertificateTaskFactory.js index 12718ad4de..86bb3db5ca 100644 --- a/packages/dashmate/src/listr/tasks/ssl/zerossl/obtainZeroSSLCertificateTaskFactory.js +++ b/packages/dashmate/src/listr/tasks/ssl/zerossl/obtainZeroSSLCertificateTaskFactory.js @@ -1,9 +1,10 @@ import { Listr } from 'listr2'; import chalk from 'chalk'; -import path from 'path'; import fs from 'fs'; +import lodash from 'lodash'; import wait from '../../../../util/wait.js'; +import { ERRORS } from '../../../../ssl/zerossl/validateZeroSslCertificateFactory.js'; /** * @param {generateCsr} generateCsr @@ -16,6 +17,7 @@ import wait from '../../../../util/wait.js'; * @param {saveCertificateTask} saveCertificateTask * @param {VerificationServer} verificationServer * @param {HomeDir} homeDir + * @param {validateZeroSslCertificate} validateZeroSslCertificate * @return {obtainZeroSSLCertificateTask} */ export default function obtainZeroSSLCertificateTaskFactory( @@ -29,6 +31,7 @@ export default function obtainZeroSSLCertificateTaskFactory( saveCertificateTask, verificationServer, homeDir, + validateZeroSslCertificate, ) { /** * @typedef {obtainZeroSSLCertificateTask} @@ -36,124 +39,69 @@ export default function obtainZeroSSLCertificateTaskFactory( * @return {Promise} */ async function obtainZeroSSLCertificateTask(config) { - // Make sure that required config options are set - const apiKey = config.get('platform.gateway.ssl.providerConfigs.zerossl.apiKey', true); - const externalIp = config.get('externalIp', true); - - const sslConfigDir = homeDir.joinPath(config.getName(), 'platform', 'gateway', 'ssl'); - const csrFilePath = path.join(sslConfigDir, 'csr.pem'); - const privateKeyFilePath = path.join(sslConfigDir, 'private.key'); - const bundleFilePath = path.join(sslConfigDir, 'bundle.crt'); - - // Ensure we have config dir created - fs.mkdirSync(sslConfigDir, { recursive: true }); - return new Listr([ { title: 'Check if certificate 
already exists and not expiring soon', // Skips the check if force flag is set skip: (ctx) => ctx.force, task: async (ctx, task) => { - const certificateId = await config.get('platform.gateway.ssl.providerConfigs.zerossl.id'); + const { error, data } = await validateZeroSslCertificate(config, ctx.expirationDays); - if (!certificateId) { - // Certificate is not configured - - // eslint-disable-next-line no-param-reassign - task.output = 'Certificate is not configured yet, creating a new one'; - - return; - } + lodash.merge(ctx, data); - // Certificate is already configured - - // Check if certificate files are present - ctx.isCrtFilePresent = fs.existsSync(csrFilePath); - - ctx.isPrivateKeyFilePresent = fs.existsSync(privateKeyFilePath); - - ctx.isBundleFilePresent = fs.existsSync(bundleFilePath); - - // This function will throw an error if certificate with specified ID is not present - const certificate = await getCertificate(apiKey, certificateId); - - // If certificate exists but private key does not, then we can't setup TLS connection - // In this case we need to regenerate certificate or put back this private key - if (!ctx.isPrivateKeyFilePresent) { - throw new Error(`Certificate private key file not found in ${privateKeyFilePath}.\n` - + 'Please regenerate the certificate using the the obtain' - + ' command with the --force flag, and revoke the previous certificate in' - + ' the ZeroSSL dashboard'); - } - - // We need to make sure that external IP and certificate IP match - if (certificate.common_name !== externalIp) { - throw new Error(`Certificate IPe ${certificate.common_name} does not match external IP ${externalIp}.\n` - + 'Please change the external IP in config or regenerate the certificate ' - + ' using the obtain command with the --force flag, and revoke the previous' - + ' certificate in the ZeroSSL dashboard'); - } - - if (!certificate.isExpiredInDays(ctx.expirationDays)) { - // Certificate is not going to expire soon - - if (certificate.status === 'issued') { - // Certificate is valid, so we might need only to download certificate bundle - ctx.certificate = certificate; + // Ensure we have config dir created + fs.mkdirSync(ctx.sslConfigDir, { recursive: true }); + switch (error) { + case undefined: // eslint-disable-next-line no-param-reassign - task.output = `Certificate is valid and expires at ${certificate.expires}`; - } else if (['pending_validation', 'draft'].includes(certificate.status)) { - // Certificate is already created, so we just need to pass validation - // and download certificate file - ctx.certificate = certificate; - - // We need to download new certificate bundle - ctx.isBundleFilePresent = false; - - // eslint-disable-next-line no-param-reassign - task.output = 'Certificate was already created, but not validated yet.'; - } else { - // Certificate is not valid, so we need to re-create it - - // We need to download certificate bundle - ctx.isBundleFilePresent = false; - - if (!ctx.isCrtFilePresent) { - throw new Error(`Certificate request file not found in ${csrFilePath}.\n` - + 'To create a new certificate, please use the obtain' - + ' command with the --force flag and revoke the previous certificate' - + ' in the ZeroSSL dashboard'); - } - - ctx.csr = fs.readFileSync(csrFilePath, 'utf8'); - + task.output = `Certificate is valid and expires at ${ctx.certificate.expires}`; + break; + case ERRORS.API_KEY_IS_NOT_SET: + throw new Error('ZeroSSL API key is not set. 
Please set it in the config file'); + case ERRORS.EXTERNAL_IP_IS_NOT_SET: + throw new Error('External IP is not set. Please set it in the config file'); + case ERRORS.CERTIFICATE_ID_IS_NOT_SET: // eslint-disable-next-line no-param-reassign - task.output = 'Certificate is not valid. Create a new one'; - } - } else { - // Certificate is going to expire soon, we need to obtain a new one - - // We need to download new certificate bundle - ctx.isBundleFilePresent = false; - - if (!ctx.isCrtFilePresent) { - throw new Error(`Certificate request file not found in ${csrFilePath}.\n` + task.output = 'Certificate is not configured yet, creating a new one'; + break; + case ERRORS.PRIVATE_KEY_IS_NOT_PRESENT: + // If certificate exists but private key does not, then we can't set up TLS connection + // In this case we need to regenerate certificate or put back this private key + throw new Error(`Certificate private key file not found in ${ctx.privateKeyFilePath}.\n` + + 'Please regenerate the certificate using the obtain' + + ' command with the --force flag and revoke the previous certificate in' + + ' the ZeroSSL dashboard'); + case ERRORS.EXTERNAL_IP_MISMATCH: + throw new Error(`Certificate IPe ${ctx.certificate.common_name} does not match external IP ${ctx.externalIp}.\n` + + 'Please change the external IP in config. Otherwise, regenerate the certificate ' + + ' using the obtain command with the --force flag and revoke the previous' + + ' certificate in the ZeroSSL dashboard'); + case ERRORS.CSR_FILE_IS_NOT_PRESENT: + throw new Error(`Certificate request file not found in ${ctx.csrFilePath}.\n` + 'To renew certificate please use the obtain' + ' command with the --force flag, and revoke the previous certificate in' + ' the ZeroSSL dashboard'); - } - - ctx.csr = fs.readFileSync(csrFilePath, 'utf8'); - - // eslint-disable-next-line no-param-reassign - task.output = `Certificate exists but expires in less than ${ctx.expirationDays} days at ${certificate.expires}. Obtain a new one`; + case ERRORS.CERTIFICATE_EXPIRES_SOON: + // eslint-disable-next-line no-param-reassign + task.output = `Certificate exists but expires in less than ${ctx.expirationDays} days at ${ctx.certificate.expires}. Obtain a new one`; + break; + case ERRORS.CERTIFICATE_IS_NOT_VALIDATED: + // eslint-disable-next-line no-param-reassign + task.output = 'Certificate was already created, but has not been validated yet.'; + break; + case ERRORS.CERTIFICATE_IS_NOT_VALID: + // eslint-disable-next-line no-param-reassign + task.output = 'Certificate is not valid. 
Create a new one'; + break; + default: + throw new Error(`Unknown error: ${error}`); } }, }, { title: 'Generate a keypair', - enabled: (ctx) => !ctx.isCrtFilePresent, + enabled: (ctx) => !ctx.isCsrFilePresent, task: async (ctx) => { ctx.keyPair = await generateKeyPair(); ctx.privateKeyFile = ctx.keyPair.privateKey; @@ -161,11 +109,11 @@ export default function obtainZeroSSLCertificateTaskFactory( }, { title: 'Generate certificate request', - enabled: (ctx) => !ctx.isCrtFilePresent, + enabled: (ctx) => !ctx.isCsrFilePresent, task: async (ctx) => { ctx.csr = await generateCsr( ctx.keyPair, - externalIp, + ctx.externalIp, ); }, }, @@ -175,8 +123,8 @@ export default function obtainZeroSSLCertificateTaskFactory( task: async (ctx) => { ctx.certificate = await createZeroSSLCertificate( ctx.csr, - externalIp, - apiKey, + ctx.externalIp, + ctx.apiKey, ); config.set('platform.gateway.ssl.enabled', true); @@ -188,7 +136,7 @@ export default function obtainZeroSSLCertificateTaskFactory( title: 'Set up verification server', skip: (ctx) => ctx.certificate && !['pending_validation', 'draft'].includes(ctx.certificate.status), task: async (ctx) => { - const validationResponse = ctx.certificate.validation.other_methods[externalIp]; + const validationResponse = ctx.certificate.validation.other_methods[ctx.externalIp]; await verificationServer.setup( config, @@ -209,14 +157,14 @@ export default function obtainZeroSSLCertificateTaskFactory( let retry; do { try { - await verifyDomain(ctx.certificate.id, apiKey); + await verifyDomain(ctx.certificate.id, ctx.apiKey); } catch (e) { if (ctx.noRetry !== true) { retry = await task.prompt({ type: 'toggle', header: chalk` An error occurred during verification: {red ${e.message}} - Please ensure that port 80 on your public IP address ${externalIp} is open + Please ensure that port 80 on your public IP address ${ctx.externalIp} is open for incoming HTTP connections. You may need to configure your firewall to ensure this port is accessible from the public internet. 
If you are using Network Address Translation (NAT), please enable port forwarding for port 80 @@ -245,7 +193,7 @@ export default function obtainZeroSSLCertificateTaskFactory( try { ctx.certificateFile = await downloadCertificate( ctx.certificate.id, - apiKey, + ctx.apiKey, ); // eslint-disable-next-line no-param-reassign @@ -271,30 +219,30 @@ export default function obtainZeroSSLCertificateTaskFactory( title: 'Save certificate private key file', enabled: (ctx) => !ctx.isPrivateKeyFilePresent, task: async (ctx, task) => { - fs.writeFileSync(privateKeyFilePath, ctx.privateKeyFile, 'utf8'); + fs.writeFileSync(ctx.privateKeyFilePath, ctx.privateKeyFile, 'utf8'); // eslint-disable-next-line no-param-reassign - task.output = privateKeyFilePath; + task.output = ctx.privateKeyFilePath; }, }, { title: 'Save certificate request file', - enabled: (ctx) => !ctx.isCrtFilePresent, + enabled: (ctx) => !ctx.isCsrFilePresent, task: async (ctx, task) => { - fs.writeFileSync(csrFilePath, ctx.csr, 'utf8'); + fs.writeFileSync(ctx.csrFilePath, ctx.csr, 'utf8'); // eslint-disable-next-line no-param-reassign - task.output = csrFilePath; + task.output = ctx.csrFilePath; }, }, { title: 'Save certificate file', skip: (ctx) => ctx.isBundleFilePresent, task: async (ctx, task) => { - fs.writeFileSync(bundleFilePath, ctx.certificateFile, 'utf8'); + fs.writeFileSync(ctx.bundleFilePath, ctx.certificateFile, 'utf8'); // eslint-disable-next-line no-param-reassign - task.output = bundleFilePath; + task.output = ctx.bundleFilePath; }, }, { diff --git a/packages/dashmate/src/listr/tasks/startNodeTaskFactory.js b/packages/dashmate/src/listr/tasks/startNodeTaskFactory.js index b364bcf224..06bff9e7ed 100644 --- a/packages/dashmate/src/listr/tasks/startNodeTaskFactory.js +++ b/packages/dashmate/src/listr/tasks/startNodeTaskFactory.js @@ -35,8 +35,10 @@ export default function startNodeTaskFactory( throw new Error(`'core.miner.enable' option only works with local network. 
Your network is ${config.get('network')}.`); } - const coreLogFilePath = config.get('core.log.file.path'); - ensureFileMountExists(coreLogFilePath, 0o666); + const coreLogFilePath = config.get('core.log.filePath'); + if (coreLogFilePath !== null) { + ensureFileMountExists(coreLogFilePath, 0o666); + } // Check Drive log files are created if (config.get('platform.enable')) { diff --git a/packages/dashmate/src/ssl/zerossl/validateZeroSslCertificateFactory.js b/packages/dashmate/src/ssl/zerossl/validateZeroSslCertificateFactory.js new file mode 100644 index 0000000000..f4d9956a9d --- /dev/null +++ b/packages/dashmate/src/ssl/zerossl/validateZeroSslCertificateFactory.js @@ -0,0 +1,139 @@ +import fs from 'fs'; +import path from 'path'; + +export const ERRORS = { + API_KEY_IS_NOT_SET: 'API_KEY_IS_NOT_SET', + EXTERNAL_IP_IS_NOT_SET: 'EXTERNAL_IP_IS_NOT_SET', + CERTIFICATE_ID_IS_NOT_SET: 'CERTIFICATE_ID_IS_NOT_SET', + PRIVATE_KEY_IS_NOT_PRESENT: 'PRIVATE_KEY_IS_NOT_PRESENT', + EXTERNAL_IP_MISMATCH: 'EXTERNAL_IP_MISMATCH', + CSR_FILE_IS_NOT_PRESENT: 'CSR_FILE_IS_NOT_PRESENT', + CERTIFICATE_EXPIRES_SOON: 'CERTIFICATE_EXPIRES_SOON', + CERTIFICATE_IS_NOT_VALIDATED: 'CERTIFICATE_IS_NOT_VALIDATED', + CERTIFICATE_IS_NOT_VALID: 'CERTIFICATE_IS_NOT_VALID', +}; + +/** + * @param {HomeDir} homeDir + * @param {getCertificate} getCertificate + * @return {validateZeroSslCertificate} + */ +export default function validateZeroSslCertificateFactory(homeDir, getCertificate) { + /** + * @typedef {validateZeroSslCertificate} + * @param {Config} config + * @param {number} expirationDays + * @return {Promise<{ [error: String], [data: Object] }>} + */ + async function validateZeroSslCertificate(config, expirationDays) { + const data = {}; + + data.sslConfigDir = homeDir.joinPath(config.getName(), 'platform', 'gateway', 'ssl'); + data.csrFilePath = path.join(data.sslConfigDir, 'csr.pem'); + data.privateKeyFilePath = path.join(data.sslConfigDir, 'private.key'); + data.bundleFilePath = path.join(data.sslConfigDir, 'bundle.crt'); + + data.apiKey = config.get('platform.gateway.ssl.providerConfigs.zerossl.apiKey'); + + if (!data.apiKey) { + return { + error: ERRORS.API_KEY_IS_NOT_SET, + data, + }; + } + + data.externalIp = config.get('externalIp'); + + if (!data.externalIp) { + return { + error: ERRORS.EXTERNAL_IP_IS_NOT_SET, + data, + }; + } + + const certificateId = config.get('platform.gateway.ssl.providerConfigs.zerossl.id'); + + if (!certificateId) { + return { + error: ERRORS.CERTIFICATE_ID_IS_NOT_SET, + data, + }; + } + + // Certificate is already configured + + // Check if certificate files are present + data.isCsrFilePresent = fs.existsSync(data.csrFilePath); + data.isPrivateKeyFilePresent = fs.existsSync(data.privateKeyFilePath); + data.isBundleFilePresent = fs.existsSync(data.bundleFilePath); + + // This function will throw an error if certificate with specified ID is not present + const certificate = await getCertificate(data.apiKey, certificateId); + + data.isExpiresSoon = certificate.isExpiredInDays(expirationDays); + + // If certificate exists but private key does not, then we can't setup TLS connection + // In this case we need to regenerate a certificate or put back this private key + if (!data.isPrivateKeyFilePresent) { + return { + error: ERRORS.PRIVATE_KEY_IS_NOT_PRESENT, + data, + }; + } + + // We need to make sure that external IP and certificate IP match + if (certificate.common_name !== data.externalIp) { + return { + error: ERRORS.EXTERNAL_IP_MISMATCH, + data, + }; + } + + if (['pending_validation', 
'draft'].includes(certificate.status)) { + // Certificate is already created, so we just need to pass validation + // and download certificate file + data.certificate = certificate; + + // We need to download new certificate bundle + data.isBundleFilePresent = false; + + return { + error: ERRORS.CERTIFICATE_IS_NOT_VALIDATED, + data, + }; + } + + if (certificate.status !== 'issued' || data.isExpiresSoon) { + // Certificate is going to expire soon, or current certificate is not valid + // we need to obtain a new one + + // We need to download new certificate bundle + data.isBundleFilePresent = false; + + if (!data.isCsrFilePresent) { + return { + error: ERRORS.CSR_FILE_IS_NOT_PRESENT, + data, + }; + } + + data.csr = fs.readFileSync(data.csrFilePath, 'utf8'); + + return { + error: data.isExpiresSoon + ? ERRORS.CERTIFICATE_EXPIRES_SOON + : ERRORS.CERTIFICATE_IS_NOT_VALID, + data, + }; + } + + // Certificate is valid, so we might need only to download certificate bundle + data.certificate = certificate; + + return { + data, + }; + } + + return validateZeroSslCertificate; +} diff --git a/packages/dashmate/src/test/mock/getConfigMock.js b/packages/dashmate/src/test/mock/getConfigMock.js index 1961c48f28..641a1b8720 100644 --- a/packages/dashmate/src/test/mock/getConfigMock.js +++ b/packages/dashmate/src/test/mock/getConfigMock.js @@ -17,7 +17,6 @@ export default function getConfigMock(sinon) { configMock.get.withArgs('platform.drive.tenderdash.rpc.host').returns('127.0.0.1'); configMock.get.withArgs('platform.drive.tenderdash.rpc.port').returns('8103'); configMock.get.withArgs('platform.enable').returns(true); - configMock.get.withArgs('core.log.file.path').returns('/Users/user/.dashmate/logs/base/core.log'); return configMock; } diff --git a/packages/dashmate/templates/core/dash.conf.dot b/packages/dashmate/templates/core/dash.conf.dot index 807431f984..790575a9fb 100644 --- a/packages/dashmate/templates/core/dash.conf.dot +++ b/packages/dashmate/templates/core/dash.conf.dot @@ -3,14 +3,30 @@ daemon=0 # leave this set to 0 for Docker logtimestamps=1 maxconnections=256 reindex={{?it.reindex}}1{{??}}0{{?}} -{{~it.core.log.file.categories :category}} -{{? category === 'all'}} + +{{? it.core.log.debug.enabled }} +logips={{? it.core.log.debug.ips }}1{{??}}0{{?}} +logsourcelocations={{? it.core.log.debug.sourceLocations }}1{{??}}0{{?}} +logthreadnames={{? it.core.log.debug.threadNames }}1{{??}}0{{?}} +logtimemicros={{? it.core.log.debug.timeMicros }}1{{??}}0{{?}} + + {{? it.core.log.debug.includeOnly.length === 0 }} debug=1 + {{??}} + {{~it.core.log.debug.includeOnly :category}} +debug={{= category }} + {{~}} + {{?}} + {{~it.core.log.debug.exclude :category}} +debugexclude={{= category}} + {{~}} +{{?}} + +{{? it.core.log.filePath === null }} +nodebuglogfile=1 {{??}} -debug={{=category}}{{?}}{{~}} -debuglogfile=/var/log/dash/{{= it.core.log.file.path.split('/').pop() }} -logips={{=it.core.logIps }} -fallbackfee=0.00001 +debuglogfile=/var/log/dash/debug.log }} +{{?}} # JSON RPC server=1 diff --git a/packages/dashmate/templates/dynamic-compose.yml.dot b/packages/dashmate/templates/dynamic-compose.yml.dot index fbe122735e..6ed120930f 100644 --- a/packages/dashmate/templates/dynamic-compose.yml.dot +++ b/packages/dashmate/templates/dynamic-compose.yml.dot @@ -7,6 +7,10 @@ services: {{~ it.core.docker.commandArgs :arg }} - {{=arg}} {{~}} + {{? 
it.core.log.filePath !== null }} + volumes: + - {{=it.core.log.filePath}}:/var/log/dash/debug.log + {{?}} {{ driveLogs = Object.entries(it.platform.drive.abci.logs).filter(([, settings]) => settings.destination !== 'stderr' && settings.destination !== 'stdout'); }} {{? driveLogs.length > 0 }} @@ -18,6 +22,12 @@ services: {{~}} {{?}} + {{? it.platform.drive.tenderdash.log.path !== null }} + drive_tenderdash: + volumes: + - {{=it.platform.drive.tenderdash.log.path}}:/var/log/tenderdash/tenderdash.log + {{?}} + {{ gatewayLogs = it.platform.gateway.log.accessLogs.filter((l) => l.type === 'file'); }} {{? gatewayLogs.length > 0 }} gateway: diff --git a/packages/dashmate/templates/platform/drive/tenderdash/config.toml.dot b/packages/dashmate/templates/platform/drive/tenderdash/config.toml.dot index 9caf3e128b..a818a35640 100644 --- a/packages/dashmate/templates/platform/drive/tenderdash/config.toml.dot +++ b/packages/dashmate/templates/platform/drive/tenderdash/config.toml.dot @@ -54,7 +54,7 @@ db-dir = "data" log-level = "{{=it.platform.drive.tenderdash.log.level}}" # Path to the log file. This parameter is an additional option to the existing stderr output -{{? it.platform.drive.tenderdash.log.path }}log-file-path = "/var/log/tenderdash/{{= it.platform.drive.tenderdash.log.path.split('/').pop() }}"{{?}} +{{? it.platform.drive.tenderdash.log.path }}log-file-path = "/var/log/tenderdash/tenderdash.log"{{?}} # Output format: 'plain' (colored text) or 'json' log-format = "{{=it.platform.drive.tenderdash.log.format}}" diff --git a/packages/dashmate/test/unit/doctor/verifySystemRequirementsFactory.spec.js b/packages/dashmate/test/unit/doctor/verifySystemRequirementsFactory.spec.js new file mode 100644 index 0000000000..38590b3fbd --- /dev/null +++ b/packages/dashmate/test/unit/doctor/verifySystemRequirementsFactory.spec.js @@ -0,0 +1,147 @@ +import verifySystemRequirementsFactory from '../../../src/doctor/verifySystemRequirementsFactory.js'; +import Problem from '../../../src/doctor/Problem.js'; + +describe('verifySystemRequirementsFactory', () => { + let verifySystemRequirements; + + beforeEach(() => { + verifySystemRequirements = verifySystemRequirementsFactory(); + }); + + describe('CPU cores', () => { + it('should return a problem if CPU cores are less than minimum for non evonode', () => { + const systemInfo = { + dockerSystemInfo: { NCPU: 1 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(1); + expect(problems[0]).to.be.an.instanceOf(Problem); + expect(problems[0].getDescription()).to.include('1 CPU cores detected'); + }); + + it('should return a problem if CPU cores are less than minimum for evonode', () => { + const systemInfo = { + dockerSystemInfo: { NCPU: 2 }, + }; + + const problems = verifySystemRequirements(systemInfo, true); + + expect(problems).to.have.lengthOf(1); + expect(problems[0]).to.be.an.instanceOf(Problem); + expect(problems[0].getDescription()).to.include('2 CPU cores detected'); + }); + + it('should return a problem if CPU cores are less than minimum and docker info is not present', () => { + const systemInfo = { + cpu: { cores: 1 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(1); + expect(problems[0]).to.be.an.instanceOf(Problem); + expect(problems[0].getDescription()).to.include('1 CPU cores detected'); + }); + + it('should not return anything if CPU cores information is not available', () => { + const systemInfo = { }; + + const problems = 
verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(0); + }); + }); + + describe('CPU speed', () => { + it('should return a problem if CPU speed is less than minimum', () => { + const systemInfo = { + cpu: { speed: 1.5 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(1); + expect(problems[0]).to.be.an.instanceOf(Problem); + expect(problems[0].getDescription()).to.include('1.5GHz CPU frequency detected'); + }); + + it('should return a problem if CPU speed is not detected', () => { + const systemInfo = { + cpu: { speed: 0 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(0); + }); + }); + + describe('RAM', () => { + it('should return a problem if RAM is less than minimum (from Docker info)', () => { + const systemInfo = { + dockerSystemInfo: { MemTotal: 2 * 1024 ** 3 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(1); + expect(problems[0]).to.be.an.instanceOf(Problem); + expect(problems[0].getDescription()).to.include('2.00GB RAM detected'); + }); + + it('should return a problem if RAM is less than minimum (from memory)', () => { + const systemInfo = { + memory: { total: 2 * 1024 ** 3 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(1); + expect(problems[0]).to.be.an.instanceOf(Problem); + expect(problems[0].getDescription()).to.include('2.00GB RAM detected'); + }); + }); + + describe('Swap', () => { + it('should return a problem if swap space is less than recommended', () => { + const systemInfo = { + memory: { swaptotal: 1024 ** 3 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(1); + expect(problems[0]).to.be.an.instanceOf(Problem); + expect(problems[0].getDescription()).to.include('Swap space is 1.00GB'); + }); + }); + + describe('Disk space', () => { + it('should return a problem if disk space is less than minimum', () => { + const systemInfo = { + diskSpace: { available: 50 * 1024 ** 3 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(1); + expect(problems[0]).to.be.an.instanceOf(Problem); + expect(problems[0].getDescription()).to.include('50.00GB of available disk space detected'); + }); + }); + + it('should not return any problems if all requirements are met', () => { + const systemInfo = { + dockerSystemInfo: { NCPU: 4, MemTotal: 8 * 1024 ** 3 }, + cpu: { cores: 4, speed: 3.0 }, + memory: { total: 8 * 1024 ** 3, swaptotal: 2 * 1024 ** 3 }, + diskSpace: { available: 500 * 1024 ** 3 }, + }; + + const problems = verifySystemRequirements(systemInfo, false); + + expect(problems).to.have.lengthOf(0); + }); +}); diff --git a/packages/dashmate/test/unit/ssl/zerossl/validateZeroSslCertificateFactory.spec.js b/packages/dashmate/test/unit/ssl/zerossl/validateZeroSslCertificateFactory.spec.js new file mode 100644 index 0000000000..44c94a2f18 --- /dev/null +++ b/packages/dashmate/test/unit/ssl/zerossl/validateZeroSslCertificateFactory.spec.js @@ -0,0 +1,192 @@ +import fs from 'fs'; +import path from 'path'; +import validateZeroSslCertificateFactory, { ERRORS } from '../../../../src/ssl/zerossl/validateZeroSslCertificateFactory.js'; + +describe('validateZeroSslCertificateFactory', () => { + let config; + let expirationDays; + let homeDir; + let getCertificate; + let validateZeroSslCertificate; + 
+ beforeEach(function beforeEach() { + config = { + get: this.sinon.stub(), + getName: this.sinon.stub(), + }; + + expirationDays = 30; + + homeDir = { + joinPath: this.sinon.stub(), + }; + + getCertificate = this.sinon.stub(); + + config.getName.returns('my-config'); + + homeDir.joinPath.callsFake((...args) => path.join('/home/dir', ...args)); + + config.get.withArgs('platform.gateway.ssl.providerConfigs.zerossl.apiKey').returns('test-api-key'); + config.get.withArgs('externalIp').returns('1.2.3.4'); + config.get.withArgs('platform.gateway.ssl.providerConfigs.zerossl.id').returns('certificate-id'); + + this.sinon.stub(fs, 'existsSync').returns(true); + + validateZeroSslCertificate = validateZeroSslCertificateFactory(homeDir, getCertificate); + }); + + it('should return API_KEY_IS_NOT_SET error when apiKey is not set', async () => { + config.get.withArgs('platform.gateway.ssl.providerConfigs.zerossl.apiKey').returns(null); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.API_KEY_IS_NOT_SET); + }); + + it('should return EXTERNAL_IP_IS_NOT_SET error when externalIp is not set', async () => { + config.get.withArgs('externalIp').returns(null); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.EXTERNAL_IP_IS_NOT_SET); + }); + + it('should return CERTIFICATE_ID_IS_NOT_SET error when certificateId is not set', async () => { + config.get.withArgs('platform.gateway.ssl.providerConfigs.zerossl.id').returns(null); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.CERTIFICATE_ID_IS_NOT_SET); + }); + + it('should return PRIVATE_KEY_IS_NOT_PRESENT error when private key file is not present', async function it() { + const sslConfigDir = path.join('/home/dir', 'my-config', 'platform', 'gateway', 'ssl'); + const privateKeyFilePath = path.join(sslConfigDir, 'private.key'); + + fs.existsSync.withArgs(privateKeyFilePath).returns(false); + + getCertificate.resolves({ + common_name: '1.2.3.4', + status: 'issued', + isExpiredInDays: this.sinon.stub().returns(false), + }); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.PRIVATE_KEY_IS_NOT_PRESENT); + }); + + it('should return EXTERNAL_IP_MISMATCH error when certificate common_name does not match externalIp', async function it() { + const certificate = { + common_name: '5.6.7.8', + status: 'issued', + isExpiredInDays: this.sinon.stub().returns(false), + }; + + getCertificate.resolves(certificate); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.EXTERNAL_IP_MISMATCH); + }); + + it('should return CERTIFICATE_IS_NOT_VALIDATED error when certificate status is pending_validation', async function it() { + const certificate = { + common_name: '1.2.3.4', + status: 'pending_validation', + isExpiredInDays: this.sinon.stub().returns(false), + }; + + getCertificate.resolves(certificate); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.CERTIFICATE_IS_NOT_VALIDATED); + expect(result.data.certificate).to.equal(certificate); + }); + + it('should return CERTIFICATE_IS_NOT_VALIDATED error when certificate status is draft', async function it() { + const certificate = { + common_name: '1.2.3.4', + status: 'draft', + isExpiredInDays: this.sinon.stub().returns(false), + }; + + 
getCertificate.resolves(certificate); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.CERTIFICATE_IS_NOT_VALIDATED); + expect(result.data.certificate).to.equal(certificate); + }); + + it('should return CSR_FILE_IS_NOT_PRESENT error when certificate is not issued and csr file is not present', async function it() { + const certificate = { + common_name: '1.2.3.4', + status: 'revoked', + isExpiredInDays: this.sinon.stub().returns(false), + }; + + getCertificate.resolves(certificate); + + const sslConfigDir = path.join('/home/dir', 'my-config', 'platform', 'gateway', 'ssl'); + const csrFilePath = path.join(sslConfigDir, 'csr.pem'); + + fs.existsSync.withArgs(csrFilePath).returns(false); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.CSR_FILE_IS_NOT_PRESENT); + }); + + it('should return CERTIFICATE_IS_NOT_VALID error when certificate is not issued and csr file is present', async function it() { + const certificate = { + common_name: '1.2.3.4', + status: 'revoked', + isExpiredInDays: this.sinon.stub().returns(false), + }; + + getCertificate.resolves(certificate); + + this.sinon.stub(fs, 'readFileSync').returns('csr content'); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.CERTIFICATE_IS_NOT_VALID); + expect(result.data.csr).to.equal('csr content'); + }); + + it('should return CERTIFICATE_EXPIRES_SOON error when certificate is expiring soon and csr file is present', async function it() { + const certificate = { + common_name: '1.2.3.4', + status: 'issued', + isExpiredInDays: this.sinon.stub().returns(true), + }; + + getCertificate.resolves(certificate); + + this.sinon.stub(fs, 'readFileSync').returns('csr content'); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.equal(ERRORS.CERTIFICATE_EXPIRES_SOON); + expect(result.data.csr).to.equal('csr content'); + }); + + it('should return data when certificate is valid and not expiring soon', async function it() { + const certificate = { + common_name: '1.2.3.4', + status: 'issued', + isExpiredInDays: this.sinon.stub().returns(false), + }; + + getCertificate.resolves(certificate); + + const result = await validateZeroSslCertificate(config, expirationDays); + + expect(result.error).to.be.undefined(); + expect(result.data).to.exist(); + expect(result.data.certificate).to.equal(certificate); + }); +}); diff --git a/packages/js-dapi-client/lib/methods/platform/PlatformMethodsFacade.js b/packages/js-dapi-client/lib/methods/platform/PlatformMethodsFacade.js index 035b7afbfc..8de255abde 100644 --- a/packages/js-dapi-client/lib/methods/platform/PlatformMethodsFacade.js +++ b/packages/js-dapi-client/lib/methods/platform/PlatformMethodsFacade.js @@ -15,6 +15,7 @@ const getIdentityNonceFactory = require('./getIdentityNonce/getIdentityNonceFact const getIdentityKeysFactory = require('./getIdentityKeys/getIdentityKeysFactory'); const getTotalCreditsInPlatformFactory = require('./getTotalCreditsInPlatform/getTotalCreditsInPlatformFactory'); const getStatusFactory = require('./getStatus/getStatusFactory'); +const getIdentityBalanceFactory = require('./getIdentityBalance/getIdentityBalanceFactory'); class PlatformMethodsFacade { /** @@ -40,6 +41,7 @@ class PlatformMethodsFacade { this.getIdentityKeys = getIdentityKeysFactory(grpcTransport); this.getTotalCreditsInPlatform = 
getTotalCreditsInPlatformFactory(grpcTransport); this.getStatus = getStatusFactory(grpcTransport); + this.getIdentityBalance = getIdentityBalanceFactory(grpcTransport); } } diff --git a/packages/js-dapi-client/lib/methods/platform/getEpochsInfo/getEpochsInfoFactory.js b/packages/js-dapi-client/lib/methods/platform/getEpochsInfo/getEpochsInfoFactory.js index 9ae984dbc9..9982c6f9e6 100644 --- a/packages/js-dapi-client/lib/methods/platform/getEpochsInfo/getEpochsInfoFactory.js +++ b/packages/js-dapi-client/lib/methods/platform/getEpochsInfo/getEpochsInfoFactory.js @@ -28,7 +28,7 @@ function getEpochsInfoFactory(grpcTransport) { const getEpochInfosRequest = new GetEpochsInfoRequest(); getEpochInfosRequest.setV0( new GetEpochsInfoRequestV0() - .setStartEpoch(new UInt32Value([startEpoch])) + .setStartEpoch(typeof startEpoch === 'number' ? new UInt32Value([startEpoch]) : undefined) .setCount(count) .setAscending(!!options.ascending) .setProve(!!options.prove), diff --git a/packages/js-dapi-client/lib/methods/platform/getIdentityBalance/GetIdentityBalanceResponse.js b/packages/js-dapi-client/lib/methods/platform/getIdentityBalance/GetIdentityBalanceResponse.js new file mode 100644 index 0000000000..b5b4a59b31 --- /dev/null +++ b/packages/js-dapi-client/lib/methods/platform/getIdentityBalance/GetIdentityBalanceResponse.js @@ -0,0 +1,43 @@ +const AbstractResponse = require('../response/AbstractResponse'); +const InvalidResponseError = require('../response/errors/InvalidResponseError'); + +class GetIdentityBalanceResponse extends AbstractResponse { + /** + * @param {number} balance + * @param {Metadata} metadata + * @param {Proof} [proof] + */ + constructor(balance, metadata, proof = undefined) { + super(metadata, proof); + + this.balance = balance; + } + + /** + * @returns {number} + */ + getBalance() { + return this.balance; + } + + /** + * @param proto + * @returns {GetIdentityBalanceResponse} + */ + static createFromProto(proto) { + const balance = proto.getV0().getBalance(); + const { metadata, proof } = AbstractResponse.createMetadataAndProofFromProto(proto); + + if ((balance === null || balance === undefined) && !proof) { + throw new InvalidResponseError('Balance is not defined'); + } + + return new GetIdentityBalanceResponse( + balance, + metadata, + proof, + ); + } +} + +module.exports = GetIdentityBalanceResponse; diff --git a/packages/js-dapi-client/lib/methods/platform/getIdentityBalance/getIdentityBalanceFactory.js b/packages/js-dapi-client/lib/methods/platform/getIdentityBalance/getIdentityBalanceFactory.js new file mode 100644 index 0000000000..93862ccff9 --- /dev/null +++ b/packages/js-dapi-client/lib/methods/platform/getIdentityBalance/getIdentityBalanceFactory.js @@ -0,0 +1,71 @@ +const { + v0: { + PlatformPromiseClient, + GetIdentityBalanceRequest, + }, +} = require('@dashevo/dapi-grpc'); + +const GetIdentityBalanceResponse = require('./GetIdentityBalanceResponse'); +const InvalidResponseError = require('../response/errors/InvalidResponseError'); + +/** + * @param {GrpcTransport} grpcTransport + * @returns {getIdentityBalance} + */ +function getIdentityBalanceFactory(grpcTransport) { + /** + * Fetch the identity balance by id + * @typedef {getIdentityBalance} + * @param {Buffer} id + * @param {DAPIClientOptions & {prove: boolean}} [options] + * @returns {Promise} + */ + async function getIdentityBalance(id, options = {}) { + const { GetIdentityBalanceRequestV0 } = GetIdentityBalanceRequest; + const getIdentityBalanceRequest = new GetIdentityBalanceRequest(); + // need to convert 
objects inherited from Buffer to pure buffer as google protobuf + // doesn't support extended buffers + // https://github.com/protocolbuffers/protobuf/blob/master/js/binary/utils.js#L1049 + if (Buffer.isBuffer(id)) { + // eslint-disable-next-line no-param-reassign + id = Buffer.from(id); + } + + getIdentityBalanceRequest.setV0( + new GetIdentityBalanceRequestV0() + .setId(id) + .setProve(!!options.prove), + ); + + let lastError; + + // TODO: simple retry before the dapi versioning is properly implemented + for (let i = 0; i < 3; i += 1) { + try { + // eslint-disable-next-line no-await-in-loop + const getIdentityBalanceResponse = await grpcTransport.request( + PlatformPromiseClient, + 'getIdentityBalance', + getIdentityBalanceRequest, + options, + ); + + return GetIdentityBalanceResponse.createFromProto(getIdentityBalanceResponse); + } catch (e) { + if (e instanceof InvalidResponseError) { + lastError = e; + } else { + throw e; + } + } + } + + // If we made it past the cycle it means that the retry didn't work, + // and we're throwing the last error encountered + throw lastError; + } + + return getIdentityBalance; +} + +module.exports = getIdentityBalanceFactory; diff --git a/packages/js-dapi-client/test/integration/methods/platform/PlatformMethodsFacade.spec.js b/packages/js-dapi-client/test/integration/methods/platform/PlatformMethodsFacade.spec.js index cb2c2dd0fa..fbb1f32ed5 100644 --- a/packages/js-dapi-client/test/integration/methods/platform/PlatformMethodsFacade.spec.js +++ b/packages/js-dapi-client/test/integration/methods/platform/PlatformMethodsFacade.spec.js @@ -4,6 +4,7 @@ const { GetDataContractResponse, GetDocumentsResponse, GetIdentityResponse, + GetIdentityBalanceResponse, GetIdentityByPublicKeyHashResponse, GetIdentitiesContractKeysResponse, GetEpochsInfoResponse, @@ -27,6 +28,7 @@ const PlatformMethodsFacade = require('../../../../lib/methods/platform/Platform const { WaitForStateTransitionResultResponseV0 } = WaitForStateTransitionResultResponse; const { GetIdentityResponseV0 } = GetIdentityResponse; +const { GetIdentityBalanceResponseV0 } = GetIdentityBalanceResponse; const { GetIdentityByPublicKeyHashResponseV0 } = GetIdentityByPublicKeyHashResponse; const { GetIdentitiesContractKeysResponseV0 } = GetIdentitiesContractKeysResponse; const { GetDocumentsResponseV0 } = GetDocumentsResponse; @@ -318,4 +320,22 @@ describe('PlatformMethodsFacade', () => { expect(grpcTransportMock.request).to.be.calledOnce(); }); }); + + describe('#getIdentityBalance', () => { + it('should get identity balance', async () => { + const response = new GetIdentityBalanceResponse(); + + response.setV0( + new GetIdentityBalanceResponseV0() + .setMetadata(new ResponseMetadata()) + .setBalance(1337), + ); + + grpcTransportMock.request.resolves(response); + + await platformMethods.getIdentityBalance('41nthkqvHBLnqiMkSbsdTNANzYu9bgdv4etKoRUunY1M'); + + expect(grpcTransportMock.request).to.be.calledOnce(); + }); + }); }); diff --git a/packages/js-dapi-client/test/unit/methods/platform/getIdentityBalance/GetIdentityBalanceResponse.spec.js b/packages/js-dapi-client/test/unit/methods/platform/getIdentityBalance/GetIdentityBalanceResponse.spec.js new file mode 100644 index 0000000000..3df6adb6be --- /dev/null +++ b/packages/js-dapi-client/test/unit/methods/platform/getIdentityBalance/GetIdentityBalanceResponse.spec.js @@ -0,0 +1,126 @@ +const { + v0: { + GetIdentityBalanceResponse, + ResponseMetadata, + Proof: ProofResponse, + }, +} = require('@dashevo/dapi-grpc'); + +const GetIdentityBalanceResponseClass = 
require('../../../../../lib/methods/platform/getIdentityBalance/GetIdentityBalanceResponse'); +const getMetadataFixture = require('../../../../../lib/test/fixtures/getMetadataFixture'); +const InvalidResponseError = require('../../../../../lib/methods/platform/response/errors/InvalidResponseError'); +const getProofFixture = require('../../../../../lib/test/fixtures/getProofFixture'); +const Proof = require('../../../../../lib/methods/platform/response/Proof'); +const Metadata = require('../../../../../lib/methods/platform/response/Metadata'); + +describe('GetIdentityBalanceResponse', () => { + let getIdentityBalanceResponse; + let metadataFixture; + let balance; + let proto; + let proofFixture; + + beforeEach(async () => { + metadataFixture = getMetadataFixture(); + proofFixture = getProofFixture(); + balance = 1337; + + const { GetIdentityBalanceResponseV0 } = GetIdentityBalanceResponse; + proto = new GetIdentityBalanceResponse(); + + const metadata = new ResponseMetadata(); + metadata.setHeight(metadataFixture.height); + metadata.setCoreChainLockedHeight(metadataFixture.coreChainLockedHeight); + metadata.setTimeMs(metadataFixture.timeMs); + metadata.setProtocolVersion(metadataFixture.protocolVersion); + + proto.setV0( + new GetIdentityBalanceResponseV0() + .setBalance(balance) + .setMetadata(metadata), + ); + + getIdentityBalanceResponse = new GetIdentityBalanceResponseClass( + balance, + new Metadata(metadataFixture), + ); + }); + + it('should return Identity balance', () => { + const identityBalance = getIdentityBalanceResponse.getBalance(); + const identityProof = getIdentityBalanceResponse.getProof(); + + expect(identityBalance).to.equal(balance); + expect(identityProof).to.equal(undefined); + }); + + it('should return proof', () => { + getIdentityBalanceResponse = new GetIdentityBalanceResponseClass( + balance, + new Metadata(metadataFixture), + new Proof(proofFixture), + ); + + const identityBalance = getIdentityBalanceResponse.getBalance(); + const proof = getIdentityBalanceResponse.getProof(); + + expect(identityBalance).to.equal(balance); + expect(proof).to.be.an.instanceOf(Proof); + expect(proof.getGrovedbProof()).to.deep.equal(proofFixture.merkleProof); + expect(proof.getQuorumHash()).to.deep.equal(proofFixture.quorumHash); + expect(proof.getSignature()).to.deep.equal(proofFixture.signature); + expect(proof.getRound()).to.deep.equal(proofFixture.round); + }); + + it('should create an instance from proto', () => { + getIdentityBalanceResponse = GetIdentityBalanceResponseClass.createFromProto(proto); + expect(getIdentityBalanceResponse).to.be + .an.instanceOf(GetIdentityBalanceResponseClass); + expect(getIdentityBalanceResponse.getBalance()).to.equal(balance); + + expect(getIdentityBalanceResponse.getMetadata()) + .to.be.an.instanceOf(Metadata); + expect(getIdentityBalanceResponse.getMetadata().getHeight()) + .to.equal(metadataFixture.height); + expect(getIdentityBalanceResponse.getMetadata().getCoreChainLockedHeight()) + .to.equal(metadataFixture.coreChainLockedHeight); + + expect(getIdentityBalanceResponse.getProof()).to.equal(undefined); + }); + + it('should create an instance with proof from proto', () => { + const proofProto = new ProofResponse(); + + proofProto.setQuorumHash(proofFixture.quorumHash); + proofProto.setSignature(proofFixture.signature); + proofProto.setGrovedbProof(proofFixture.merkleProof); + proofProto.setRound(proofFixture.round); + + proto.getV0().setBalance(undefined); + proto.getV0().setProof(proofProto); + + getIdentityBalanceResponse = 
GetIdentityBalanceResponseClass.createFromProto(proto); + + expect(getIdentityBalanceResponse.getBalance()).to.equal(0); + expect(getIdentityBalanceResponse.getMetadata()).to.deep.equal(metadataFixture); + + const proof = getIdentityBalanceResponse.getProof(); + expect(proof).to.be.an.instanceOf(Proof); + expect(proof.getGrovedbProof()).to.deep.equal(proofFixture.merkleProof); + expect(proof.getQuorumHash()).to.deep.equal(proofFixture.quorumHash); + expect(proof.getSignature()).to.deep.equal(proofFixture.signature); + expect(proof.getRound()).to.deep.equal(proofFixture.round); + }); + + it('should throw InvalidResponseError if Metadata is not defined', () => { + proto.getV0().setMetadata(undefined); + + try { + getIdentityBalanceResponse = GetIdentityBalanceResponseClass.createFromProto(proto); + + expect.fail('should throw InvalidResponseError'); + } catch (e) { + expect(e).to.be.an.instanceOf(InvalidResponseError); + } + }); +}); diff --git a/packages/js-dapi-client/test/unit/methods/platform/getIdentityBalance/getIdentityBalanceFactory.spec.js b/packages/js-dapi-client/test/unit/methods/platform/getIdentityBalance/getIdentityBalanceFactory.spec.js new file mode 100644 index 0000000000..210d2a8a9f --- /dev/null +++ b/packages/js-dapi-client/test/unit/methods/platform/getIdentityBalance/getIdentityBalanceFactory.spec.js @@ -0,0 +1,156 @@ +const { + v0: { + PlatformPromiseClient, + GetIdentityBalanceRequest, + GetIdentityBalanceResponse, + ResponseMetadata, + Proof: ProofResponse, + }, +} = require('@dashevo/dapi-grpc'); + +const { GetIdentityBalanceResponseV0 } = GetIdentityBalanceResponse; + +const getIdentityBalanceFactory = require('../../../../../lib/methods/platform/getIdentityBalance/getIdentityBalanceFactory'); +const getMetadataFixture = require('../../../../../lib/test/fixtures/getMetadataFixture'); +const getProofFixture = require('../../../../../lib/test/fixtures/getProofFixture'); +const Proof = require('../../../../../lib/methods/platform/response/Proof'); + +describe('getIdentityBalanceFactory', () => { + let grpcTransportMock; + let getIdentityBalance; + let options; + let response; + let balance; + let identityId; + let metadataFixture; + let proofFixture; + let proofResponse; + + beforeEach(async function beforeEach() { + balance = 1337; + + identityId = Buffer.alloc(32).fill(0); + + metadataFixture = getMetadataFixture(); + proofFixture = getProofFixture(); + + const metadata = new ResponseMetadata(); + metadata.setHeight(metadataFixture.height); + metadata.setCoreChainLockedHeight(metadataFixture.coreChainLockedHeight); + metadata.setTimeMs(metadataFixture.timeMs); + metadata.setProtocolVersion(metadataFixture.protocolVersion); + + response = new GetIdentityBalanceResponse(); + + response.setV0( + new GetIdentityBalanceResponseV0() + .setBalance(balance) + .setMetadata(metadata), + ); + + proofResponse = new ProofResponse(); + + proofResponse.setQuorumHash(proofFixture.quorumHash); + proofResponse.setSignature(proofFixture.signature); + proofResponse.setGrovedbProof(proofFixture.merkleProof); + proofResponse.setRound(proofFixture.round); + + grpcTransportMock = { + request: this.sinon.stub().resolves(response), + }; + + getIdentityBalance = getIdentityBalanceFactory(grpcTransportMock); + + options = { + timeout: 1000, + }; + }); + + it('should return identity balance', async () => { + const result = await getIdentityBalance(identityId, options); + + const { GetIdentityBalanceRequestV0 } = GetIdentityBalanceRequest; + const request = new GetIdentityBalanceRequest(); + 
request.setV0( + new GetIdentityBalanceRequestV0() + .setId(identityId) + .setProve(false), + ); + + expect(grpcTransportMock.request).to.be.calledOnceWithExactly( + PlatformPromiseClient, + 'getIdentityBalance', + request, + options, + ); + expect(result.getBalance()).to.deep.equal(balance); + expect(result.getMetadata()).to.deep.equal(metadataFixture); + expect(result.getProof()).to.equal(undefined); + }); + + it('should return proof', async () => { + options.prove = true; + response.getV0().setBalance(undefined); + response.getV0().setProof(proofResponse); + + const result = await getIdentityBalance(identityId, options); + + const { GetIdentityBalanceRequestV0 } = GetIdentityBalanceRequest; + const request = new GetIdentityBalanceRequest(); + request.setV0( + new GetIdentityBalanceRequestV0() + .setId(identityId) + .setProve(true), + ); + + expect(grpcTransportMock.request).to.be.calledOnceWithExactly( + PlatformPromiseClient, + 'getIdentityBalance', + request, + options, + ); + + expect(result.getBalance()).to.deep.equal(0); + + expect(result.getMetadata()).to.deep.equal(metadataFixture); + + expect(result.getProof()).to.be.an.instanceOf(Proof); + expect(result.getProof().getGrovedbProof()).to.deep.equal(proofFixture.merkleProof); + expect(result.getProof().getQuorumHash()).to.deep.equal(proofFixture.quorumHash); + expect(result.getProof().getSignature()).to.deep.equal(proofFixture.signature); + expect(result.getProof().getRound()).to.deep.equal(proofFixture.round); + expect(result.getMetadata()).to.deep.equal(metadataFixture); + expect(result.getMetadata().getHeight()).to.equal(metadataFixture.height); + expect(result.getMetadata().getCoreChainLockedHeight()).to.equal( + metadataFixture.coreChainLockedHeight, + ); + }); + + it('should throw unknown error', async () => { + const error = new Error('Unknown found'); + + grpcTransportMock.request.throws(error); + + const { GetIdentityBalanceRequestV0 } = GetIdentityBalanceRequest; + const request = new GetIdentityBalanceRequest(); + request.setV0( + new GetIdentityBalanceRequestV0() + .setId(identityId) + .setProve(false), + ); + + try { + await getIdentityBalance(identityId, options); + + expect.fail('should throw unknown error'); + } catch (e) { + expect(e).to.deep.equal(error); + expect(grpcTransportMock.request).to.be.calledOnceWithExactly( + PlatformPromiseClient, + 'getIdentityBalance', + request, + options, + ); + } + }); +});
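
For context on how the new client method tested above is meant to be consumed, here is a minimal usage sketch; it is not part of the diff. It assumes the @dashevo/dapi-client wiring shown in this changeset (client.platform.getIdentityBalance) and uses a placeholder 32-byte identity id and default DAPIClient options.

// Illustrative only: consuming the new getIdentityBalance platform method.
// The identity id is a placeholder; default DAPIClient options are assumed.
const DAPIClient = require('@dashevo/dapi-client');

async function printIdentityBalance() {
  const client = new DAPIClient();

  // getIdentityBalanceFactory expects the id as a Buffer
  const identityId = Buffer.alloc(32);

  const response = await client.platform.getIdentityBalance(identityId, { prove: false });

  // GetIdentityBalanceResponse exposes the balance and the response metadata
  console.log('Balance (credits):', response.getBalance());
  console.log('Block height:', response.getMetadata().getHeight());
}

printIdentityBalance().catch(console.error);

With { prove: true } the server omits the plain balance field and returns a proof instead, which is why the unit test above expects getBalance() to fall back to 0 in that case.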
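Similarly, a hedged sketch of how a dashmate task might branch on the { error, data } result of the validateZeroSslCertificate helper tested earlier in this changeset. Only ERRORS, the factory signature and the return shape come from the diff; the import path and the returned action strings are illustrative.

// Illustrative only: branching on validateZeroSslCertificate results.
// ERRORS and the (config, expirationDays) signature come from the diff above;
// the 'action' strings returned here are placeholders.
import validateZeroSslCertificateFactory, { ERRORS } from './validateZeroSslCertificateFactory.js';

export default async function checkZeroSslCertificate(config, homeDir, getCertificate) {
  const validateZeroSslCertificate = validateZeroSslCertificateFactory(homeDir, getCertificate);

  // 30 days is the expiration threshold used by the new unit tests
  const { error, data } = await validateZeroSslCertificate(config, 30);

  if (error === undefined) {
    // Certificate is valid; at most the bundle file needs to be (re)downloaded
    return { action: 'download-bundle', certificate: data.certificate };
  }

  if (error === ERRORS.CERTIFICATE_EXPIRES_SOON || error === ERRORS.CERTIFICATE_IS_NOT_VALID) {
    // The helper has already read data.csr from disk for these two cases
    return { action: 'request-new-certificate', csr: data.csr };
  }

  if (error === ERRORS.CERTIFICATE_IS_NOT_VALIDATED) {
    // Certificate exists but validation still has to be passed
    return { action: 'pass-validation', certificate: data.certificate };
  }

  // Remaining errors are configuration problems (missing API key, external IP,
  // certificate id, private key or CSR file)
  throw new Error(`ZeroSSL certificate cannot be used: ${error}`);
}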