diff --git a/.gitignore b/.gitignore index 3c3629e6..1dcef2d9 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ node_modules +.env \ No newline at end of file diff --git a/README.md b/README.md index b19c3261..c5fc2699 100644 --- a/README.md +++ b/README.md @@ -6,13 +6,21 @@ New improved backend for https://polkastats.io! ### Table of Contents - * [Installation Instructions](#installation-instructions) - * [Usage Instructions](#usage-instructions) - * [List of current containers](#list-of-current-containers) - * [Updating containers](#updating-containers) - * [Crawler](#crawler) - * [Phragmen](#phragmen) - +- [PolkaStats Backend v3](#polkastats-backend-v3) + - [Table of Contents](#table-of-contents) + - [Installation Instructions](#installation-instructions) + - [Usage Instructions](#usage-instructions) + - [List of current containers](#list-of-current-containers) + - [Updating containers](#updating-containers) + - [Crawler](#crawler) + - [Phragmen](#phragmen) + - [Hasura demo](#hasura-demo) + - [Query example. Static](#query-example-static) + - [Subscription example. Dynamic](#subscription-example-dynamic) + - [Configuration](#configuration) + - [Substrate](#substrate) + - [Database](#database) + - [Crawlers](#crawlers) @@ -27,10 +35,13 @@ npm install ## Usage Instructions To launch all docker containers at once: + ``` npm run docker ``` + To run them separately: + ``` npm run docker: ``` @@ -41,7 +52,7 @@ npm run docker: - postgres - graphql-engine - crawler -- phragmen (temporarily disabled) +- phragmen (temporarily disabled) ## Updating containers @@ -104,6 +115,7 @@ In order to check it and see its power you could start a new subscription or jus ### Query example. Static - Block query example: + ``` query { block { @@ -122,6 +134,7 @@ query { ``` - Rewards query example: + ``` query { rewards { @@ -134,6 +147,7 @@ query { ``` - Validator by number of nominators example: + ``` query { validator_num_nominators { @@ -145,6 +159,7 @@ query { ``` - Account query example: + ``` query { account { @@ -158,6 +173,7 @@ query { ### Subscription example. 
Dynamic - Block subscription example: + ``` subscription { block { @@ -170,6 +186,7 @@ subscription { ``` - Validator active subscription example: + ``` subscription MySubscription { validator_active { @@ -183,6 +200,7 @@ subscription MySubscription { ``` - Account subscription example: + ``` subscription MySubscription { account { @@ -191,3 +209,46 @@ subscription MySubscription { } } ``` +
+## Configuration
+
+You can customize your configuration through the following environment variables:
+
+### Substrate
+
+| Env name | Description | Default Value |
+| --------------- | --------------------- | ------------------------ |
+| WS_PROVIDER_URL | Substrate node to use | ws://substrate-node:9944 |
+
+### Database
+
+| Env name | Description | Default Value |
+| ----------------- | ------------------------ | ------------- |
+| POSTGRES_USER | PostgreSQL username | polkastats |
+| POSTGRES_PASSWORD | PostgreSQL user password | polkastats |
+| POSTGRES_HOST | PostgreSQL host | postgres |
+| POSTGRES_PORT | PostgreSQL port | 5432 |
+| POSTGRES_DATABASE | PostgreSQL database name | polkastats |
+
+### Crawlers
+
+| Env name | Description | Default Value |
+| --------------------------------------- | -------------------------------------------- | ------------------------------------------------ |
+| CRAWLER_BLOCK_LISTENER_POLLING_TIME_MS | Polling time for the block harvester (ms) | 60000 |
+| CRAWLER_ACTIVE_ACCOUNTS_POLLING_TIME_MS | Polling time for accounts crawler (ms) | 3600000 |
+| CRAWLER_PHRAGMEN_POLLING_TIME_MS | Polling time for phragmen executions (ms) | 300000 |
+| CRAWLER_PHRAGMEN_OUTPUT_DIR | Directory to store the phragmen JSON result | /tmp/phragmen |
+| CRAWLER_PHRAGMEN_BINARY_PATH | Path to the phragmen executable | /usr/app/polkastats-backend-v3/offline-phragmen |
+
+You can also disable specific crawlers with the following environment variables:
+
+- `CRAWLER_SYSTEM_DISABLE`
+- `CRAWLER_BLOCK_LISTENER_DISABLE`
+- `CRAWLER_BLOCK_HARVESTER_DISABLE`
+- `CRAWLER_STAKING_DISABLE`
+- `CRAWLER_ACTIVE_ACCOUNTS_DISABLE`
+- `CRAWLER_CHAIN_DISABLE`
+- `CRAWLER_REWARDS_DISABLE`
+- `CRAWLER_PHRAGMEN_DISABLE`
+
+For instance, if you want to disable the phragmen crawler, just set `CRAWLER_PHRAGMEN_DISABLE=true`.
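For reference, a minimal `.env` sketch pulling together the variables documented above (values are illustrative; any variable left unset falls back to the default shown in the tables, as implemented in backend.config.js):

```
# Substrate node endpoint
WS_PROVIDER_URL=ws://substrate-node:9944

# PostgreSQL connection
POSTGRES_USER=polkastats
POSTGRES_PASSWORD=polkastats
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_DATABASE=polkastats

# Crawler tuning (milliseconds)
CRAWLER_ACTIVE_ACCOUNTS_POLLING_TIME_MS=3600000

# Disable crawlers you do not need (any non-empty value disables)
CRAWLER_PHRAGMEN_DISABLE=true
```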
diff --git a/backend.config.js b/backend.config.js index 54cf9b68..b5c445f9 100644 --- a/backend.config.js +++ b/backend.config.js @@ -1,56 +1,77 @@ -const DEFAULT_WS_PROVIDER_URL = 'ws://substrate-node:9944'; +require('dotenv').config(); module.exports = { - wsProviderUrl: process.env.WS_PROVIDER_URL || DEFAULT_WS_PROVIDER_URL, + wsProviderUrl: process.env.WS_PROVIDER_URL || 'ws://substrate-node:9944', postgresConnParams: { - user: 'polkastats', - host: 'postgres', - database: 'polkastats', - password: 'polkastats', - port: 5432, + user: process.env.POSTGRES_USER || 'polkastats', + host: process.env.POSTGRES_HOST || 'postgres', + database: process.env.POSTGRES_DATABASE || 'polkastats', + password: process.env.POSTGRES_PASSWORD || 'polkastats', + port: process.env.POSTGRES_PORT || 5432, }, crawlers: [ + + { + enabled: !process.env.CRAWLER_SYSTEM_DISABLE, + module: require('./lib/crawlers/system'), + }, + { - enabled: true, - module: require('./lib/crawlers/blockListener.js'), + enabled: !process.env.CRAWLER_BLOCK_LISTENER_DISABLE, + module: require('./lib/crawlers/blockListener'), }, { - enabled: true, - module: require('./lib/crawlers/blockHarvester.js'), + enabled: !process.env.CRAWLER_BLOCK_HARVESTER_DISABLE, + module: require('./lib/crawlers/blockHarvester'), config: { - pollingTime: 1 * 60 * 1000, + pollingTime: + parseInt(process.env.CRAWLER_BLOCK_LISTENER_POLLING_TIME_MS) || + 1 * 60 * 1000, }, }, { - enabled: true, - module: require('./lib/crawlers/staking.js'), + enabled: !process.env.CRAWLER_STAKING_DISABLE, + module: require('./lib/crawlers/staking'), }, { - enabled: true, - module: require('./lib/crawlers/activeAccounts.js'), + enabled: !process.env.CRAWLER_ACTIVE_ACCOUNTS_DISABLE, + module: require('./lib/crawlers/activeAccounts'), config: { - pollingTime: 10 * 60 * 1000, + pollingTime: + parseInt(process.env.CRAWLER_ACTIVE_ACCOUNTS_POLLING_TIME_MS) || + 60 * 60 * 1000, }, }, { - enabled: true, - module: require('./lib/crawlers/rewards.js'), + enabled: !process.env.CRAWLER_CHAIN_DISABLE, + module: require('./lib/crawlers/chain'), + }, + + { + enabled: !process.env.CRAWLER_REWARDS_DISABLE, + module: require('./lib/crawlers/rewards'), }, { - enabled: false, - module: require('./lib/crawlers/phragmen.js'), + enabled: !process.env.CRAWLER_PHRAGMEN_DISABLE, + module: require('./lib/crawlers/phragmen'), config: { - wsProviderUrl: process.env.WS_PROVIDER_URL || DEFAULT_WS_PROVIDER_URL, - pollingTime: 5 * 60 * 1000, - phragmenOutputDir: '/tmp/phragmen', - offlinePhragmenPath: '/usr/app/polkastats-backend-v3/offline-phragmen', + wsProviderUrl: + process.env.WS_PROVIDER_URL || 'ws://substrate-node:9944', + pollingTime: + parseInt(process.env.CRAWLER_PHRAGMEN_POLLING_TIME_MS) || + 5 * 60 * 1000, + phragmenOutputDir: + process.env.CRAWLER_PHRAGMEN_OUTPUT_DIR || '/tmp/phragmen', + offlinePhragmenPath: + process.env.CRAWLER_PHRAGMEN_BINARY_PATH || + '/usr/app/polkastats-backend-v3/offline-phragmen', }, }, ], diff --git a/docker/polkastats-backend/sql/polkastats.sql b/docker/polkastats-backend/sql/polkastats.sql index f695bcb4..5d982984 100644 --- a/docker/polkastats-backend/sql/polkastats.sql +++ b/docker/polkastats-backend/sql/polkastats.sql @@ -1,131 +1,440 @@ GRANT ALL PRIVILEGES ON DATABASE polkastats TO polkastats; CREATE TABLE IF NOT EXISTS block ( - block_number BIGINT NOT NULL, - block_author VARCHAR(47) NOT NULL, - block_author_name VARCHAR(100) NOT NULL, - block_hash VARCHAR(66) NOT NULL, - parent_hash VARCHAR(66) NOT NULL, - extrinsics_root VARCHAR(66) NOT NULL, - state_root 
VARCHAR(66) NOT NULL, - current_era BIGINT NOT NULL, - current_index BIGINT NOT NULL, - era_length BIGINT NOT NULL, - era_progress BIGINT NOT NULL, - is_epoch BOOLEAN NOT NULL, - session_length BIGINT NOT NULL, - session_per_era INT NOT NULL, - session_progress BIGINT NOT NULL, - validator_count INT NOT NULL, - spec_name VARCHAR(100) NOT NULL, - spec_version INT NOT NULL, - total_events INT NOT NULL, - num_transfers INT NOT NULL, - new_accounts INT NOT NULL, - timestamp BIGINT NOT NULL, - PRIMARY KEY ( block_number ) + block_number BIGINT NOT NULL, + block_number_finalized BIGINT NOT NULL, + block_author VARCHAR(47) NOT NULL, + block_author_name VARCHAR(100) NOT NULL, + block_hash VARCHAR(66) NOT NULL, + parent_hash VARCHAR(66) NOT NULL, + extrinsics_root VARCHAR(66) NOT NULL, + state_root VARCHAR(66) NOT NULL, + current_era BIGINT NOT NULL, + current_index BIGINT NOT NULL, + era_length BIGINT NOT NULL, + era_progress BIGINT NOT NULL, + is_epoch BOOLEAN NOT NULL, + session_length BIGINT NOT NULL, + session_per_era INT NOT NULL, + session_progress BIGINT NOT NULL, + validator_count INT NOT NULL, + spec_name VARCHAR(100) NOT NULL, + spec_version INT NOT NULL, + total_events INT NOT NULL, + num_transfers INT NOT NULL, + new_accounts INT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number ) ); CREATE TABLE IF NOT EXISTS event ( - block_number BIGINT NOT NULL, - event_index INT NOT NULL, - section VARCHAR(100) NOT NULL, - method VARCHAR(100) NOT NULL, - phase VARCHAR(100) NOT NULL, - data TEXT NOT NULL, - PRIMARY KEY ( block_number, event_index ) + block_number BIGINT NOT NULL, + event_index INT NOT NULL, + section VARCHAR(100) NOT NULL, + method VARCHAR(100) NOT NULL, + phase VARCHAR(100) NOT NULL, + data TEXT NOT NULL, + PRIMARY KEY ( block_number, event_index ) ); CREATE TABLE IF NOT EXISTS phragmen ( - block_height BIGINT NOT NULL, - phragmen_json TEXT NOT NULL, - timestamp BIGINT NOT NULL, - PRIMARY KEY ( block_height ) + block_height BIGINT NOT NULL, + phragmen_json TEXT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_height ) ); CREATE TABLE IF NOT EXISTS rewards ( - block_number BIGINT NOT NULL, - era_index INT NOT NULL, - stash_id VARCHAR(50), - commission BIGINT, - era_rewards TEXT, - era_points INT NOT NULL, - stake_info TEXT, - estimated_payout BIGINT NOT NULL, - timestamp INT NOT NULL, - PRIMARY KEY ( block_number, era_index, stash_id ) + block_number BIGINT NOT NULL, + era_index INT NOT NULL, + stash_id VARCHAR(50), + commission BIGINT, + era_rewards TEXT, + era_points INT NOT NULL, + stake_info TEXT, + estimated_payout BIGINT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, era_index, stash_id ) ); CREATE TABLE IF NOT EXISTS validator_staking ( - block_number BIGINT NOT NULL, - session_index INT NOT NULL, - json TEXT NOT NULL, - timestamp INT NOT NULL, - PRIMARY KEY ( block_number, session_index ) + block_number BIGINT NOT NULL, + session_index INT NOT NULL, + json TEXT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, session_index ) ); CREATE TABLE IF NOT EXISTS intention_staking ( - block_number BIGINT NOT NULL, - session_index INT NOT NULL, - json TEXT NOT NULL, - timestamp INT NOT NULL, - PRIMARY KEY ( block_number, session_index ) + block_number BIGINT NOT NULL, + session_index INT NOT NULL, + json TEXT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, session_index ) ); CREATE TABLE IF NOT EXISTS validator_bonded ( - block_number BIGINT NOT NULL, - session_index INT NOT NULL, - 
account_id VARCHAR(47) NOT NULL, - amount BIGINT NOT NULL, - timestamp INT NOT NULL, - PRIMARY KEY ( block_number, session_index, account_id ) + block_number BIGINT NOT NULL, + session_index INT NOT NULL, + account_id VARCHAR(47) NOT NULL, + amount BIGINT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, session_index, account_id ) ); CREATE TABLE IF NOT EXISTS validator_selfbonded ( - block_number BIGINT NOT NULL, - session_index INT NOT NULL, - account_id VARCHAR(47) NOT NULL, - amount BIGINT NOT NULL, - timestamp INT NOT NULL, - PRIMARY KEY ( block_number, session_index, account_id ) + block_number BIGINT NOT NULL, + session_index INT NOT NULL, + account_id VARCHAR(47) NOT NULL, + amount BIGINT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, session_index, account_id ) ); CREATE TABLE IF NOT EXISTS validator_num_nominators ( - block_number BIGINT NOT NULL, - session_index INT NOT NULL, - account_id VARCHAR(47) NOT NULL, - nominators INT NOT NULL, - timestamp INT NOT NULL, - PRIMARY KEY ( block_number, session_index, account_id ) + block_number BIGINT NOT NULL, + session_index INT NOT NULL, + account_id VARCHAR(47) NOT NULL, + nominators INT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, session_index, account_id ) ); CREATE TABLE IF NOT EXISTS validator_era_points ( - block_number BIGINT NOT NULL, - session_index INT NOT NULL, - account_id VARCHAR(47) NOT NULL, - era_points INT NOT NULL, - timestamp INT NOT NULL, - PRIMARY KEY ( block_number, session_index, account_id ) + block_number BIGINT NOT NULL, + session_index INT NOT NULL, + account_id VARCHAR(47) NOT NULL, + era_points INT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, session_index, account_id ) ); CREATE TABLE IF NOT EXISTS validator_active ( - block_number BIGINT NOT NULL, - session_index INT NOT NULL, - account_id VARCHAR(47) NOT NULL, - active BOOLEAN NOT NULL, - timestamp INT NOT NULL, - PRIMARY KEY ( block_number, session_index, account_id ) + block_number BIGINT NOT NULL, + session_index INT NOT NULL, + account_id VARCHAR(47) NOT NULL, + active BOOLEAN NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, session_index, account_id ) +); + +CREATE TABLE IF NOT EXISTS intention_bonded ( + block_number BIGINT NOT NULL, + session_index INT NOT NULL, + account_id VARCHAR(47) NOT NULL, + amount BIGINT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_number, session_index, account_id ) ); CREATE TABLE IF NOT EXISTS account ( - account_id VARCHAR(100) NOT NULL, - identity TEXT NOT NULL, - balances TEXT NOT NULL, - timestamp BIGINT NOT NULL, - block_height BIGINT NOT NULL, - PRIMARY KEY ( account_id ) + account_id VARCHAR(47) NOT NULL, + identity TEXT NOT NULL, + balances TEXT NOT NULL, + timestamp BIGINT NOT NULL, + block_height BIGINT NOT NULL, + is_staking BOOLEAN NOT NULL, + PRIMARY KEY ( account_id ) +); + +CREATE TABLE IF NOT EXISTS system ( + block_height BIGINT NOT NULL, + chain VARCHAR(100) NOT NULL, + node_name VARCHAR(100) NOT NULL, + node_version VARCHAR(100) NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_height ) +); + +CREATE TABLE IF NOT EXISTS chain ( + block_height BIGINT NOT NULL, + session_index INT NOT NULL, + total_issuance BIGINT NOT NULL, + active_accounts BIGINT NOT NULL, + timestamp BIGINT NOT NULL, + PRIMARY KEY ( block_height ) +); + +CREATE TABLE IF NOT EXISTS polkastats_identity ( + account_id VARCHAR(47) NOT NULL, + username VARCHAR(100) NOT NULL, + username_cased VARCHAR(100) 
NOT NULL, + full_name VARCHAR(100) NOT NULL, + location VARCHAR(100) NOT NULL, + bio VARCHAR(200) NOT NULL, + logo VARCHAR(100) NOT NULL, + website VARCHAR(100) NOT NULL, + twitter VARCHAR(100) NOT NULL, + github VARCHAR(100) NOT NULL, + created BIGINT NOT NULL, + updated BIGINT NOT NULL, + PRIMARY KEY ( account_id ) ); +INSERT INTO polkastats_identity VALUES + ( + 'CanLB42xJughpTRC1vXStUryjWYkE679emign1af47QnAQC', + '5chdn', + '5chdn', + 'Afri Schoedon', + 'Berlin, Germany.', 'Everything Ethereum at Parity.', + 'https://s3.amazonaws.com/keybase_processed_uploads/e45398395dc357c5920514fba64ffe05_360_360.jpg', + 'http://5chdn.co', + '', + 'https://github.com/5chdn', + 1587202202, + 1587202202 + ), + ( + 'CoqysGbay3t3Q7hXgEmGJJquhYYpo8PqLwvW1WsUwR7KvXm', + 'deleganetworks', + 'DelegaNetworks', + 'Delega Networks', + 'Blockchain', + 'Your trusted PoS validator', + 'https://s3.amazonaws.com/keybase_processed_uploads/9769abccf1bee6c032fb5be50d6c3c05_360_360.jpg', + '', + 'https://twitter.com/deleganetworks', + '', + 1587202203, + 1587202203 + ), + ( + 'CsHw8cfzbnKdkCuUq24yuaPyJ1E5a55sNPqejJZ4h7CRtEs', + 'bneiluj', + 'bneiluj', + 'Julien Bouteloup', + 'London', + 'Engineer / Dev / Economist / crypto cyberpunk', + 'https://s3.amazonaws.com/keybase_processed_uploads/529c4d56e2e77b663224aca276549d05_360_360.jpg', + '', + 'https://twitter.com/bneiluj', + 'https://github.com/bneiluj', + 1587202203, + 1587202203 + ), + ( + 'D8rBgbN7NWa4k9Wa5aeupP2rjo6iYoHPTxxJU5ef35PUQJN', + 'pos_bakerz', + 'pos_bakerz', + 'POS Bakerz', + 'Based in Europe', + 'Secure, Efficient, and Reliable Staking-as-a-Service Provider', + 'https://s3.amazonaws.com/keybase_processed_uploads/f91a58793b597a3c76ecc58897e6e905_360_360.jpg', + '', + '', + 'https://github.com/posbakerz', + 1587202204, + 1587202204 + ), + ( + 'D9rwRxuG8xm8TZf5tgkbPxhhTJK5frCJU9wvp59VRjcMkUf', + 'forbole', + 'forbole', + 'Forbole', + 'Cosmos', + 'Forbole - An incentivised social ecosystem on blockchain', + 'https://s3.amazonaws.com/keybase_processed_uploads/f5b0771af36b2e3d6a196a29751e1f05_360_360.jpeg', + 'http://forbole.com', + '', + '', + 1587202205, + 1587202205 + ), + ( + 'DNDBcYD8zzqAoZEtgNzouVp2sVxsvqzD4UdB5WrAUwjqpL8', + 'simplyvc', + 'SimplyVC', + 'Simply VC', + 'Malta', + 'Simply VC consists of a team of security, cryptocurrency & business experts passionate about supporting the blockchain ecosystem.', + 'https://s3.amazonaws.com/keybase_processed_uploads/832fd8e95710fb345f084afb8aeace05_360_360.jpg', + 'https://simply-vc.com.mt', '', + 'https://github.com/SimplyVC', + 1587202205, + 1587202205 + ), + ( + 'DSpbbk6HKKyS78c4KDLSxCetqbwnsemv2iocVXwNe2FAvWC', + 'dragonstake', + 'dragonstake', + 'DragonStake', + 'Decentralized', + 'Trusted Blockchain Validators', + 'https://s3.amazonaws.com/keybase_processed_uploads/c62c205359734ddf9af1b03777703505_360_360.jpg', + 'http://dragonstake.io', + 'https://twitter.com/dragonstake', + 'https://github.com/dragonstake', + 1587202206, + 1587202206 + ), + ( + 'DTLcUu92NoQw4gg6VmNgXeYQiNywDhfYMQBPYg2Y1W6AkJF', + 'fgimenez', + 'fgimenez', + 'Federico Gimenez', + 'Madrid', + 'Infrastructure Lead @Web3 Foundation', + 'https://s3.amazonaws.com/keybase_processed_uploads/6975bd9d417a2088e6d8a6987a43a405_360_360.jpg', + '', + 'https://twitter.com/frgnieto', + 'https://github.com/fgimenez', + 1587202206, + 1587202206 + ), + ( + 'Dab4bfYTZRUDMWjYAUQuFbDreQ9mt7nULWu3Dw7jodbzVe9', + '5chdn', + '5chdn', + 'Afri Schoedon', + 'Berlin, Germany.', + 'Everything Ethereum at Parity.', + 
'https://s3.amazonaws.com/keybase_processed_uploads/e45398395dc357c5920514fba64ffe05_360_360.jpg', + 'http://5chdn.co', + '', + 'https://github.com/5chdn', + 1587202207, + 1587202207 + ), + ( + 'DbAdiLJQDFzLyaLsoFCzrpBLuaBXXqQKdpewUSxqiWJadmp', + 'inchain', + 'inchain', + 'InChainWorks', + 'null', + 'Trusted & Secure Blockchain Investment', + 'https://s3.amazonaws.com/keybase_processed_uploads/4de135b8a487f19d837486cf4ab14905_360_360.jpg', + 'https://inchain.works', + 'https://twitter.com/inchain_works', + 'https://github.com/abouzidi', + 1587202208, + 1587202208 + ), + ( + 'ET9SkhNZhY7KT474vkCEJtAjbgJdaqAGW4beeeUJyDQ3SnA', + 'dokiacapital', + 'dokiacapital', + 'Dokia Capital', + 'Earth', + 'Proof of Stake infrastructure provider. ⛓', + 'https://s3.amazonaws.com/keybase_processed_uploads/64dc60dc1e68999354f19558603eb305_360_360.jpg', + 'https://dokia.capital', + 'https://twitter.com/DokiaCapital', + '', + 1587202208, + 1587202208 + ), + ( + 'EdUs96fjEhyaTVxZsFo3fxEABLSpdopBFuhE7FFexCUyDv6', + 'lionstake', + 'LionStake', + 'LionStake 🦁', + 'León - Spain', 'Secure and reliable Proof of Stake Infrastructure Services', + 'https://s3.amazonaws.com/keybase_processed_uploads/10da34e641c449e417870335e9399f05_360_360.jpg', + '', + 'https://twitter.com/lion_stake', + 'https://github.com/lion-stake', + 1587202209, + 1587202209 + ), + ( + 'EqyCQvYn1cHBdzFVQHQeL1nHDcxHhjWR8V48KbDyHyuyCGV', + 'realgar', + 'Realgar', + 'realgar', + 'Italy', + 'Governance experimenter, validator, researcher\nUnited Networks of State Machines', + 'https://s3.amazonaws.com/keybase_processed_uploads/cf09c45d1245d186a6aad09112c20b05_360_360.jpg', + 'https://polkadot.pro', 'https://twitter.com/ProPolkadot', + 'https://github.com/Realgar', + 1587202209, + 1587202209 + ), + ( + 'ErhkFXudde5xXFVMGUtNpiPLvZ9zcvqM3ueRLukDdpjszys', + 'bitcat365', + 'bitcat365', + 'Bit Cat🐱', + 'China', + 'Secure and stable validator service from China team.', + 'https://s3.amazonaws.com/keybase_processed_uploads/e6d2c9be95cde136dcf0ade7238f1705_360_360.jpg', + '', + '', + '', + 1587202210, + 1587202210 + ), + ( + 'FcjmeNzPk3vgdENm1rHeiMCxFK96beUoi2kb59FmCoZtkGF', + 'gnossienli', + 'gnossienli', + 'gnossienli', + 'NL', + 'Kusama/Solana/Terra/Polkadot/Edgeware Validator/Coda/Near/', + 'https://s3.amazonaws.com/keybase_processed_uploads/eb746a3b7016604688933baba52cda05_360_360.jpg', + 'https://staker.space', 'https://twitter.com/stakerspace', + 'https://github.com/stakerspace', + 1587202210, + 1587202210 + ), + ( + 'GTzRQPzkcuynHgkEHhsPBFpKdh4sAacVRsnd8vYfPpTMeEY', + 'polkastats', + 'PolkaStats', + 'PolkaStats', + 'Decentralized hyperspace', + 'PolkaStats, Polkadot Kusama network statistics', + 'https://s3.amazonaws.com/keybase_processed_uploads/ceb4f17440dc978fa4faa64814290005_360_360.jpg', + 'https://polkastats.io', + 'https://twitter.com/polkastats', + 'https://github.com/mariopino', + 1587202211, + 1587202211 + ), + ( + 'GXaUd6gyCaEoBVzXnkLVGneCF3idnLNtNZs5RHTugb9dCpY', + 'stakedotfish', + 'stakedotfish', + 'stakefish', + 'null', + 'null', + 'https://s3.amazonaws.com/keybase_processed_uploads/a45f7abadca25326bc31157dd7aa3605_360_360.jpg', + 'https://stake.fish', + 'https://twitter.com/stakedotfish', + '', + 1587202212, + 1587202212 + ), + ( + 'GhoRyTGK583sJec8aSiyyJCsP2PQXJ2RK7iPGUjLtuX8XCn', + 'purestake', + 'purestake', + 'PureStake', + 'USA', + 'Secure & Reliable API, Infrastructure, and Validator Services for Next-Generation Proof of Stake Blockchain Networks', + 
'https://s3.amazonaws.com/keybase_processed_uploads/a175d8c5bc90a128db2305c0faa30d05_360_360.jpg', + 'https://www.purestake.com', + 'https://twitter.com/purestakeco', + '', + 1587202212, + 1587202212 + ), + ( + 'Ghw9swKjtCTZfEqEmzZkkqK4vEKQFz86HctEdGprQbNzpc7', + '5chdn', + '5chdn', + 'Afri Schoedon', + 'Berlin, Germany.', + 'Everything Ethereum at Parity.', + 'https://s3.amazonaws.com/keybase_processed_uploads/e45398395dc357c5920514fba64ffe05_360_360.jpg', + 'http://5chdn.co', + '', + 'https://github.com/5chdn', + 1587202213, + 1587202213 + ); + + CREATE INDEX IF NOT EXISTS validator_bonded_account_id_idx ON validator_bonded (account_id); CREATE INDEX IF NOT EXISTS validator_selfbonded_account_id_idx ON validator_selfbonded (account_id); CREATE INDEX IF NOT EXISTS validator_num_nominators_account_id_idx ON validator_num_nominators (account_id); @@ -139,8 +448,12 @@ GRANT ALL PRIVILEGES ON TABLE validator_selfbonded TO polkastats; GRANT ALL PRIVILEGES ON TABLE validator_num_nominators TO polkastats; GRANT ALL PRIVILEGES ON TABLE validator_era_points TO polkastats; GRANT ALL PRIVILEGES ON TABLE validator_active TO polkastats; +GRANT ALL PRIVILEGES ON TABLE intention_bonded TO polkastats; GRANT ALL PRIVILEGES ON TABLE block TO polkastats; GRANT ALL PRIVILEGES ON TABLE event TO polkastats; GRANT ALL PRIVILEGES ON TABLE rewards TO polkastats; GRANT ALL PRIVILEGES ON TABLE account TO polkastats; -GRANT ALL PRIVILEGES ON TABLE phragmen TO polkastats; \ No newline at end of file +GRANT ALL PRIVILEGES ON TABLE phragmen TO polkastats; +GRANT ALL PRIVILEGES ON TABLE polkastats_identity TO polkastats; +GRANT ALL PRIVILEGES ON TABLE system TO polkastats; +GRANT ALL PRIVILEGES ON TABLE chain TO polkastats; \ No newline at end of file diff --git a/docker/polkastats-backend/substrate-client/Dockerfile b/docker/polkastats-backend/substrate-client/Dockerfile index 73992e93..feb4a145 100644 --- a/docker/polkastats-backend/substrate-client/Dockerfile +++ b/docker/polkastats-backend/substrate-client/Dockerfile @@ -2,7 +2,7 @@ FROM phusion/baseimage:0.11 LABEL maintainer "@ColmenaLabs_svq" LABEL description="Small image with the Substrate binary." 
-ARG VERSION=v0.7.27 +ARG VERSION=v0.7.30 RUN apt-get update && apt-get install wget curl jq -y diff --git a/lib/BackendV3.js b/lib/BackendV3.js index 5dff520d..71aba290 100644 --- a/lib/BackendV3.js +++ b/lib/BackendV3.js @@ -1,9 +1,10 @@ -// @ts-check - +const pino = require('pino'); const { ApiPromise, WsProvider } = require('@polkadot/api'); const { Pool } = require('pg'); const { wait } = require('./utils.js'); +const logger = pino(); + class BackendV3 { constructor(config) { this.config = config; @@ -11,8 +12,7 @@ class BackendV3 { } async runCrawlers() { - - console.log(`[PolkaStats backend v3] - \x1b[32mStarting backend, waiting 15s...\x1b[0m`); + logger.info('Starting backend, waiting 15s...'); await wait(15000); const pool = await this.getPool(); @@ -23,41 +23,42 @@ class BackendV3 { api = await this.getPolkadotAPI(); } - console.log(`[PolkaStats backend v3] - \x1b[32mRunning crawlers\x1b[0m`); + logger.info('Running crawlers'); this.config.crawlers .filter(crawler => crawler.enabled) .forEach(crawler => crawler.module.start(api, pool, crawler.config)); - } + } async getPolkadotAPI() { - console.log(`[PolkaStats backend v3] - \x1b[32mConnecting to ${this.config.wsProviderUrl}\x1b[0m`); + logger.info(`Connecting to ${this.config.wsProviderUrl}`); const provider = new WsProvider(this.config.wsProviderUrl); const api = await ApiPromise.create({ provider }); await api.isReady; - console.log(`[PolkaStats backend v3] - \x1b[32mAPI is ready!\x1b[0m`); + + logger.info('API is ready!'); // Wait for node is synced let node; try { node = await api.rpc.system.health(); } catch { - console.log(`[PolkaStats backend v3] - \x1b[31mCan't connect to node! Waiting 10s...\x1b[0m`); + logger.error("Can't connect to node! Waiting 10s..."); api.disconnect(); await wait(10000); return false; } - console.log(`[PolkaStats backend v3] - \x1b[32mNode: ${JSON.stringify(node)}\x1b[0m`); + logger.info(`Node: ${JSON.stringify(node)}`); if (node && node.isSyncing.eq(false)) { // Node is synced! - console.log(`[PolkaStats backend v3] - \x1b[32mNode is synced!\x1b[0m`); + logger.info('Node is synced!'); this.nodeisSyncing = false; return api; } else { - console.log(`[PolkaStats backend v3] - \x1b[33mNode is not synced! Waiting 10s...\x1b[0m`); + logger.warn('Node is not synced! Waiting 10s...'); api.disconnect(); await wait(10000); } diff --git a/lib/crawlers/activeAccounts.js b/lib/crawlers/activeAccounts.js index 2f34204f..60cb2ba2 100644 --- a/lib/crawlers/activeAccounts.js +++ b/lib/crawlers/activeAccounts.js @@ -1,52 +1,182 @@ -// @ts-check -module.exports = { - start: async function (api, pool, config) { - console.log(`[PolkaStats backend v3] - \x1b[32mStarting active accounts crawler...\x1b[0m`); - - // Fetch active accounts - const accountKeys = await api.query.system.account.keys() - const accounts = accountKeys.map(key => key.args[0].toHuman()); - - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mProcessing ${accounts.length} active accounts\x1b[0m`); - - await accounts.forEach(async accountId => { - - // console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mProcessing account ${accountId}\x1b[0m`); - const accountInfo = await api.derive.accounts.info(accountId); - const identity = accountInfo.identity.display ? 
JSON.stringify(accountInfo.identity) : ``; - const balances = await api.derive.balances.all(accountId); - const block = await api.rpc.chain.getBlock(); - const blockNumber = block.block.header.number.toNumber(); - - let sql = `SELECT account_id FROM account WHERE account_id = '${accountId}'`; - let res = await pool.query(sql); - - if (res.rows.length > 0) { - const timestamp = new Date().getTime(); - sql = `UPDATE account SET identity = '${identity}', balances = '${JSON.stringify(balances)}', timestamp = '${timestamp}', block_height = '${blockNumber}' WHERE account_id = '${accountId}'`; - try { - // console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mUpdating account ${accountId}\x1b[0m`); - await pool.query(sql); - } catch (error) { - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError updating account ${accountId}\x1b[0m`); - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError: ${error}\x1b[0m`); - } - } else { - const timestamp = new Date().getTime(); - sql = `INSERT INTO account (account_id, identity, balances, timestamp, block_height) VALUES ('${accountId}', '${identity}', '${JSON.stringify(balances)}', '${timestamp}', '${blockNumber}');`; - try { - // console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mAdding account ${accountId}\x1b[0m`); - await pool.query(sql); - } catch (error) { - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError adding new account ${accountId}\x1b[0m`); - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError: ${error}\x1b[0m`); - } - } - }); - - setTimeout( - () => module.exports.start(api, pool, config), - config.pollingTime, - ); +const pino = require('pino'); +const { zip } = require('lodash'); + +const DEFAULT_POLLING_TIME_MS = 1 * 60 * 1000; + +const logger = pino(); + +const getAccountId = account => + account + .map(e => e.args) + .map(([e]) => e) + .map(e => e.toHuman()); + +const fetchBlockNumber = async api => { + const { block } = await api.rpc.chain.getBlock(); + + return block.header.number.toNumber(); +}; + +const fetchAccountIds = async api => { + return getAccountId(await api.query.system.account.keys()); +}; + +const fetchAccountIdentity = async (accountId, api) => { + const info = await api.derive.accounts.info(accountId); + + return info.identity.display ? JSON.stringify(info.identity) : ''; +}; + +const fetchValidators = async api => { + const validators = await api.query.staking.validators.keys(); + + return getAccountId(validators); +}; + +const fetchAccountStaking = async (accountId, api, validators) => { + if (validators.includes(accountId)) { + return true; } -} \ No newline at end of file + + const staking = await api.query.staking.nominators(accountId); + + return !staking.isEmpty; +}; + +const fetchAccountBalance = async (accountId, api) => { + const balance = await api.derive.balances.all(accountId); + + return JSON.stringify(balance); +}; + +/** + * Creates an account object from attributes. + * + * @param {String} id ID of the account + * @param {String} identity Identity on the blockchain of the account + * @param {String} balances Current balances of the account + * @param {Boolean} isStaking True if the account is staking + */ +const buildAccount = (id, identity, balances, isStaking) => { + return { id, identity, balances, isStaking }; +}; + +/** + * Applies a zip function to multiple lists containing related data and then + * groups related entries using a builder function. 
+ *
+ * @param {String} name Name of the attribute to store the builder result
+ * @param {Function} builder Function used to build the initial state object
+ */
+const prepareState = (name, builder) => itemsList => {
+  return zip(...itemsList)
+    .map(items => builder(...items))
+    .map(result => ({ [name]: result }));
+};
+
+/**
+ * Specific version of prepareState to initialize the state with an account
+ * object.
+ */
+const makeState = prepareState('account', buildAccount);
+
+/**
+ * Builds an upsert query: try to INSERT an entry into the database; if a
+ * conflict on the "account_id" key occurs, update the existing entry instead
+ * of creating a new one. Stores the built query in the state object.
+ *
+ * @param {Object} state State object, containing account info
+ * @param {Number} block Block number attached to the entry
+ * @param {Number} timestamp Timestamp attached to the entry
+ */
+const makeQuery = (state, block, timestamp) => {
+  const { id, identity, balances, isStaking } = state.account;
+  const query = ` \
+    INSERT INTO account (account_id, identity, balances, timestamp, block_height, is_staking) \
+    VALUES ('${id}', '${identity}', '${balances}', '${timestamp}', '${block}', ${isStaking}) \
+    ON CONFLICT (account_id) \
+    DO UPDATE \
+    SET identity = EXCLUDED.identity, \
+      balances = EXCLUDED.balances, \
+      timestamp = EXCLUDED.timestamp, \
+      is_staking = EXCLUDED.is_staking, \
+      block_height = EXCLUDED.block_height; \
+  `;
+
+  return { ...state, query };
+};
+
+/**
+ * Uses a provided connection pool to send a query to Postgres. Stores a promise
+ * wrapping the result of the execution in the state object.
+ *
+ * @param {Object} state State object, containing the query
+ * @param {Object} pool Postgres connection pool
+ */
+const execQuery = (state, pool) => {
+  const queryResult = pool
+    .query(state.query)
+    .catch(err =>
+      logger.error({ msg: `Error updating account ${state.account.id}`, err }),
+    );
+
+  return { ...state, queryResult };
+};
+
+/**
+ * Runs on every iteration. Fetches some data from the blockchain, constructs a
+ * SQL query and executes it.
+ *
+ * @param {*} api Polkadot API object
+ * @param {*} pool Postgres connection pool
+ */
+const exec = async (api, pool) => {
+  logger.info('Running active accounts crawler...');
+
+  const timestamp = Date.now();
+  const block = await fetchBlockNumber(api);
+  const validators = await fetchValidators(api);
+
+  const accountIds = await fetchAccountIds(api);
+
+  const accountsIdentity = await Promise.all(
+    accountIds.map(id => fetchAccountIdentity(id, api)),
+  );
+  const accountsBalances = await Promise.all(
+    accountIds.map(id => fetchAccountBalance(id, api)),
+  );
+  const accountsStaking = await Promise.all(
+    accountIds.map(id => fetchAccountStaking(id, api, validators)),
+  );
+
+  await Promise.all(
+    makeState([accountIds, accountsIdentity, accountsBalances, accountsStaking])
+      .map(state => makeQuery(state, block, timestamp))
+      .map(state => execQuery(state, pool))
+      .map(state => state.queryResult), // Pick the promise to await
+  );
+
+  logger.info(`Processed ${accountIds.length} active accounts`);
+};
+
+/**
+ * Calls the run function and then sets a timer to run it again after
+ * some time. 
+ * + * @param {*} api Polkadot API object + * @param {*} pool Postgres connection pool + * @param {*} config Crawler configuration + */ +const start = async (api, pool, config) => { + const pollingTime = config.pollingTime || DEFAULT_POLLING_TIME_MS; + + (async function run() { + await exec(api, pool).catch(err => + logger.error({ msg: 'Error running crawler', err }), + ); + + setTimeout(() => run(api, pool, config), pollingTime); + })(); +}; + +module.exports = { start }; diff --git a/lib/crawlers/blockHarvester.js b/lib/crawlers/blockHarvester.js index 0cf1c6f3..9e566a0d 100644 --- a/lib/crawlers/blockHarvester.js +++ b/lib/crawlers/blockHarvester.js @@ -81,6 +81,10 @@ module.exports = { // Get block events const blockEvents = await api.query.system.events.at(blockHash); + // Get block number finalized + // TODO: Get finalized from finalitytracker/final_hint extrinsic + const blockNumberFinalized = 0; + // Delete before insert to avoid duplicate key errors (issue #48) let sqlDelete = `DELETE FROM event WHERE block_number = '${endBlock}';`; try { @@ -186,6 +190,7 @@ module.exports = { const sqlInsert = `INSERT INTO block ( block_number, + block_number_finalized, block_author, block_author_name, block_hash, @@ -209,6 +214,7 @@ module.exports = { timestamp ) VALUES ( '${endBlock}', + '${blockNumberFinalized}', '${blockAuthor}', '${blockAuthorName}', '${blockHash}', diff --git a/lib/crawlers/blockListener.js b/lib/crawlers/blockListener.js index 4c217045..661cc78f 100644 --- a/lib/crawlers/blockListener.js +++ b/lib/crawlers/blockListener.js @@ -11,6 +11,9 @@ module.exports = { // Get block number const blockNumber = header.number.toNumber(); + // Get finalized block number + const blockNumberFinalized = await api.derive.chain.bestNumberFinalized(); + // Get block hash const blockHash = await api.rpc.chain.getBlockHash(blockNumber); @@ -83,6 +86,7 @@ module.exports = { const sqlInsert = `INSERT INTO block ( block_number, + block_number_finalized, block_author, block_author_name, block_hash, @@ -106,6 +110,7 @@ module.exports = { timestamp ) VALUES ( '${blockNumber}', + '${blockNumberFinalized}', '${blockAuthor}', '${blockAuthorName}', '${blockHash}', diff --git a/lib/crawlers/chain.js b/lib/crawlers/chain.js new file mode 100644 index 00000000..01e0d343 --- /dev/null +++ b/lib/crawlers/chain.js @@ -0,0 +1,76 @@ +// @ts-check + +const pino = require('pino'); +const logger = pino(); + +/** + * Fetch and store global chain counters on session change + * + * @param {object} api Polkadot API object + * @param {object} pool Postgres pool object + */ +async function start(api, pool) { + + logger.info('Starting chain crawler'); + + let currentSessionIndex = 0; + + // Subscribe to new blocks + await api.rpc.chain.subscribeNewHeads(async (header) => { + + const [session, blockHeight, totalIssuance] = await Promise.all([ + api.derive.session.info(), + api.derive.chain.bestNumber(), + api.query.balances.totalIssuance() + ]); + + if (session.currentIndex >= currentSessionIndex) { + currentSessionIndex = session.currentIndex; + let sqlSelect = `SELECT session_index FROM chain ORDER by session_index DESC LIMIT 1`; + const res = await pool.query(sqlSelect); + + if (res.rows.length > 0) { + if (res.rows[0].session_index < currentSessionIndex) { + const activeAccounts = await getTotalActiveAccounts(api); + await insertRow(pool, blockHeight, currentSessionIndex, totalIssuance, activeAccounts); + } + } else { + const activeAccounts = await getTotalActiveAccounts(api); + await insertRow(pool, blockHeight, 
currentSessionIndex, totalIssuance, activeAccounts);
+      }
+    }
+  });
+}
+
+async function getTotalActiveAccounts(api) {
+  const accountKeys = await api.query.system.account.keys();
+  const accounts = accountKeys.map(key => key.args[0].toHuman());
+  return accounts.length || 0;
+}
+
+async function insertRow(pool, blockHeight, currentSessionIndex, totalIssuance, activeAccounts) {
+  const sqlInsert =
+    `INSERT INTO chain (
+      block_height,
+      session_index,
+      total_issuance,
+      active_accounts,
+      timestamp
+    ) VALUES (
+      '${blockHeight}',
+      '${currentSessionIndex}',
+      '${totalIssuance}',
+      '${activeAccounts}',
+      '${new Date().getTime()}'
+    );`;
+  try {
+    await pool.query(sqlInsert);
+    logger.info('Updating chain info');
+    return true;
+  } catch (error) {
+    logger.error('Error updating chain info');
+    return false;
+  }
+}
+
+module.exports = { start };
diff --git a/lib/crawlers/phragmen.js b/lib/crawlers/phragmen.js
index f632a003..7375bdaa 100644
--- a/lib/crawlers/phragmen.js
+++ b/lib/crawlers/phragmen.js
@@ -7,7 +7,7 @@ const DEFAULT_OFFLINE_PHRAGMEN_PATH = 'offline-phragmen';
 const DEFAULT_POLLING_TIME = 5 * 60 * 1000;
 /**
- * start fetch some information from the blockchain using Poladot API, use
+ * start fetches some information from the blockchain using the Polkadot API, uses
 * the collected data to run "offline-phragmen" binary and stores the output
 * on the database.
 *
diff --git a/lib/crawlers/staking.js b/lib/crawlers/staking.js
index 1ca4987b..901b8832 100644
--- a/lib/crawlers/staking.js
+++ b/lib/crawlers/staking.js
@@ -52,10 +52,11 @@ module.exports = {
     //
     // Get all stash addresses, active validators, imOnline data, current elected and current era points earned
     //
-    const [allStashAddresses, validatorAddresses, imOnline, erasRewardPoints] = await Promise.all([
+    const [allStashAddresses, validatorAddresses, imOnline, stakingOverview, erasRewardPoints] = await Promise.all([
       api.derive.staking.stashes(),
       api.query.session.validators(),
       api.derive.imOnline.receivedHeartbeats(),
+      api.derive.staking.overview(),
       api.query.staking.erasRewardPoints(currentEraIndex)
     ]);
@@ -85,6 +86,18 @@ module.exports = {
         validator.imOnline = imOnline[validator.accountId];
       }
     }, imOnline);
+
+    //
+    // Add current elected property
+    //
+    const { nextElected } = JSON.parse(JSON.stringify(stakingOverview));
+    validatorStaking.forEach(function (validator) {
+      // nextElected holds address strings, so compare against the stringified accountId
+      if (nextElected.includes(validator.accountId.toString())) {
+        validator.currentElected = true;
+      } else {
+        validator.currentElected = false;
+      }
+    });
 //
 // Add earned era points to validator object
@@ -108,7 +121,7 @@ module.exports = {
     }
 //
-    // Populate graph data tables
+    // Populate validator graph data tables
     //
     console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mPopulating validator_bonded, validator_selfbonded, validator_num_nominators and validator_active tables\x1b[0m`);
     validatorStaking.forEach(async validator => {
@@ -171,6 +184,16 @@ module.exports = {
       console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[31mERROR: ${JSON.stringify(error)}\x1b[0m`);
     }
   }
+
+    //
+    // Populate intention_bonded table
+    //
+    console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mPopulating intention_bonded table\x1b[0m`);
+    intentionStaking.forEach(async intention => {
+      const sql = `INSERT INTO intention_bonded (block_number, session_index, account_id, amount, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${intention.accountId.toString()}', '${BigNumber(intention.stakingLedger.total).toString(10)}', extract(epoch from now()));`;
+      await 
pool.query(sql); + }) + } } \ No newline at end of file diff --git a/lib/crawlers/system.js b/lib/crawlers/system.js new file mode 100644 index 00000000..a0d85928 --- /dev/null +++ b/lib/crawlers/system.js @@ -0,0 +1,64 @@ +// @ts-check + +const pino = require('pino'); +const logger = pino(); + +/** + * Get polkadot node information and store in database + * + * @param {object} api Polkadot API object + * @param {object} pool Postgres pool object + */ +async function start(api, pool) { + + logger.info('Starting system crawler'); + + const [blockHeight, chain, nodeName, nodeVersion] = await Promise.all([ + api.derive.chain.bestNumber(), + api.rpc.system.chain(), + api.rpc.system.name(), + api.rpc.system.version() + ]); + + let sqlSelect = `SELECT chain, node_name, node_version FROM system ORDER by block_height DESC LIMIT 1`; + const res = await pool.query(sqlSelect); + + if (res.rows.length > 0) { + if ( + res.rows[0].chain !== chain || + res.rows[0].node_name !== nodeName || + res.rows[0].node_version !== nodeVersion + ) { + await insertRow(pool, blockHeight, chain, nodeName, nodeVersion); + } + } else { + await insertRow(pool, blockHeight, chain, nodeName, nodeVersion); + } +} + +async function insertRow(pool, blockHeight, chain, nodeName, nodeVersion) { + const sqlInsert = + `INSERT INTO system ( + block_height, + chain, + node_name, + node_version, + timestamp + ) VALUES ( + '${blockHeight}', + '${chain}', + '${nodeName}', + '${nodeVersion}', + '${new Date().getTime()}' + );`; + try { + await pool.query(sqlInsert); + logger.info('Updating system info'); + return true; + } catch (error) { + logger.error('Error updating system info'); + return false; + } +} + +module.exports = { start }; diff --git a/package-lock.json b/package-lock.json index 9aaca1a7..78cc174a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -192,6 +192,11 @@ "color-convert": "^2.0.1" } }, + "atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==" + }, "axios": { "version": "0.19.2", "resolved": "https://registry.npmjs.org/axios/-/axios-0.19.2.tgz", @@ -342,6 +347,11 @@ "ms": "2.0.0" } }, + "dotenv": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz", + "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==" + }, "elliptic": { "version": "6.5.2", "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.2.tgz", @@ -447,6 +457,21 @@ } } }, + "fast-redact": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-2.0.0.tgz", + "integrity": "sha512-zxpkULI9W9MNTK2sJ3BpPQrTEXFNESd2X6O1tXMFpK/XM0G5c5Rll2EVYZH2TqI3xRGK/VaJ+eEOt7pnENJpeA==" + }, + "fast-safe-stringify": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz", + "integrity": "sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA==" + }, + "flatstr": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/flatstr/-/flatstr-1.0.12.tgz", + "integrity": "sha512-4zPxDyhCyiN2wIAtSLI6gc82/EjqZc1onI4Mz/l0pWrAlsSfYH/2ZIcU+e3oA2wDwbzIWNKwa23F8rh6+DRWkw==" + }, "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", @@ -545,6 +570,11 @@ "resolved": 
"https://registry.npmjs.org/js-sha3/-/js-sha3-0.8.0.tgz", "integrity": "sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==" }, + "lodash": { + "version": "4.17.15", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", + "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + }, "lru-queue": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/lru-queue/-/lru-queue-0.1.0.tgz", @@ -684,6 +714,24 @@ "split": "^1.0.0" } }, + "pino": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-6.2.0.tgz", + "integrity": "sha512-UzrsiT5Wyscw7dxHa8Ec8G2kY45mwFk7rrZhMkCMg8s9F8VWDVj+WFcaSIKproTDyxlqerMaHw+11jlNXgeiCg==", + "requires": { + "fast-redact": "^2.0.0", + "fast-safe-stringify": "^2.0.7", + "flatstr": "^1.0.12", + "pino-std-serializers": "^2.4.2", + "quick-format-unescaped": "^4.0.1", + "sonic-boom": "^1.0.0" + } + }, + "pino-std-serializers": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-2.4.2.tgz", + "integrity": "sha512-WaL504dO8eGs+vrK+j4BuQQq6GLKeCCcHaMB2ItygzVURcL1CycwNEUHTD/lHFHs/NL5qAz2UKrjYWXKSf4aMQ==" + }, "postgres-array": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", @@ -707,6 +755,17 @@ "xtend": "^4.0.0" } }, + "prettier": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.0.4.tgz", + "integrity": "sha512-SVJIQ51spzFDvh4fIbCLvciiDMCrRhlN3mbZvv/+ycjvmF5E73bKdGfU8QDLNmjYJf+lsGnDBC4UUnvTe5OO0w==", + "dev": true + }, + "quick-format-unescaped": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.1.tgz", + "integrity": "sha512-RyYpQ6Q5/drsJyOhrWHYMWTedvjTIat+FTwv0K4yoUxzvekw2aRHMQJLlnvt8UantkZg2++bEzD9EdxXqkWf4A==" + }, "randombytes": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", @@ -761,6 +820,15 @@ "safe-buffer": "^5.0.1" } }, + "sonic-boom": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-1.0.1.tgz", + "integrity": "sha512-o9tx+bonVEXSaPtptyXQXpP8l6UV9Bi3im2geZskvWw2a/o/hrbWI7EBbbv+rOx6Hubnzun9GgH4WfbgEA3MFQ==", + "requires": { + "atomic-sleep": "^1.0.0", + "flatstr": "^1.0.12" + } + }, "split": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", @@ -850,4 +918,4 @@ "integrity": "sha1-8m9ITXJoTPQr7ft2lwqhYI+/lXc=" } } -} +} \ No newline at end of file diff --git a/package.json b/package.json index 0a28f6f4..0cfcc6d5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "polkastats-backend-v3", - "version": "3.0.0", + "version": "3.0.0-milestone2", "description": "PolkaStats backend v3", "main": "index.js", "scripts": { @@ -34,6 +34,9 @@ "@polkadot/api": "^v1.8.0-beta.2", "axios": "^0.19.2", "bignumber.js": "^9.0.0", - "pg": "^7.18.1" + "pg": "^7.18.1", + "dotenv": "^8.2.0", + "lodash": "^4.17.15", + "pino": "^6.2.0" } }