diff --git a/.dockerignore b/.dockerignore
index 1675f407..65cdc3f1 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,3 +1,3 @@
node_modules
-modules
+runtimes
npm-debug.log
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index f0203515..709732c4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -73,7 +73,7 @@ jobs:
context: .
load: true
tags: core-test
- cache-from: type=registry,ref=ghcr.io/filecoin-station/core
+ cache-from: type=registry,ref=ghcr.io/checkernetwork/node
cache-to: type=inline
- name: Build Docker image for other platforms
@@ -81,13 +81,13 @@ jobs:
with:
context: .
platforms: linux/arm64
- cache-from: type=registry,ref=ghcr.io/filecoin-station/core
+ cache-from: type=registry,ref=ghcr.io/checkernetwork/node
cache-to: type=inline
- - name: Start Station Core container with PASSPHRASE
+ - name: Start Checker Node container with PASSPHRASE
run: |
docker run \
- --name station \
+ --name checker \
--detach \
--env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD \
--env PASSPHRASE=secret \
@@ -95,28 +95,28 @@ jobs:
env:
IMAGEID: ${{ steps.docker_build.outputs.imageid }}
- - name: Print station activity
+ - name: Print checker activity
run: |
- sleep 10 # Wait for Station modules to start
- docker logs station
+ sleep 10 # Wait for Checker subnets to start
+ docker logs checker
- name: Check | Spark started
- run: docker logs station | grep "Spark started"
+ run: docker logs checker | grep "Spark started"
- - name: Start Station Core container without PASSPHRASE
+ - name: Start Checker Node container without PASSPHRASE
run: |
docker run \
- --name station_unsecure \
+ --name checker_unsecure \
--detach \
--env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD \
$IMAGEID
env:
IMAGEID: ${{ steps.docker_build.outputs.imageid }}
- - name: Print station activity
+ - name: Print checker activity
run: |
- sleep 10 # Wait for Station modules to start
- docker logs station_unsecure
+ sleep 10 # Wait for Checker subnets to start
+ docker logs checker_unsecure
- name: Check | Spark started
- run: docker logs station_unsecure | grep "Spark started"
+ run: docker logs checker_unsecure | grep "Spark started"
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index b063573b..64ff5a9a 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -40,8 +40,8 @@ jobs:
context: .
push: true
tags: |
- ghcr.io/CheckerNetwork/core
- ghcr.io/CheckerNetwork/core:${{ steps.package-version.outputs.current-version }}
+ ghcr.io/checkernetwork/node
+ ghcr.io/checkernetwork/node:${{ steps.package-version.outputs.current-version }}
platforms: linux/amd64,linux/arm64
- cache-from: type=registry,ref=ghcr.io/CheckerNetwork/core
+ cache-from: type=registry,ref=ghcr.io/checkernetwork/node
cache-to: type=inline
diff --git a/.gitignore b/.gitignore
index 8141d43a..66c01a22 100644
--- a/.gitignore
+++ b/.gitignore
@@ -99,7 +99,7 @@ typings/
# TernJS port file
.tern-port
.DS_Store
-modules
+runtimes
dist
.state
diff --git a/Dockerfile b/Dockerfile
index cd92aff9..2a2eca5b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,8 +1,8 @@
FROM node:22.13.1-slim
-LABEL org.opencontainers.image.source=https://github.com/CheckerNetwork/core
+LABEL org.opencontainers.image.source=https://github.com/CheckerNetwork/node
USER node
WORKDIR /usr/src/app
COPY . .
RUN npm ci --omit=dev
ENV DEPLOYMENT_TYPE=docker
-CMD [ "./bin/station.js" ]
+CMD [ "./bin/checker.js" ]
diff --git a/README.md b/README.md
index 901e7df8..53e78907 100644
--- a/README.md
+++ b/README.md
@@ -3,72 +3,73 @@
:artificial_satellite:
- Station Core
+ Checker Node
-> Station Core is a headless version of Filecoin Station suitable for running on
-> all kinds of servers.
+> Checker Node is a node implementation for the
+> [Checker Network](https://checker.network), suitable for running on all kinds
+> of servers.
-[](https://github.com/filecoin-station/core/actions/workflows/ci.yml)
+[![CI](https://github.com/CheckerNetwork/node/actions/workflows/ci.yml/badge.svg)](https://github.com/CheckerNetwork/node/actions/workflows/ci.yml)
## Deployment
-Station Core supports different deployment options:
+Checker Node supports different deployment options:
- [Docker](#docker)
- [Manual Deployment (Ubuntu)](#manual-deployment-ubuntu)
## Installation
-> **Note**: Station Core requires Node.js, we recommend using the latest LTS
+> **Note**: Checker Node requires Node.js, we recommend using the latest LTS
> version. You can install Node.js using your favorite package manager or get
> the official installer from
> [Node.js downloads](https://nodejs.org/en/download/).
-With Node.js installed, run `npm` to install Station Core.
+With Node.js installed, run `npm` to install Checker Node.
```bash
-$ npm install -g @filecoin-station/core
+$ npm install -g @checkernetwork/node
```
## Usage
```bash
-$ FIL_WALLET_ADDRESS=... PASSPHRASE=... station
+$ FIL_WALLET_ADDRESS=... PASSPHRASE=... checker
```
## Common Configuration
-Station Core is configured using environment variables (see
+Checker Node is configured using environment variables (see
[The Twelve-Factor App](https://12factor.net/config)).
-The following configuration options are shared by all Station commands:
+The following configuration options are shared by all Checker commands:
-- `$CACHE_ROOT` _(string; optional)_: Station stores temporary files (e.g.
+- `$CACHE_ROOT` _(string; optional)_: Checker stores temporary files (e.g.
cached data) in this directory. Defaults to
- - Linux: `${XDG_CACHE_HOME:-~/.cache}/filecoin-station-core`
- - macOS: `~/Library/Caches/app.filstation.core`
- - Windows: `%TEMP%/Filecoin Station Core`
-- `$STATE_ROOT` _(string; optional)_: Station stores logs and module state in
+ - Linux: `${XDG_CACHE_HOME:-~/.cache}/checker-network-node`
+ - macOS: `~/Library/Caches/network.checker.node`
+ - Windows: `%TEMP%/Checker Network Node`
+- `$STATE_ROOT` _(string; optional)_: Checker stores logs and subnet state in
this directory. Defaults to
- - Linux: `${XDG_STATE_HOME:-~/.local/state}/filecoin-station-core`
- - macOS: `~/Library/Application Support/app.filstation.core`
- - Windows: `%LOCALAPPDATA%/Filecoin Station Core`
+ - Linux: `${XDG_STATE_HOME:-~/.local/state}/checker-network-node`
+ - macOS: `~/Library/Application Support/network.checker.node`
+ - Windows: `%LOCALAPPDATA%/Checker Network Node`
**IMPORTANT:** The`$STATE_ROOT` directory must be local to the computer
- running the Station. This directory must not be shared with other computers
+ running the Checker. This directory must not be shared with other computers
operated by the user, e.g. via Windows Domain profile or cloud storage like
iCloud Drive, Dropbox and OneDrive.
## Commands
-### `$ station`
+### `$ checker`
-Start a new Station process. The Station will run in foreground and can be
+Start a new Checker process. The Checker will run in foreground and can be
terminated by pressing Ctrl+C.
This command has the following additional configuration in addition to common
@@ -79,7 +80,7 @@ the configuration options described in
will receive rewards. The value must be a mainnet address starting with
`f410`, `0x`.
- `f1` addresses currently are not supported. Rewards for Station operators are
+ `f1` addresses currently are not supported. Rewards for Checker operators are
administered by a FEVM smart contract. It is currently technically complex to
make payments to f1 addresses.
@@ -87,17 +88,17 @@ the configuration options described in
`0x000000000000000000000000000000000000dEaD`. Please note that any earnings
sent there will be lost.
-- `PASSPHRASE` _(string; optional)_: a passphrase to protect the Station
+- `PASSPHRASE` _(string; optional)_: a passphrase to protect the Checker
instance private key stored in a file inside the `STATE_ROOT` directory.
-- `MODULE_FILTER` _(string; optional)_: Run only the Zinnia module with the
- given name. Eg:
- - `MODULE_FILTER=spark`
+- `SUBNET_FILTER` _(string; optional)_: Run only the subnet with the given name.
+ Eg:
+ - `SUBNET_FILTER=spark`
This command outputs metrics and activity events:
```bash
-$ station
+$ checker
{
"totalJobsCompleted": 161,
"rewardsScheduledForAddress": "0.041033208757289921"
@@ -108,10 +109,10 @@ $ station
```
```bash
-$ station --json
+$ checker --json
{"type":"jobs-completed","total":161}
-{"type":"activity:info","module":"Saturn","message":"Saturn Node will try to connect to the Saturn Orchestrator..."}
-{"type":"activity:info","module":"Saturn","message":"Saturn Node was able to connect to the Orchestrator and will now start connecting to the Saturn network..."}
+{"type":"activity:info","subnet":"Saturn","message":"Saturn Node will try to connect to the Saturn Orchestrator..."}
+{"type":"activity:info","subnet":"Saturn","message":"Saturn Node was able to connect to the Orchestrator and will now start connecting to the Saturn network..."}
...
```
@@ -120,56 +121,56 @@ For the JSON output, the following event types exist:
- `jobs-completed`
- `total`
- `activity:info`
- - `module`
+ - `subnet`
- `message`
- `activity:error`
- - `module`
+ - `subnet`
- `message`
-Set the flag `--experimental` to run modules not yet considered safe for
+Set the flag `--experimental` to run subnets not yet considered safe for
production use. _Run this at your own risk!_
-No modules currently in experimental mode.
+No subnets currently in experimental mode.
-### `$ station --help`
+### `$ checker --help`
Show help.
```bash
-$ station --help
-Usage: station [options]
+$ checker --help
+Usage: checker [options]
Options:
-j, --json Output JSON [boolean]
- --experimental Also run experimental modules [boolean]
- --recreateStationIdOnError Recreate Station ID if it is corrupted
+ --experimental Also run experimental subnets [boolean]
+ --recreateCheckerIdOnError Recreate Checker ID if it is corrupted
[boolean]
-v, --version Show version number [boolean]
-h, --help Show help [boolean]
```
-### `$ station --version`
+### `$ checker --version`
Show version number.
```bash
-$ station --version
-@filecoin-station/core: 1.0.1
+$ checker --version
+@checkernetwork/node: 1.0.1
```
## Docker
-Deploy Station with [Docker](https://www.docker.com/). Please replace
+Deploy Checker with [Docker](https://www.docker.com/). Please replace
`FIL_WALLET_ADDRESS` and ensure the passed `state` folder is persisted across
machine restarts.
```bash
$ docker run \
- --name station \
+ --name checker \
--detach \
--env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD \
-v ./state:/home/node/.local/state/
- ghcr.io/CheckerNetwork/core
+ ghcr.io/checkernetwork/node
```
## Manual Deployment (Ubuntu)
@@ -182,21 +183,21 @@ $ curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash - &&\
sudo apt-get install -y nodejs
# Install core
-$ npm install -g @filecoin-station/core
+$ npm install -g @checkernetwork/node
# Create systemd service
# Don't forget to replace FIL_WALLET_ADDRESS and User
-$ sudo tee /etc/systemd/system/station.service > /dev/null < /dev/null < yargs
.option('json', {
alias: 'j',
@@ -30,13 +33,13 @@ yargs(hideBin(process.argv))
})
.option('experimental', {
type: 'boolean',
- description: 'Also run experimental modules'
+ description: 'Also run experimental subnets'
})
- .option('recreateStationIdOnError', {
+ .option('recreateCheckerIdOnError', {
type: 'boolean',
- description: 'Recreate Station ID if it is corrupted'
+ description: 'Recreate Checker ID if it is corrupted'
}),
- ({ json, experimental, recreateStationIdOnError }) => station({ json, experimental, recreateStationIdOnError })
+ ({ json, experimental, recreateCheckerIdOnError }) => checker({ json, experimental, recreateCheckerIdOnError })
)
.version(`${pkg.name}: ${pkg.version}`)
.alias('v', 'version')
diff --git a/commands/station.js b/commands/checker.js
similarity index 79%
rename from commands/station.js
rename to commands/checker.js
index b0a733e7..3db4d281 100644
--- a/commands/station.js
+++ b/commands/checker.js
@@ -5,7 +5,7 @@ import { runPingLoop, runMachinesLoop } from '../lib/telemetry.js'
import fs from 'node:fs/promises'
import { metrics } from '../lib/metrics.js'
import { paths } from '../lib/paths.js'
-import { getStationId } from '../lib/station-id.js'
+import { getCheckerId } from '../lib/checker-id.js'
import pRetry from 'p-retry'
import { fetch } from 'undici'
import { ethAddressFromDelegated, isEthAddress } from '@glif/filecoin-address'
@@ -18,7 +18,7 @@ const {
PASSPHRASE
} = process.env
-const moduleNames = [
+const runtimeNames = [
'zinnia'
]
@@ -31,7 +31,7 @@ const panic = (msg, exitCode = 1) => {
process.exit(exitCode)
}
-export const station = async ({ json, recreateStationIdOnError, experimental }) => {
+export const checker = async ({ json, recreateCheckerIdOnError, experimental }) => {
if (!FIL_WALLET_ADDRESS) panic('FIL_WALLET_ADDRESS required')
if (FIL_WALLET_ADDRESS.startsWith('f1')) {
panic('Invalid FIL_WALLET_ADDRESS: f1 addresses are currently not supported. Please use an f4 or 0x address.')
@@ -46,8 +46,8 @@ export const station = async ({ json, recreateStationIdOnError, experimental })
panic('Invalid FIL_WALLET_ADDRESS ethereum address', 2)
}
- const keypair = await getStationId({ secretsDir: paths.secrets, passphrase: PASSPHRASE, recreateOnError: recreateStationIdOnError })
- const STATION_ID = keypair.publicKey
+ const keypair = await getCheckerId({ secretsDir: paths.secrets, passphrase: PASSPHRASE, recreateOnError: recreateCheckerIdOnError })
+ const CHECKER_ID = keypair.publicKey
const fetchRes = await pRetry(
() => fetch(`https://station-wallet-screening.fly.dev/${FIL_WALLET_ADDRESS}`),
@@ -62,16 +62,16 @@ export const station = async ({ json, recreateStationIdOnError, experimental })
const ethAddress = FIL_WALLET_ADDRESS.startsWith('0x')
? FIL_WALLET_ADDRESS
: ethAddressFromDelegated(FIL_WALLET_ADDRESS)
- for (const moduleName of moduleNames) {
- await fs.mkdir(join(paths.moduleCache, moduleName), { recursive: true })
- await fs.mkdir(join(paths.moduleState, moduleName), { recursive: true })
+ for (const runtimeName of runtimeNames) {
+ await fs.mkdir(join(paths.runtimeCache, runtimeName), { recursive: true })
+ await fs.mkdir(join(paths.runtimeState, runtimeName), { recursive: true })
}
activities.onActivity(activity => {
if (json) {
console.log(JSON.stringify({
type: `activity:${activity.type}`,
- module: activity.source,
+ subnet: activity.source,
message: activity.message
}))
} else {
@@ -96,7 +96,7 @@ export const station = async ({ json, recreateStationIdOnError, experimental })
})
if (experimental) {
- console.error('No experimental modules available at this point')
+ console.error('No experimental subnets available at this point')
}
const contracts = []
@@ -113,17 +113,17 @@ export const station = async ({ json, recreateStationIdOnError, experimental })
await Promise.all([
zinniaRuntime.run({
provider,
- STATION_ID,
+ CHECKER_ID,
FIL_WALLET_ADDRESS: ethAddress,
ethAddress,
- STATE_ROOT: join(paths.moduleState, 'zinnia'),
- CACHE_ROOT: join(paths.moduleCache, 'zinnia'),
- moduleVersionsDir: paths.moduleVersionsDir,
- moduleSourcesDir: paths.moduleSourcesDir,
+ STATE_ROOT: join(paths.runtimeState, 'zinnia'),
+ CACHE_ROOT: join(paths.runtimeCache, 'zinnia'),
+ subnetVersionsDir: paths.subnetVersionsDir,
+ subnetSourcesDir: paths.subnetSourcesDir,
onActivity: activity => {
activities.submit({
...activity,
- // Zinnia will try to overwrite `source` if a module created the
+ // Zinnia will try to overwrite `source` if a subnet created the
// activity. Using the spread syntax won't work because a
// `source: null` would overwrite the default value.
source: activity.source || 'Zinnia'
@@ -131,8 +131,8 @@ export const station = async ({ json, recreateStationIdOnError, experimental })
},
onMetrics: m => metrics.submit('zinnia', m)
}),
- runPingLoop({ STATION_ID }),
- runMachinesLoop({ STATION_ID }),
+ runPingLoop({ CHECKER_ID }),
+ runMachinesLoop({ CHECKER_ID }),
runUpdateContractsLoop({
provider,
contracts,
diff --git a/lib/station-id.js b/lib/checker-id.js
similarity index 88%
rename from lib/station-id.js
rename to lib/checker-id.js
index 9c266753..15f9fff5 100644
--- a/lib/station-id.js
+++ b/lib/checker-id.js
@@ -11,15 +11,15 @@ import { subtle, getRandomValues } from 'node:crypto'
* @param {import('node:console')} [args.log]
* @returns {Promise<{publicKey: string, privateKey: string}>}
*/
-export async function getStationId ({ secretsDir, passphrase, recreateOnError = false, log = console }) {
+export async function getCheckerId ({ secretsDir, passphrase, recreateOnError = false, log = console }) {
assert.strictEqual(typeof secretsDir, 'string', 'secretsDir must be a string')
await fs.mkdir(secretsDir, { recursive: true })
- const keystore = path.join(secretsDir, 'station_id')
+ const keystore = path.join(secretsDir, 'checker_id')
try {
const keypair = await loadKeypair(keystore, passphrase, { log })
- log.error('Loaded Station ID: %s', keypair.publicKey)
+ log.error('Loaded Checker ID: %s', keypair.publicKey)
return keypair
} catch (err) {
if (recreateOnError || (err.code === 'ENOENT' && err.path === keystore)) {
@@ -28,7 +28,7 @@ export async function getStationId ({ secretsDir, passphrase, recreateOnError =
return await generateKeypair(keystore, passphrase, { log })
} else {
throw new Error(
- `Cannot load Station ID from file "${keystore}". ${err.message}`,
+ `Cannot load Checker ID from file "${keystore}". ${err.message}`,
{ cause: err }
)
}
@@ -63,7 +63,7 @@ async function loadKeypair (keystore, passphrase, { log }) {
plaintext = await decrypt(passphrase, ciphertext)
} catch (err) {
throw new Error(
- 'Cannot decrypt Station ID file. Did you configure the correct PASSPHRASE? Alternatively overwrite it using `--recreateStationIdOnError`',
+ 'Cannot decrypt Checker ID file. Did you configure the correct PASSPHRASE? Alternatively overwrite it using `--recreateCheckerIdOnError`',
{ cause: err }
)
}
@@ -91,7 +91,7 @@ async function tryUpgradePlaintextToCiphertext (passphrase, keystore, maybeCiphe
// re-create the keypair file with encrypted keypair
await storeKeypair(passphrase, keystore, keypair)
- log.error('Encrypted the Station ID file using the provided PASSPHRASE.')
+ log.error('Encrypted the Checker ID file using the provided PASSPHRASE.')
return keypair
}
/**
@@ -100,8 +100,8 @@ async function tryUpgradePlaintextToCiphertext (passphrase, keystore, maybeCiphe
*/
function parseStoredKeys (json) {
const storedKeys = JSON.parse(Buffer.from(json).toString())
- assert.strictEqual(typeof storedKeys.publicKey, 'string', 'station_id is corrupted: invalid publicKey')
- assert.strictEqual(typeof storedKeys.privateKey, 'string', 'station_id is corrupted: invalid privateKey')
+ assert.strictEqual(typeof storedKeys.publicKey, 'string', 'checker_id is corrupted: invalid publicKey')
+ assert.strictEqual(typeof storedKeys.privateKey, 'string', 'checker_id is corrupted: invalid privateKey')
return storedKeys
}
@@ -116,9 +116,9 @@ async function generateKeypair (keystore, passphrase, { log }) {
if (!passphrase) {
log.warn(`
*****************************************************************************************
- The private key of the identity of your Station instance will be stored in plaintext.
+ The private key of the identity of your Checker instance will be stored in plaintext.
We strongly recommend you to configure PASSPHRASE environment variable to enable
- Station to encrypt the private key stored on the filesystem.
+ Checker to encrypt the private key stored on the filesystem.
*****************************************************************************************
`)
}
@@ -129,7 +129,7 @@ async function generateKeypair (keystore, passphrase, { log }) {
)
const publicKey = Buffer.from(await subtle.exportKey('spki', keyPair.publicKey)).toString('hex')
const privateKey = Buffer.from(await subtle.exportKey('pkcs8', keyPair.privateKey)).toString('hex')
- log.error('Generated a new Station ID:', publicKey)
+ log.error('Generated a new Checker ID:', publicKey)
await storeKeypair(passphrase, keystore, { publicKey, privateKey })
return { publicKey, privateKey }
}
diff --git a/lib/metrics.js b/lib/metrics.js
index eba0ea0c..2e95bdf5 100644
--- a/lib/metrics.js
+++ b/lib/metrics.js
@@ -21,57 +21,55 @@ export class Metrics {
constructor () {
this.mergedMetrics = null
/** @type {Map} */
- this.moduleMetrics = new Map()
+ this.subnetMetrics = new Map()
this.lastErrorReportedAt = 0
}
/**
* - Filters duplicate entries
* - Writes `jobs-completed` to InfluxDB
- * - Merges metrics from all modules
- * @param {String} moduleName
+ * - Merges metrics from all subnets
+ * @param {String} subnetName
* @param {Partial} metrics
*/
- submit (moduleName, metrics) {
+ submit (subnetName, metrics) {
/** @type {MetricsEvent} */
const resolvedMetrics = {
// initial values
totalJobsCompleted: 0,
rewardsScheduledForAddress: 0n,
// or values submitted previously
- ...this.moduleMetrics.get(moduleName),
+ ...this.subnetMetrics.get(subnetName),
// or values submitted now
...metrics
}
if (
typeof metrics.totalJobsCompleted === 'number' &&
- typeof this.moduleMetrics.get(moduleName)?.totalJobsCompleted === 'number'
+ typeof this.subnetMetrics.get(subnetName)?.totalJobsCompleted === 'number'
) {
const diff = metrics.totalJobsCompleted -
- this.moduleMetrics.get(moduleName).totalJobsCompleted
+ this.subnetMetrics.get(subnetName).totalJobsCompleted
if (diff < 0) {
this.maybeReportErrorToSentry(
- new Error(`Negative jobs completed for ${moduleName}`)
+ new Error(`Negative jobs completed for ${subnetName}`)
)
} else if (diff > 0) {
writeClient.writePoint(
new Point('jobs-completed')
- .tag('module', moduleName)
- // TODO: remove this after July 2024
- .stringField('module', moduleName)
+ .tag('subnet', subnetName)
.intField('value', diff)
)
}
}
- this.moduleMetrics.set(moduleName, resolvedMetrics)
+ this.subnetMetrics.set(subnetName, resolvedMetrics)
const mergedMetrics = {
totalJobsCompleted: 0,
rewardsScheduledForAddress: 0n
}
- for (const [, metrics] of this.moduleMetrics) {
+ for (const [, metrics] of this.subnetMetrics) {
mergedMetrics.totalJobsCompleted += metrics.totalJobsCompleted
- // Merging rewards metrics should be revisited as more modules start
+ // Merging rewards metrics should be revisited as more subnets start
// paying rewards
mergedMetrics.rewardsScheduledForAddress +=
metrics.rewardsScheduledForAddress
@@ -101,7 +99,7 @@ export class Metrics {
if (now - this.lastErrorReportedAt < 4 /* HOURS */ * 3600_000) return
this.lastErrorReportedAt = now
- console.error('Reporting the problem to Sentry for inspection by the Station team.')
+ console.error('Reporting the problem to Sentry for inspection by the Checker team.')
Sentry.captureException(err)
}
}
diff --git a/lib/migrate.js b/lib/migrate.js
new file mode 100644
index 00000000..9802eade
--- /dev/null
+++ b/lib/migrate.js
@@ -0,0 +1,27 @@
+import { join } from 'node:path'
+import { stat, rename, mkdir } from 'node:fs/promises'
+import { paths, getOldStateRoot } from './paths.js'
+
+const exists = async path => {
+ try {
+ await stat(path)
+ return true
+ } catch {
+ return false
+ }
+}
+
+export const maybeMigrateRuntimeState = async () => {
+ const newRuntimeStatePath = paths.runtimeState
+ const oldRuntimeStatePath = join(getOldStateRoot(), 'modules')
+ const hasNewState = await exists(newRuntimeStatePath)
+ const hasOldState = await exists(oldRuntimeStatePath)
+ if (!hasNewState && hasOldState) {
+ console.error(
+ `Migrating runtime state files from ${oldRuntimeStatePath} to ${newRuntimeStatePath}`
+ )
+ await mkdir(join(newRuntimeStatePath, '..'), { recursive: true })
+ await rename(oldRuntimeStatePath, newRuntimeStatePath)
+ console.error('Migration complete')
+ }
+}
diff --git a/lib/paths.js b/lib/paths.js
index 6e9b501c..cc227c8b 100644
--- a/lib/paths.js
+++ b/lib/paths.js
@@ -12,19 +12,19 @@ const {
XDG_STATE_HOME = join(homedir(), '.local', 'state')
} = process.env
-export const getPaths = ({ cacheRoot, stateRoot }) => ({
+const getPaths = ({ cacheRoot, stateRoot }) => ({
secrets: join(stateRoot, 'secrets'),
- moduleCache: join(cacheRoot, 'modules'),
- moduleState: join(stateRoot, 'modules'),
- moduleSourcesDir: join(cacheRoot, 'sources'),
- moduleVersionsDir: join(stateRoot, 'modules', 'latest'),
+ runtimeCache: join(cacheRoot, 'runtimes'),
+ runtimeState: join(stateRoot, 'runtimes'),
+ subnetSourcesDir: join(cacheRoot, 'subnets', 'sources'),
+ subnetVersionsDir: join(stateRoot, 'subnets', 'latest'),
lockFile: join(stateRoot, '.lock')
})
export const getDefaultRootDirs = () => {
switch (platform()) {
case 'darwin': {
- const appId = 'app.filstation.core'
+ const appId = 'network.checker.node'
return {
cacheRoot: CACHE_ROOT || join(homedir(), 'Library', 'Caches', appId),
stateRoot: STATE_ROOT ||
@@ -34,7 +34,7 @@ export const getDefaultRootDirs = () => {
case 'win32': {
assert(TEMP || CACHE_ROOT, '%TEMP% required')
assert(LOCALAPPDATA || STATE_ROOT, '%LOCALAPPDATA% required')
- const appName = 'Filecoin Station Core'
+ const appName = 'Checker Network Node'
return {
cacheRoot: CACHE_ROOT || join(TEMP, appName),
// We must use LOCALAPPDATA (not APPDATA) to avoid sharing the state with other computers
@@ -43,7 +43,7 @@ export const getDefaultRootDirs = () => {
}
}
case 'linux': {
- const appSlug = 'filecoin-station-core'
+ const appSlug = 'checker-network-node'
return {
cacheRoot: CACHE_ROOT || join(XDG_CACHE_HOME, appSlug),
stateRoot: STATE_ROOT || join(XDG_STATE_HOME, appSlug)
@@ -54,9 +54,23 @@ export const getDefaultRootDirs = () => {
}
}
+export const getOldStateRoot = () => {
+ switch (platform()) {
+ case 'darwin':
+ return STATE_ROOT ||
+ join(homedir(), 'Library', 'Application Support', 'app.filstation.core')
+ case 'win32':
+ return STATE_ROOT || join(LOCALAPPDATA, 'Filecoin Station Core')
+ case 'linux':
+ return STATE_ROOT || join(XDG_STATE_HOME, 'filecoin-station-core')
+ default:
+ throw new Error(`Unsupported platform: ${platform()}`)
+ }
+}
+
export const paths = getPaths(getDefaultRootDirs())
-export const moduleBinaries = fileURLToPath(
- new URL('../modules', import.meta.url)
+export const runtimeBinaries = fileURLToPath(
+ new URL('../runtimes', import.meta.url)
)
export const packageJSON = fileURLToPath(
new URL('../package.json', import.meta.url)
diff --git a/lib/runtime.js b/lib/runtime.js
new file mode 100644
index 00000000..85adf525
--- /dev/null
+++ b/lib/runtime.js
@@ -0,0 +1,95 @@
+import os from 'node:os'
+import assert from 'node:assert'
+import { join } from 'node:path'
+import { mkdir, chmod } from 'node:fs/promises'
+import { fetch } from 'undici'
+import { pipeline } from 'node:stream/promises'
+import unzip from 'unzip-stream'
+import { createWriteStream } from 'node:fs'
+import { runtimeBinaries } from './paths.js'
+import * as tar from 'tar'
+
+/** @typedef {import('unzip-stream').UnzipStreamEntry} UnzipStreamEntry */
+
+const { GITHUB_TOKEN } = process.env
+const authorization = GITHUB_TOKEN ? `Bearer ${GITHUB_TOKEN}` : undefined
+
+export const getRuntimeExecutable = ({
+ runtime,
+ executable
+}) => {
+ return join(
+ runtimeBinaries,
+ runtime,
+ getExecutableFileName(executable)
+ )
+}
+
+const getExecutableFileName = executable => {
+ return `${executable}${os.platform() === 'win32' ? '.exe' : ''}`
+}
+
+export const installRuntime = async ({
+ runtime,
+ repo,
+ distTag,
+ executable,
+ targets,
+ arch
+}) => {
+ console.log(
+ `[${runtime}] GitHub client: ${authorization ? 'authorized' : 'anonymous'}`
+ )
+ const target = targets.find(target =>
+ target.platform === os.platform() && target.arch === arch
+ )
+ assert(target, `[${runtime}] Unsupported platform: ${os.platform()} ${arch}`)
+
+ await mkdir(runtimeBinaries, { recursive: true })
+ const outFile = join(runtimeBinaries, runtime)
+
+ console.log(`[${runtime}] ⇣ downloading ${os.platform()} ${arch}`)
+ const res = await fetch(
+ `https://github.com/${repo}/releases/download/${distTag}/${target.asset}`,
+ {
+ headers: {
+ ...(authorization ? { authorization } : {})
+ },
+ redirect: 'follow'
+ }
+ )
+
+ if (res.status >= 300) {
+ throw new Error(
+ `[${runtime}] Cannot fetch binary ${os.platform()} ${arch}: ${res.status}\n` +
+ await res.text()
+ )
+ }
+
+ if (!res.body) {
+ throw new Error(
+ `[${runtime}] Cannot fetch binary ${os.platform()} ${arch}: no response body`
+ )
+ }
+
+ if (target.asset.endsWith('tar.gz')) {
+ await mkdir(outFile, { recursive: true })
+ await pipeline(
+ /** @type {any} */(res.body),
+ /** @type {any} */(tar.x({ C: outFile }))
+ )
+ } else {
+ await mkdir(join(runtimeBinaries, runtime), { recursive: true })
+ const parser = unzip.Parse()
+ parser.on('entry', async entry => {
+ const executableFileName = getExecutableFileName(executable)
+ const outPath = join(runtimeBinaries, runtime, entry.path)
+ await pipeline(entry, createWriteStream(outPath))
+ if (entry.path === executableFileName) {
+ await chmod(outPath, 0o755)
+ }
+ })
+ await pipeline(res.body, parser)
+ }
+ console.log(`[${runtime}] ✓ ${outFile}`)
+}
diff --git a/lib/modules.js b/lib/subnets.js
similarity index 50%
rename from lib/modules.js
rename to lib/subnets.js
index 753c5b18..4bd98ec4 100644
--- a/lib/modules.js
+++ b/lib/subnets.js
@@ -1,23 +1,14 @@
-import os from 'node:os'
import assert from 'node:assert'
import { join } from 'node:path'
-import { mkdir, chmod, rm, readFile, writeFile, stat } from 'node:fs/promises'
+import { mkdir, rm, readFile, writeFile, stat } from 'node:fs/promises'
import { fetch, Headers } from 'undici'
import { pipeline } from 'node:stream/promises'
-import unzip from 'unzip-stream'
-import { createWriteStream } from 'node:fs'
-import { moduleBinaries } from './paths.js'
import * as Name from 'w3name'
import { CarReader } from '@ipld/car'
import { validateBlock } from '@web3-storage/car-block-validator'
import { recursive as exporter } from 'ipfs-unixfs-exporter'
-import * as tar from 'tar'
import { reportW3NameError } from './telemetry.js'
-
-/** @typedef {import('unzip-stream').UnzipStreamEntry} UnzipStreamEntry */
-
-const { GITHUB_TOKEN } = process.env
-const authorization = GITHUB_TOKEN ? `Bearer ${GITHUB_TOKEN}` : undefined
+import * as tar from 'tar'
const gateways = [
'w3s.link',
@@ -25,86 +16,6 @@ const gateways = [
'dweb.link'
]
-export const getBinaryModuleExecutable = ({
- module,
- executable
-}) => {
- return join(
- moduleBinaries,
- module,
- getExecutableFileName(executable)
- )
-}
-
-const getExecutableFileName = executable => {
- return `${executable}${os.platform() === 'win32' ? '.exe' : ''}`
-}
-
-export const installBinaryModule = async ({
- module,
- repo,
- distTag,
- executable,
- targets,
- arch
-}) => {
- console.log(
- `[${module}] GitHub client: ${authorization ? 'authorized' : 'anonymous'}`
- )
- const target = targets.find(target =>
- target.platform === os.platform() && target.arch === arch
- )
- assert(target, `[${module}] Unsupported platform: ${os.platform()} ${arch}`)
-
- await mkdir(moduleBinaries, { recursive: true })
- const outFile = join(moduleBinaries, module)
-
- console.log(`[${module}] ⇣ downloading ${os.platform()} ${arch}`)
- const res = await fetch(
- `https://github.com/${repo}/releases/download/${distTag}/${target.asset}`,
- {
- headers: {
- ...(authorization ? { authorization } : {})
- },
- redirect: 'follow'
- }
- )
-
- if (res.status >= 300) {
- throw new Error(
- `[${module}] Cannot fetch binary ${os.platform()} ${arch}: ${res.status}\n` +
- await res.text()
- )
- }
-
- if (!res.body) {
- throw new Error(
- `[${module}] Cannot fetch binary ${os.platform()} ${arch}: no response body`
- )
- }
-
- if (target.asset.endsWith('tar.gz')) {
- await mkdir(outFile, { recursive: true })
- await pipeline(
- /** @type {any} */(res.body),
- /** @type {any} */(tar.x({ C: outFile }))
- )
- } else {
- await mkdir(join(moduleBinaries, module), { recursive: true })
- const parser = unzip.Parse()
- parser.on('entry', async entry => {
- const executableFileName = getExecutableFileName(executable)
- const outPath = join(moduleBinaries, module, entry.path)
- await pipeline(entry, createWriteStream(outPath))
- if (entry.path === executableFileName) {
- await chmod(outPath, 0o755)
- }
- })
- await pipeline(res.body, parser)
- }
- console.log(`[${module}] ✓ ${outFile}`)
-}
-
async function getLatestCID (ipnsKey) {
const name = Name.parse(ipnsKey)
let revision
@@ -120,9 +31,9 @@ async function getLatestCID (ipnsKey) {
return revision.value.split('/').pop()
}
-async function getLastSeenModuleCID ({ module, moduleVersionsDir }) {
+async function getLastSeenSubnetCID ({ subnet, subnetVersionsDir }) {
try {
- return await readFile(join(moduleVersionsDir, module), 'utf-8')
+ return await readFile(join(subnetVersionsDir, subnet), 'utf-8')
} catch (err) {
if (err.code !== 'ENOENT') {
throw err
@@ -131,25 +42,25 @@ async function getLastSeenModuleCID ({ module, moduleVersionsDir }) {
return undefined
}
-async function setLastSeenModuleCID ({ module, cid, moduleVersionsDir }) {
- await mkdir(moduleVersionsDir, { recursive: true })
- await writeFile(join(moduleVersionsDir, module), cid)
+async function setLastSeenSubnetCID ({ subnet, cid, subnetVersionsDir }) {
+ await mkdir(subnetVersionsDir, { recursive: true })
+ await writeFile(join(subnetVersionsDir, subnet), cid)
}
export async function updateSourceFiles ({
- module,
+ subnet,
ipnsKey,
- moduleVersionsDir,
- moduleSourcesDir,
+ subnetVersionsDir,
+ subnetSourcesDir,
noCache
}) {
- await mkdir(moduleSourcesDir, { recursive: true })
- const outDir = join(moduleSourcesDir, module)
+ await mkdir(subnetSourcesDir, { recursive: true })
+ const outDir = join(subnetSourcesDir, subnet)
- const lastSeenCID = await getLastSeenModuleCID({ module, moduleVersionsDir })
+ const lastSeenCID = await getLastSeenSubnetCID({ subnet, subnetVersionsDir })
if (lastSeenCID !== undefined) {
// Use `console.error` because with `--json` stdout needs to be JSON only
- console.error(`[${module}] ⇣ checking for updates`)
+ console.error(`[${subnet}] ⇣ checking for updates`)
}
const cid = await getLatestCID(ipnsKey)
@@ -157,10 +68,10 @@ export async function updateSourceFiles ({
if (!isUpdate) {
try {
await stat(join(outDir, 'main.js'))
- console.error(`[${module}] ✓ no update available`)
+ console.error(`[${subnet}] ✓ no update available`)
return false
} catch (err) {
- console.error(`[${module}] Cannot find sources on disk`)
+ console.error(`[${subnet}] Cannot find sources on disk`)
}
}
@@ -168,7 +79,7 @@ export async function updateSourceFiles ({
for (const gateway of gateways) {
try {
const url = `https://${cid}.ipfs.${gateway}?format=car`
- console.error(`[${module}] ⇣ downloading source files via ${url}`)
+ console.error(`[${subnet}] ⇣ downloading source files via ${url}`)
const headers = new Headers()
if (noCache) headers.append('Cache-Control', 'no-cache')
res = await fetch(url, {
@@ -178,21 +89,21 @@ export async function updateSourceFiles ({
if (res.status >= 300) {
throw new Error(
- `[${module}] Cannot fetch ${module} archive for ${cid}: ${res.status}\n` +
+ `[${subnet}] Cannot fetch ${subnet} archive for ${cid}: ${res.status}\n` +
await res.text()
)
}
if (!res.body) {
throw new Error(
- `[${module}] Cannot fetch ${module} archive for ${cid}: no response body`
+ `[${subnet}] Cannot fetch ${subnet} archive for ${cid}: no response body`
)
}
break
} catch (err) {
if (gateway === gateways[gateways.length - 1]) {
throw new Error(
- `[${module}] Can't download module sources from any gateway`,
+ `[${subnet}] Can't download subnet sources from any gateway`,
{ cause: err }
)
} else {
@@ -219,7 +130,7 @@ export async function updateSourceFiles ({
}
})
const { value: entry } = await entries.next()
- assert(entry, `No entries in ${module} archive`)
+ assert(entry, `No entries in ${subnet} archive`)
// Depending on size, entries might be packaged as `file` or `raw`
// https://github.com/web3-storage/w3up/blob/e8bffe2ee0d3a59a977d2c4b7efe425699424e19/packages/upload-client/src/unixfs.js#L11
if (entry.type === 'file' || entry.type === 'raw') {
@@ -253,8 +164,8 @@ export async function updateSourceFiles ({
throw err
}
- await setLastSeenModuleCID({ module, cid, moduleVersionsDir })
- console.error(`[${module}] ✓ ${outDir}`)
+ await setLastSeenSubnetCID({ subnet, cid, subnetVersionsDir })
+ console.error(`[${subnet}] ✓ ${outDir}`)
return isUpdate
}
diff --git a/lib/telemetry.js b/lib/telemetry.js
index db56bf01..5096d52a 100644
--- a/lib/telemetry.js
+++ b/lib/telemetry.js
@@ -9,7 +9,7 @@ import timers from 'node:timers/promises'
const { FIL_WALLET_ADDRESS, DEPLOYMENT_TYPE = 'cli' } = process.env
-const validDeploymentTypes = ['cli', 'docker', 'station-desktop']
+const validDeploymentTypes = ['cli', 'docker', 'checker-app']
assert(
validDeploymentTypes.includes(DEPLOYMENT_TYPE),
`Invalid DEPLOYMENT_TYPE: ${DEPLOYMENT_TYPE}. Options: ${validDeploymentTypes.join(', ')}`
@@ -50,7 +50,7 @@ const handleFlushError = err => {
Sentry.captureException(err)
}
-export const runPingLoop = async ({ STATION_ID }) => {
+export const runPingLoop = async ({ CHECKER_ID }) => {
assert(FIL_WALLET_ADDRESS)
while (true) {
@@ -59,7 +59,7 @@ export const runPingLoop = async ({ STATION_ID }) => {
'wallet',
createHash('sha256').update(FIL_WALLET_ADDRESS).digest('hex')
)
- point.stringField('station_id', STATION_ID)
+ point.stringField('checker_id', CHECKER_ID)
point.stringField('process_uuid', processUUID)
point.stringField('version', pkg.version)
point.tag('station', 'core')
@@ -72,10 +72,10 @@ export const runPingLoop = async ({ STATION_ID }) => {
}
}
-export const runMachinesLoop = async ({ STATION_ID }) => {
+export const runMachinesLoop = async ({ CHECKER_ID }) => {
while (true) {
const point = new Point('machine')
- point.stringField('station_id', STATION_ID)
+ point.stringField('checker_id', CHECKER_ID)
point.stringField('process_uuid', processUUID)
point.intField('cpu_count', cpus().length)
if (cpus().length > 0) {
diff --git a/lib/zinnia.js b/lib/zinnia.js
index a6db14f4..53580ce6 100644
--- a/lib/zinnia.js
+++ b/lib/zinnia.js
@@ -1,26 +1,33 @@
import { execa } from 'execa'
import * as Sentry from '@sentry/node'
-import { installBinaryModule, updateSourceFiles, getBinaryModuleExecutable } from './modules.js'
+import { installRuntime, getRuntimeExecutable } from './runtime.js'
+import { updateSourceFiles } from './subnets.js'
import os from 'node:os'
import pRetry from 'p-retry'
import timers from 'node:timers/promises'
import { join } from 'node:path'
const ZINNIA_DIST_TAG = 'v0.20.3'
-const ZINNIA_MODULES = [
+const SUBNETS = [
{
- module: 'spark',
+ subnet: 'spark',
ipnsKey: 'k51qzi5uqu5dlej5gtgal40sjbowuau5itwkr6mgyuxdsuhagjxtsfqjd6ym3g'
}
]
const {
TARGET_ARCH = os.arch(),
- MODULE_FILTER = ''
+ SUBNET_FILTER = '',
+ MODULE_FILTER // Legacy
} = process.env
-export const install = () => installBinaryModule({
- module: 'zinnia',
- repo: 'filecoin-station/zinnia',
+if (MODULE_FILTER) {
+ console.error('MODULE_FILTER is deprecated, use SUBNET_FILTER instead')
+ process.exit(1)
+}
+
+export const install = () => installRuntime({
+ runtime: 'zinnia',
+ repo: 'CheckerNetwork/zinnia',
distTag: ZINNIA_DIST_TAG,
executable: 'zinniad',
arch: TARGET_ARCH,
@@ -54,36 +61,36 @@ const maybeReportErrorToSentry = (/** @type {unknown} */ err) => {
const tail = err.details.split(/\n/g).slice(-50).join('\n')
hint.extra.details = tail
}
- if ('moduleName' in err && typeof err.moduleName === 'string') {
- hint.extra.moduleName = err.moduleName
+ if ('subnetName' in err && typeof err.subnetName === 'string') {
+ hint.extra.subnetName = err.subnetName
}
}
- console.error('Reporting the problem to Sentry for inspection by the Station team.')
+ console.error('Reporting the problem to Sentry for inspection by the Checker team.')
Sentry.captureException(err, hint)
}
-const matchesModuleFilter = module =>
- MODULE_FILTER === '' || module === MODULE_FILTER
+const matchesSubnetFilter = subnet =>
+ SUBNET_FILTER === '' || subnet === SUBNET_FILTER
const capitalize = str => `${str.charAt(0).toUpperCase()}${str.slice(1)}`
const updateAllSourceFiles = async ({
- moduleVersionsDir,
- moduleSourcesDir,
+ subnetVersionsDir,
+ subnetSourcesDir,
signal
}) => {
- const modules = await Promise.all(
+ const subnets = await Promise.all(
Object
- .values(ZINNIA_MODULES)
- .filter(({ module }) => matchesModuleFilter(module))
- .map(({ module, ipnsKey }) =>
+ .values(SUBNETS)
+ .filter(({ subnet }) => matchesSubnetFilter(subnet))
+ .map(({ subnet, ipnsKey }) =>
pRetry(
attemptNumber => updateSourceFiles({
- module,
+ subnet,
ipnsKey,
- moduleVersionsDir,
- moduleSourcesDir,
+ subnetVersionsDir,
+ subnetSourcesDir,
noCache: attemptNumber > 1
}),
{
@@ -91,7 +98,7 @@ const updateAllSourceFiles = async ({
retries: 10,
onFailedAttempt: err => {
console.error(err)
- const msg = `Failed to download ${module} source. Retrying...`
+ const msg = `Failed to download ${subnet} source. Retrying...`
console.error(msg)
if (String(err).includes('You are being rate limited')) {
const delaySeconds = 60 + (Math.random() * 60)
@@ -106,7 +113,7 @@ const updateAllSourceFiles = async ({
)
)
)
- const hasUpdated = modules.find(updated => updated === true)
+ const hasUpdated = subnets.find(updated => updated === true)
return hasUpdated
}
@@ -114,8 +121,8 @@ const runUpdateSourceFilesLoop = async ({
controller,
signal,
onActivity,
- moduleVersionsDir,
- moduleSourcesDir
+ subnetVersionsDir,
+ subnetSourcesDir
}) => {
while (true) {
if (signal.aborted) {
@@ -131,14 +138,14 @@ const runUpdateSourceFilesLoop = async ({
}
try {
const shouldRestart = await updateAllSourceFiles({
- moduleVersionsDir,
- moduleSourcesDir,
+ subnetVersionsDir,
+ subnetSourcesDir,
signal
})
if (shouldRestart) {
onActivity({
type: 'info',
- message: 'Updated Zinnia module source code, restarting...'
+ message: 'Updated subnet source code, restarting...'
})
controller.abort()
return
@@ -146,7 +153,7 @@ const runUpdateSourceFilesLoop = async ({
} catch (err) {
onActivity({
type: 'error',
- message: 'Failed to update Zinnia module source code'
+ message: 'Failed to update subnet source code'
})
console.error(err)
maybeReportErrorToSentry(err)
@@ -163,13 +170,13 @@ const catchChildProcessExit = async ({
const tasks = childProcesses.map(p => (async () => {
try {
await p
- onActivity({ type: 'info', message: `${capitalize(module)} exited` })
+ onActivity({ type: 'info', message: `${capitalize(p.subnetName)} exited` })
} catch (err) {
- // When the child process crash, attach the module name & the exit reason to the error object
+ // When the child process crash, attach the subnet name & the exit reason to the error object
const exitReason = p.exitCode
? `with exit code ${p.exitCode}`
: p.signalCode ? `via signal ${p.signalCode}` : undefined
- throw Object.assign(err, { moduleName: p.moduleName, exitReason, signalCode: p.signalCode })
+ throw Object.assign(err, { subnetName: p.subnetName, exitReason, signalCode: p.signalCode })
}
})())
@@ -178,14 +185,14 @@ const catchChildProcessExit = async ({
if (err.name === 'AbortError') {
Object.assign(err, { reportToSentry: false })
} else {
- const moduleName = capitalize(err.moduleName ?? 'Zinnia')
+ const subnetName = capitalize(err.subnetName ?? 'Zinnia')
const exitReason = err.exitReason ?? 'for unknown reason'
- const message = `${moduleName} crashed ${exitReason}`
+ const message = `${subnetName} crashed ${exitReason}`
onActivity({ type: 'error', message })
- const moduleErr = new Error(message, { cause: err })
+ const subnetErr = new Error(message, { cause: err })
// Store the full error message including stdout & stder in the top-level `details` property
- Object.assign(moduleErr, { details: err.message })
+ Object.assign(subnetErr, { details: err.message })
if (err.signalCode && ['SIGTERM', 'SIGKILL', 'SIGINT'].includes(err.signalCode)) {
// These signal codes are triggered when somebody terminates the process from outside.
@@ -195,11 +202,11 @@ const catchChildProcessExit = async ({
// (Note that this event has been already logged via `onActivity()` call above.)
return
} else {
- // Apply a custom rule to force Sentry to group all issues with the same module & exit code
+ // Apply a custom rule to force Sentry to group all issues with the same subnet & exit code
// See https://docs.sentry.io/platforms/node/usage/sdk-fingerprinting/#basic-example
Sentry.withScope(scope => {
scope.setFingerprint([message])
- maybeReportErrorToSentry(moduleErr)
+ maybeReportErrorToSentry(subnetErr)
})
}
}
@@ -211,38 +218,38 @@ const catchChildProcessExit = async ({
export async function run ({
provider,
- STATION_ID,
+ CHECKER_ID,
FIL_WALLET_ADDRESS,
ethAddress,
STATE_ROOT,
CACHE_ROOT,
- moduleVersionsDir,
- moduleSourcesDir,
+ subnetVersionsDir,
+ subnetSourcesDir,
onActivity,
onMetrics,
isUpdated = false
}) {
- const zinniadExe = getBinaryModuleExecutable({ module: 'zinnia', executable: 'zinniad' })
+ const zinniadExe = getRuntimeExecutable({ runtime: 'zinnia', executable: 'zinniad' })
if (!isUpdated) {
try {
onActivity({
type: 'info',
- message: 'Updating source code for Zinnia modules...'
+ message: 'Updating source code for subnets...'
})
await updateAllSourceFiles({
- moduleVersionsDir,
- moduleSourcesDir,
+ subnetVersionsDir,
+ subnetSourcesDir,
signal: null
})
onActivity({
type: 'info',
- message: 'Zinnia module source code up to date'
+ message: 'Subnet source code up to date'
})
} catch (err) {
onActivity({
type: 'error',
- message: 'Failed to download latest Zinnia module source code'
+      message: 'Failed to download latest subnet source code'
})
throw err
}
@@ -252,17 +259,17 @@ export async function run ({
const { signal } = controller
const childProcesses = []
- for (const { module } of ZINNIA_MODULES) {
- if (!matchesModuleFilter(module)) continue
+ for (const { subnet } of SUBNETS) {
+ if (!matchesSubnetFilter(subnet)) continue
- // all paths are relative to `moduleBinaries`
+    // all paths are relative to `subnetSourcesDir` (the child process cwd)
const childProcess = execa(
zinniadExe,
- [join(module, 'main.js')],
+ [join(subnet, 'main.js')],
{
- cwd: moduleSourcesDir,
+ cwd: subnetSourcesDir,
env: {
- STATION_ID,
+ STATION_ID: CHECKER_ID,
FIL_WALLET_ADDRESS,
STATE_ROOT,
CACHE_ROOT
@@ -270,7 +277,7 @@ export async function run ({
cancelSignal: signal
}
)
- childProcesses.push(Object.assign(childProcess, { moduleName: module }))
+ childProcesses.push(Object.assign(childProcess, { subnetName: subnet }))
let timeoutId
const resetTimeout = () => {
@@ -278,7 +285,7 @@ export async function run ({
timeoutId = setTimeout(() => {
onActivity({
type: 'error',
- message: `${capitalize(module)} has been inactive for 5 minutes, restarting...`
+ message: `${capitalize(subnet)} has been inactive for 5 minutes, restarting...`
})
const err = new Error('Module inactive for 5 minutes')
@@ -295,7 +302,7 @@ export async function run ({
childProcess.stdout.on('data', data => {
resetTimeout()
handleEvents({
- module,
+ subnet,
onActivity,
onMetrics,
text: data
@@ -317,8 +324,8 @@ export async function run ({
controller,
signal,
onActivity,
- moduleVersionsDir,
- moduleSourcesDir
+ subnetVersionsDir,
+ subnetSourcesDir
}),
catchChildProcessExit({ childProcesses, onActivity, controller })
])
@@ -338,13 +345,13 @@ export async function run ({
// size, as awaiting promises unwinds the stack
return run({
provider,
- STATION_ID,
+ CHECKER_ID,
FIL_WALLET_ADDRESS,
ethAddress,
STATE_ROOT,
CACHE_ROOT,
- moduleVersionsDir,
- moduleSourcesDir,
+ subnetVersionsDir,
+ subnetSourcesDir,
onActivity,
onMetrics,
isUpdated: true
@@ -354,7 +361,7 @@ export async function run ({
const jobsCompleted = {}
async function handleEvents ({
- module,
+ subnet,
onActivity,
onMetrics,
text
@@ -372,16 +379,16 @@ async function handleEvents ({
case 'activity:started':
onActivity({
type: 'info',
- message: `${capitalize(module)} started`,
- source: module
+ message: `${capitalize(subnet)} started`,
+ source: subnet
})
break
case 'activity:info':
onActivity({
type: 'info',
message:
- event.message.replace(/Module Runtime/, capitalize(module)),
- source: event.module
+ event.message.replace(/Module Runtime/, capitalize(subnet)),
+ source: event.subnet
})
break
@@ -389,13 +396,13 @@ async function handleEvents ({
onActivity({
type: 'error',
message:
- event.message.replace(/Module Runtime/, capitalize(module)),
- source: event.module
+ event.message.replace(/Module Runtime/, capitalize(subnet)),
+ source: event.subnet
})
break
case 'jobs-completed': {
- jobsCompleted[module] = event.total
+ jobsCompleted[subnet] = event.total
const totalJobsCompleted = Object.values(jobsCompleted).reduce((a, b) => a + b, 0)
onMetrics({ totalJobsCompleted })
break
diff --git a/package-lock.json b/package-lock.json
index 6a52cd2f..685444b7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,11 +1,11 @@
{
- "name": "@filecoin-station/core",
+ "name": "@checkernetwork/node",
"version": "21.2.6",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
- "name": "@filecoin-station/core",
+ "name": "@checkernetwork/node",
"version": "21.2.6",
"hasInstallScript": true,
"license": "(Apache-2.0 AND MIT)",
@@ -27,7 +27,7 @@
"yargs": "^17.7.1"
},
"bin": {
- "station": "bin/station.js"
+ "checker": "bin/checker.js"
},
"devDependencies": {
"@types/yargs": "^17.0.24",
diff --git a/package.json b/package.json
index 42dab2d6..81367da6 100644
--- a/package.json
+++ b/package.json
@@ -1,17 +1,17 @@
{
- "name": "@filecoin-station/core",
+ "name": "@checkernetwork/node",
"version": "21.2.6",
- "description": "Filecoin Station Core",
+ "description": "Checker Network Node",
"license": "(Apache-2.0 AND MIT)",
"repository": {
"type": "git",
- "url": "git+https://github.com/filecoin-station/core.git"
+ "url": "git+https://github.com/CheckerNetwork/node.git"
},
"main": "index.js",
"types": "dist/index.d.ts",
"type": "module",
"bin": {
- "station": "bin/station.js"
+ "checker": "bin/checker.js"
},
"engines": {
"node": ">=18"
@@ -19,7 +19,7 @@
"scripts": {
"format": "prettier --write .",
"lint": "npm run test:lint",
- "start": "cross-env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD STATE_ROOT=.state CACHE_ROOT=.cache node ./bin/station.js",
+ "start": "cross-env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD STATE_ROOT=.state CACHE_ROOT=.cache node ./bin/checker.js",
"test": "npm run test:lint && npm run test:types && npm run test:unit",
"test:lint": "prettier --check . && standard",
"test:types": "tsc -p .",
diff --git a/test/station-id.test.js b/test/checker-id.test.js
similarity index 68%
rename from test/station-id.test.js
rename to test/checker-id.test.js
index 2f44732e..b2df258f 100644
--- a/test/station-id.test.js
+++ b/test/checker-id.test.js
@@ -1,7 +1,7 @@
import assert from 'node:assert'
import fs from 'node:fs/promises'
import path from 'node:path'
-import { decrypt, encrypt, getStationId } from '../lib/station-id.js'
+import { decrypt, encrypt, getCheckerId } from '../lib/checker-id.js'
import { getUniqueTempDir } from './util.js'
const log = {
@@ -9,67 +9,67 @@ const log = {
warn () {}
}
-describe('station-id', () => {
- describe('getStationId', () => {
+describe('checker-id', () => {
+ describe('getCheckerId', () => {
it('creates a new key and stores it in the given path', async () => {
const secretsDir = getUniqueTempDir()
- const generated = await getStationId({ secretsDir, passphrase: 'secret', log })
+ const generated = await getCheckerId({ secretsDir, passphrase: 'secret', log })
assert.match(generated.publicKey, /^[0-9a-z]+$/)
assert.match(generated.privateKey, /^[0-9a-z]+$/)
- await fs.stat(path.join(secretsDir, 'station_id'))
+ await fs.stat(path.join(secretsDir, 'checker_id'))
// the check passes if the statement above does not throw
- const loaded = await getStationId({ secretsDir, passphrase: 'secret', log })
+ const loaded = await getCheckerId({ secretsDir, passphrase: 'secret', log })
assert.deepStrictEqual(loaded, generated)
})
it('returns a public key that is exactly 88 characters long', async () => {
- // spark-api is enforcing this constraint and rejecting measurements containing stationId
+ // spark-api is enforcing this constraint and rejecting measurements containing checkerId
// in a different format
const secretsDir = getUniqueTempDir()
- const { publicKey } = await await getStationId({ secretsDir, passphrase: 'secret', log })
+      const { publicKey } = await getCheckerId({ secretsDir, passphrase: 'secret', log })
assert.strictEqual(publicKey.length, 88, 'publicKey.length')
assert.match(publicKey, /^[0-9A-Za-z]*$/)
})
it('skips encryption when passphrase is not set', async () => {
const secretsDir = getUniqueTempDir()
- const generated = await getStationId({ secretsDir, passphrase: '', log })
+ const generated = await getCheckerId({ secretsDir, passphrase: '', log })
assert.match(generated.publicKey, /^[0-9a-z]+$/)
assert.match(generated.privateKey, /^[0-9a-z]+$/)
- await fs.stat(path.join(secretsDir, 'station_id'))
+ await fs.stat(path.join(secretsDir, 'checker_id'))
// the check passes if the statement above does not throw
- const loaded = await getStationId({ secretsDir, passphrase: '', log })
+ const loaded = await getCheckerId({ secretsDir, passphrase: '', log })
assert.deepStrictEqual(loaded, generated)
})
it('provides a helpful error message when the file cannot be decrypted', async () => {
const secretsDir = getUniqueTempDir()
- await getStationId({ secretsDir, passphrase: 'secret', log })
+ await getCheckerId({ secretsDir, passphrase: 'secret', log })
await assert.rejects(
- getStationId({ secretsDir, passphrase: 'wrong pass', log }),
- /Cannot decrypt Station ID file. Did you configure the correct PASSPHRASE/
+ getCheckerId({ secretsDir, passphrase: 'wrong pass', log }),
+ /Cannot decrypt Checker ID file. Did you configure the correct PASSPHRASE/
)
})
- it('recreates unreadable station ids on demand', async () => {
+ it('recreates unreadable checker ids on demand', async () => {
const secretsDir = getUniqueTempDir()
- await getStationId({ secretsDir, passphrase: 'secret', log })
- await getStationId({ secretsDir, passphrase: 'new pass', recreateOnError: true, log })
+ await getCheckerId({ secretsDir, passphrase: 'secret', log })
+ await getCheckerId({ secretsDir, passphrase: 'new pass', recreateOnError: true, log })
})
- it('encrypts plaintext station_id file when PASSPHRASE is provided', async () => {
+ it('encrypts plaintext checker_id file when PASSPHRASE is provided', async () => {
const secretsDir = getUniqueTempDir()
- const generated = await getStationId({ secretsDir, passphrase: '', log })
- const plaintext = await fs.readFile(path.join(secretsDir, 'station_id'))
+ const generated = await getCheckerId({ secretsDir, passphrase: '', log })
+ const plaintext = await fs.readFile(path.join(secretsDir, 'checker_id'))
- const loaded = await getStationId({ secretsDir, passphrase: 'super-secret', log })
+ const loaded = await getCheckerId({ secretsDir, passphrase: 'super-secret', log })
assert.deepStrictEqual(loaded, generated)
- const ciphertext = await fs.readFile(path.join(secretsDir, 'station_id'))
+ const ciphertext = await fs.readFile(path.join(secretsDir, 'checker_id'))
assert.notStrictEqual(plaintext.toString('hex'), ciphertext.toString('hex'))
})
})
diff --git a/test/station.js b/test/checker.js
similarity index 69%
rename from test/station.js
rename to test/checker.js
index c6dd5988..3a382fcb 100644
--- a/test/station.js
+++ b/test/checker.js
@@ -1,13 +1,13 @@
import assert from 'node:assert'
import { execa } from 'execa'
-import { station, FIL_WALLET_ADDRESS, PASSPHRASE, getUniqueTempDir } from './util.js'
+import { checker, FIL_WALLET_ADDRESS, PASSPHRASE, getUniqueTempDir } from './util.js'
import streamMatch from 'stream-match'
import getStream from 'get-stream'
import { once } from 'node:events'
-describe('Station', () => {
+describe('Checker', () => {
it('runs Zinnia', async () => {
- const ps = startStation()
+ const ps = startChecker()
await Promise.race([
once(ps, 'exit'),
Promise.all([
@@ -17,43 +17,43 @@ describe('Station', () => {
])
// Assert that the process did not exit prematurely
assert.strictEqual(ps.exitCode, null)
- stopStation()
+ stopChecker()
})
- // No experimental modules available at this point
- // it('runs experimental modules', () => {
+ // No experimental subnets available at this point
+ // it('runs experimental subnets', () => {
// it('runs Bacalhau', async () => {
- // const ps = startStation(['--experimental'])
- // await streamMatch(ps.stdout, 'Bacalhau module started.')
- // stopStation()
+ // const ps = startChecker(['--experimental'])
+ // await streamMatch(ps.stdout, 'Bacalhau subnet started.')
+ // stopChecker()
// })
// })
it('outputs events', async () => {
- const ps = startStation()
+ const ps = startChecker()
await Promise.all([
streamMatch(ps.stdout, 'totalJobsCompleted'),
streamMatch(ps.stdout, 'Spark started')
])
- stopStation()
+ stopChecker()
})
it('outputs events json', async () => {
- const ps = startStation(['--json'])
+ const ps = startChecker(['--json'])
await Promise.all([
streamMatch(ps.stdout, 'jobs-completed'),
streamMatch(ps.stdout, /activity:info.*(Spark started)/)
])
- stopStation()
+ stopChecker()
})
let ps, stdout, stderr
- function startStation (cliArgs = []) {
- assert(!ps, 'Station is already running')
+ function startChecker (cliArgs = []) {
+ assert(!ps, 'Checker is already running')
const CACHE_ROOT = getUniqueTempDir()
const STATE_ROOT = getUniqueTempDir()
ps = execa(
- station,
+ checker,
cliArgs,
{ env: { CACHE_ROOT, STATE_ROOT, FIL_WALLET_ADDRESS, PASSPHRASE } }
)
@@ -62,16 +62,16 @@ describe('Station', () => {
return ps
}
- function stopStation () {
+ function stopChecker () {
ps.kill()
ps = undefined
}
afterEach(async () => {
if (!ps) return
- // The test failed and did not stop the Station process
+ // The test failed and did not stop the Checker process
// Let's stop the process and print stdout & stderr for troubleshooting
- stopStation()
+ stopChecker()
console.log('== STATION STDOUT ==\n%s', await stdout)
console.log('== STATION STDERR ==\n%s', await stderr)
diff --git a/test/cli.js b/test/cli.js
index c93e2bb4..25def02d 100644
--- a/test/cli.js
+++ b/test/cli.js
@@ -1,12 +1,12 @@
import assert from 'node:assert'
import { execa } from 'execa'
-import { station, FIL_WALLET_ADDRESS, PASSPHRASE, getUniqueTempDir } from './util.js'
+import { checker, FIL_WALLET_ADDRESS, PASSPHRASE, getUniqueTempDir } from './util.js'
import { once } from 'node:events'
describe('CLI', () => {
describe('FIL_WALLET_ADDRESS', () => {
it('fails without address', async () => {
- await assert.rejects(execa(station, {
+ await assert.rejects(execa(checker, {
env: {
STATE_ROOT: getUniqueTempDir(),
PASSPHRASE
@@ -15,7 +15,7 @@ describe('CLI', () => {
})
it('fails with sanctioned address', async () => {
try {
- await execa(station, {
+ await execa(checker, {
env: {
STATE_ROOT: getUniqueTempDir(),
PASSPHRASE,
@@ -26,11 +26,11 @@ describe('CLI', () => {
assert.strictEqual(err.exitCode, 2)
return
}
- assert.fail('Expected Station Core to return a non-zero exit code')
+ assert.fail('Expected Checker to return a non-zero exit code')
})
it('fails with invalid 0x address', async () => {
try {
- await execa(station, {
+ await execa(checker, {
env: {
STATE_ROOT: getUniqueTempDir(),
PASSPHRASE,
@@ -41,10 +41,10 @@ describe('CLI', () => {
assert.strictEqual(err.exitCode, 2)
return
}
- assert.fail('Expected Station Core to return a non-zero exit code')
+ assert.fail('Expected Checker to return a non-zero exit code')
})
it('starts without passphrase in a fresh install', async () => {
- const ps = execa(station, {
+ const ps = execa(checker, {
env: {
STATE_ROOT: getUniqueTempDir(),
FIL_WALLET_ADDRESS
@@ -54,7 +54,7 @@ describe('CLI', () => {
ps.kill()
})
it('works with address and passphrase', async () => {
- const ps = execa(station, {
+ const ps = execa(checker, {
env: {
STATE_ROOT: getUniqueTempDir(),
FIL_WALLET_ADDRESS,
@@ -66,7 +66,7 @@ describe('CLI', () => {
})
it('fails with the wrong pass phrase', async () => {
const STATE_ROOT = getUniqueTempDir()
- const ps = execa(station, {
+ const ps = execa(checker, {
env: {
STATE_ROOT,
FIL_WALLET_ADDRESS,
@@ -76,7 +76,7 @@ describe('CLI', () => {
await once(ps.stdout, 'data')
ps.kill()
try {
- await execa(station, {
+ await execa(checker, {
env: {
STATE_ROOT,
FIL_WALLET_ADDRESS,
@@ -87,15 +87,15 @@ describe('CLI', () => {
assert.strictEqual(err.exitCode, 1)
return
}
- assert.fail('Expected Station Core to return a non-zero exit code')
+ assert.fail('Expected Checker to return a non-zero exit code')
})
})
- describe('--recreateStationIdOnError', () => {
- it('recreates the station id on demand', async () => {
+ describe('--recreateCheckerIdOnError', () => {
+ it('recreates the checker id on demand', async () => {
const STATE_ROOT = getUniqueTempDir()
{
- const ps = execa(station, {
+ const ps = execa(checker, {
env: {
STATE_ROOT,
FIL_WALLET_ADDRESS,
@@ -106,7 +106,7 @@ describe('CLI', () => {
ps.kill()
}
{
- const ps = execa(station, ['--recreateStationIdOnError'], {
+ const ps = execa(checker, ['--recreateCheckerIdOnError'], {
env: {
STATE_ROOT,
FIL_WALLET_ADDRESS,
@@ -121,15 +121,15 @@ describe('CLI', () => {
describe('--version', () => {
it('outputs version', async () => {
- await execa(station, ['--version'])
- await execa(station, ['-v'])
+ await execa(checker, ['--version'])
+ await execa(checker, ['-v'])
})
})
describe('--help', () => {
it('outputs help text', async () => {
- await execa(station, ['--help'])
- await execa(station, ['-h'])
+ await execa(checker, ['--help'])
+ await execa(checker, ['-h'])
})
})
})
diff --git a/test/metrics.js b/test/metrics.js
index 98099216..0f2b98e4 100644
--- a/test/metrics.js
+++ b/test/metrics.js
@@ -5,11 +5,11 @@ describe('Metrics', () => {
describe('submit', () => {
it('should merge metrics', () => {
const metrics = new Metrics()
- metrics.submit('module1', {
+ metrics.submit('subnet1', {
totalJobsCompleted: 1,
rewardsScheduledForAddress: 1n
})
- metrics.submit('module2', {
+ metrics.submit('subnet2', {
totalJobsCompleted: 2,
rewardsScheduledForAddress: 2n
})
@@ -37,15 +37,15 @@ describe('Metrics', () => {
}
i++
})
- metrics.submit('module1', {
+ metrics.submit('subnet1', {
totalJobsCompleted: 1,
rewardsScheduledForAddress: 0n
})
- metrics.submit('module1', {
+ metrics.submit('subnet1', {
totalJobsCompleted: 1,
rewardsScheduledForAddress: 0n
})
- metrics.submit('module2', {
+ metrics.submit('subnet2', {
totalJobsCompleted: 1,
rewardsScheduledForAddress: 0n
})
diff --git a/test/storage.js b/test/storage.js
index 2cef58d8..5c656a6f 100644
--- a/test/storage.js
+++ b/test/storage.js
@@ -1,5 +1,5 @@
import { execa } from 'execa'
-import { station, FIL_WALLET_ADDRESS, PASSPHRASE } from './util.js'
+import { checker, FIL_WALLET_ADDRESS, PASSPHRASE } from './util.js'
import { once } from 'node:events'
import { tmpdir } from 'node:os'
import fs from 'node:fs/promises'
@@ -11,7 +11,7 @@ describe('Storage', async () => {
const CACHE_ROOT = join(tmpdir(), randomUUID())
const STATE_ROOT = join(tmpdir(), randomUUID())
const ps = execa(
- station,
+ checker,
{ env: { CACHE_ROOT, STATE_ROOT, FIL_WALLET_ADDRESS, PASSPHRASE } }
)
while (true) {
@@ -22,8 +22,8 @@ describe('Storage', async () => {
} catch {}
}
ps.kill()
- await fs.stat(join(CACHE_ROOT, 'modules'))
+ await fs.stat(join(CACHE_ROOT, 'runtimes'))
await fs.stat(STATE_ROOT)
- await fs.stat(join(STATE_ROOT, 'modules'))
+ await fs.stat(join(STATE_ROOT, 'runtimes'))
})
})
diff --git a/test/util.js b/test/util.js
index cb93558e..a442539e 100644
--- a/test/util.js
+++ b/test/util.js
@@ -6,8 +6,8 @@ import { fileURLToPath } from 'node:url'
export const FIL_WALLET_ADDRESS = '0x000000000000000000000000000000000000dEaD'
export const PASSPHRASE = 'secret'
-export const station = fileURLToPath(
- new URL('../bin/station.js', import.meta.url)
+export const checker = fileURLToPath(
+ new URL('../bin/checker.js', import.meta.url)
)
export const getUniqueTempDir = () => {