diff --git a/.env.sample b/.env.sample
index 053c3578..85479a37 100644
--- a/.env.sample
+++ b/.env.sample
@@ -1,13 +1,3 @@
-#general
-ENABLE_KIND_2=true #relay submissions
-ENABLE_KIND_3=true #contact list
-ENABLE_KIND_10002=true #relay list
-ENABLE_ONION=true
-ENABLE_CLEARNET=true
-
-ENABLE_BLOCKLIST=true
-BLOCKLIST=""
-
POSTGRES_HOST="localhost"
POSTGRES_USER="postgres"
POSTGRES_PASS="postgres"
diff --git a/.gitignore b/.gitignore
index 74cb8f31..80fe8d98 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,4 +12,7 @@ packages/nostrwatch-history-relay/strfry-db/data.mdb
packages/relaydb-cli
dist/js/chunk-vendors.cd206719.js.map
node_modules
-ours.sh
\ No newline at end of file
+ours.sh
+.pg
+packages/synx
+packages/kinds
\ No newline at end of file
diff --git a/package.json b/package.json
index cb8a54db..1e5c9be3 100644
--- a/package.json
+++ b/package.json
@@ -7,9 +7,7 @@
"clean": "lerna clean",
"test": "lerna run test"
},
- "devDependencies": {
- "lerna": "^4.0.0"
- },
+ "devDependencies": {},
"workspaces": [ "packages/*", "packages/nocap/adapters/**/*" ],
"main": "index.js",
"repository": "git@github.com:sandwichfarm/nostr-watch.git",
diff --git a/packages/controlflow/index.js b/packages/controlflow/index.js
index ed6e0d95..bf71cc9f 100644
--- a/packages/controlflow/index.js
+++ b/packages/controlflow/index.js
@@ -1,11 +1,3 @@
-import { Trawler, Nocapd, RestApi } from './queues.js'
-import { Scheduler as NWScheduler } from './scheduler.js'
-
-export default {
- NWQueues: {
- Trawler,
- Nocapd,
- RestApi
- },
- NWScheduler
-}
\ No newline at end of file
+export { SyncQueue, TrawlQueue, NocapdQueue, RestApiQueue, QueueInit, BullMQ } from './src/queues.js'
+export { Scheduler } from './src/scheduler.js'
+export { RetryManager } from './src/retry.js'
\ No newline at end of file
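
A consumer of the reworked export surface might look like this (a sketch; assumes a Redis instance is reachable via the env vars `RedisConnectionDetails` reads, and `wss://relay.example.com` is a placeholder):

```js
import { TrawlQueue } from '@nostrwatch/controlflow'

const { $Queue, $QueueEvents } = TrawlQueue()
await $Queue.add('trawl', { relay: 'wss://relay.example.com' })   // BullMQ Queue#add
$QueueEvents.on('completed', ({ jobId }) => console.log('completed', jobId))
```
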
diff --git a/packages/controlflow/package.json b/packages/controlflow/package.json
index 5c2f6651..7f92f188 100644
--- a/packages/controlflow/package.json
+++ b/packages/controlflow/package.json
@@ -3,9 +3,11 @@
"version": "0.0.1",
"description": "Provides exports for application control flow",
"main": "index.js",
+ "type": "module",
"license": "MIT",
"dependencies": {
"bullmq": "4.14.2",
- "dotenv": "16.3.1"
+ "dotenv": "16.3.1",
+ "node-schedule": "2.1.1"
}
}
diff --git a/packages/controlflow/queues.js b/packages/controlflow/queues.js
deleted file mode 100644
index 9c78ee2d..00000000
--- a/packages/controlflow/queues.js
+++ /dev/null
@@ -1,29 +0,0 @@
-import dotenv from 'dotenv'
-import { Queue, QueueEvents, Worker } from 'bullmq';
-import { RedisConnectionDetails } from '@nostrwatch/utils'
-
-dotenv.config()
-
-const Trawler = (qopts={}) => {
- qopts = { connection: RedisConnectionDetails(), ...qopts }
- return new Queue('Trawler', qopts)
-}
-
-const Nocapd = (qopts={}) => {
- qopts = { connection: RedisConnectionDetails(), ...qopts }
- return new Queue('Nocapd', qopts)
-}
-
-const RestApi = (qopts={}) => {
- qopts = { connection: RedisConnectionDetails(), ...qopts }
- return new Queue('Nocapd', qopts)
-}
-
-export {
- Trawler,
- Nocapd,
- RestApi,
- Queue as BullQueue,
- QueueEvents as BullQueueEvents,
- Worker as BullWorker,
-}
\ No newline at end of file
diff --git a/packages/controlflow/scheduler.js b/packages/controlflow/scheduler.js
deleted file mode 100644
index 9b3ac250..00000000
--- a/packages/controlflow/scheduler.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class Scheduler {
- constructor(){
-
- }
-
- add(task, options){
-
- }
-}
-
-export default Scheduler
\ No newline at end of file
diff --git a/packages/controlflow/src/queues.js b/packages/controlflow/src/queues.js
new file mode 100644
index 00000000..e6ffd1de
--- /dev/null
+++ b/packages/controlflow/src/queues.js
@@ -0,0 +1,47 @@
+import dotenv from 'dotenv'
+import { Queue, QueueEvents, Worker } from 'bullmq';
+import { RedisConnectionDetails } from '@nostrwatch/utils'
+
+dotenv.config()
+
+const $ = {}
+
+export const TrawlQueue = (qopts={}) => {
+ return QueueInit('TrawlQueue', qopts)
+}
+
+export const NocapdQueue = (qopts={}) => {
+ return QueueInit('NocapdQueue', qopts)
+}
+
+export const SyncQueue = (qopts={}) => {
+ return QueueInit('SyncQueue', qopts)
+}
+
+export const RestApiQueue = (qopts={}) => {
+ return QueueInit('RestApiQueue', qopts)
+}
+
+export const QueueInit = (key, qopts={}) => {
+ if($?.[key]) return $[key]
+ qopts = { connection: RedisConnectionDetails(), ...qopts }
+ const $Queue = new Queue(key, qopts)
+ const $QueueEvents = new QueueEvents($Queue.name, { connection: RedisConnectionDetails() } )
+ $[key] = { $Queue, $QueueEvents, Worker }
+ return $[key]
+}
+
+export const BullMQ = {
+ Queue,
+ QueueEvents,
+ Worker
+}
+
+export default {
+ SyncQueue,
+ TrawlQueue,
+ NocapdQueue,
+ RestApiQueue,
+ QueueInit,
+ BullMQ
+}
\ No newline at end of file
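
Note the module-level `$` registry makes `QueueInit` idempotent per key, so repeated calls share one queue/events pair instead of opening fresh Redis connections:

```js
import { QueueInit } from '@nostrwatch/controlflow'

const a = QueueInit('NocapdQueue')
const b = QueueInit('NocapdQueue')
console.log(a === b)       // true: same { $Queue, $QueueEvents, Worker } triple
console.log(a.$Queue.name) // 'NocapdQueue'
```
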
diff --git a/packages/controlflow/src/retry.js b/packages/controlflow/src/retry.js
new file mode 100644
index 00000000..033ed7cf
--- /dev/null
+++ b/packages/controlflow/src/retry.js
@@ -0,0 +1,82 @@
+import relaycache from '@nostrwatch/relaycache'
+import { capitalize, loadConfig } from "@nostrwatch/utils"
+
+const rcache = relaycache(process.env.NWCACHE_PATH)
+
+const config = await loadConfig()
+
+export class RetryManager {
+
+ constructor(caller, action, relays) {
+ if(!caller) throw new Error('caller is required')
+ if(!action) throw new Error('action is required')
+ this.caller = caller
+ this.action = action
+ this.relays = relays? relays : []
+ this.retries = []
+ this.config = config?.[caller]?.[action]
+ }
+
+
+ cacheId(url){
+ return `${capitalize(this.caller)}:${url}`
+ }
+
+ async init(){
+ const relays = this.relays.length? this.relays: await rcache.relays.get.all()
+ const persisted = []
+ for await(const relay of relays) {
+ const url = relay.url
+      const retries = await rcache.retry.get( this.cacheId(url) )
+ if(retries === null)
+ persisted.push(await rcache.retry.set(this.cacheId(url), 0))
+ }
+ return persisted
+ }
+
+ expiry(retries){
+ if(retries === null) return 0
+ let map
+ if(this.config?.expiry && this.config.expiry instanceof Array )
+ map = this.config.expiry.map( entry => { return { max: entry.max, delay: parseInt(eval(entry.delay)) } } )
+ else
+ map = [
+ { max: 3, delay: 1000 * 60 * 60 },
+ { max: 6, delay: 1000 * 60 * 60 * 24 },
+ { max: 13, delay: 1000 * 60 * 60 * 24 * 7 },
+ { max: 17, delay: 1000 * 60 * 60 * 24 * 28 },
+ { max: 29, delay: 1000 * 60 * 60 * 24 * 90 }
+ ];
+ const found = map.find(entry => retries <= entry.max);
+ return found ? found.delay : map[map.length - 1].delay;
+ };
+
+ async getExpiredRelays(lastCheckedFn, relays=[]){
+ relays = relays?.length? relays: this.relays?.length? this.relays: await rcache.relays.get.all()
+ if(!(lastCheckedFn instanceof Function)) throw new Error('lastCheckedFn (arg[1]) must be a function')
+ const relayStatuses = await Promise.all(relays.map(async relay => {
+ const url = relay.url;
+ const lastChecked = rcache.cachetime.get.one(lastCheckedFn(url))
+ if (!lastChecked) return { relay, isExpired: true };
+ const retries = await rcache.retry.get(this.cacheId(url));
+ const isExpired = lastChecked < Date.now() - this.expiry(retries);
+ return { relay, isExpired };
+ }));
+ return relayStatuses.filter(r => r.isExpired).map(r => r.relay);
+ }
+
+ async getRetries( url ){
+ return await rcache.retry.get(this.cacheId(url))
+ }
+
+  async setRetries( url, success ){
+    let id
+    if(success) {
+      this.log?.debug(`${url} did not require a retry`)
+      id = await rcache.retry.set(this.cacheId(url), 0)
+    } else {
+      this.log?.debug(`${url} required a retry`)
+      id = await rcache.retry.increment(this.cacheId(url))
+    }
+    return id
+  }
+}
\ No newline at end of file
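
The default backoff ladder in `expiry()` reads as follows (a sketch; assumes no `expiry` override in config and that the cache/config environment the module loads at import time is set up):

```js
import { RetryManager } from '@nostrwatch/controlflow'

// retries 0–3   -> recheck after 1 hour
// retries 4–6   -> 1 day
// retries 7–13  -> 1 week
// retries 14–17 -> 4 weeks
// retries 18+   -> 90 days (map fallback)
const retry = new RetryManager('nocapd', 'check')
console.log(retry.expiry(5))    // 86400000 (1 day)
console.log(retry.expiry(null)) // 0 (never checked -> due immediately)
```
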
diff --git a/packages/controlflow/src/scheduler.js b/packages/controlflow/src/scheduler.js
new file mode 100644
index 00000000..d2d742d3
--- /dev/null
+++ b/packages/controlflow/src/scheduler.js
@@ -0,0 +1,92 @@
+import schedule from 'node-schedule'
+
+export class Scheduler {
+ constructor(workers) {
+ this.workers = workers;
+ this.analysis = {};
+ this.schedules = {};
+ this.analyzeAndCacheWorkers();
+ this.createSchedules();
+ }
+
+ analyzeAndCacheWorkers() {
+ this.workers.sort((a, b) => a.interval - b.interval);
+ const totalInterval = this.workers.reduce((sum, worker) => sum + worker.interval, 0);
+ let cumulativeOffset = 0;
+ this.workers.forEach(worker => {
+ // console.log(worker)
+ this.analysis[worker.name] = {
+ interval: worker.interval,
+ offset: this.calculateBestOffset(worker, cumulativeOffset, totalInterval),
+ handler: worker.handler
+ };
+ cumulativeOffset += this.analysis[worker.name].offset;
+ });
+ }
+
+ calculateBestOffset(worker, currentOffset, totalInterval) {
+ // Calculate an ideal gap between tasks
+ const idealGap = totalInterval / this.workers.length;
+ // Start by proposing an offset that spaces out the tasks evenly
+ let proposedOffset = currentOffset + idealGap;
+ // Adjust the proposed offset to avoid as much overlap as possible
+ // This loop tries to find a spot where the current task is least likely to collide with others
+ while (this.isCollision(proposedOffset, worker.interval, totalInterval)) {
+ proposedOffset = (proposedOffset + worker.interval) % totalInterval;
+ }
+ return proposedOffset % totalInterval;
+ }
+
+ isCollision(proposedOffset, interval, totalInterval) {
+ // Check if the proposed offset collides with other tasks
+ for (let otherWorkerName in this.analysis) {
+ const otherWorker = this.analysis[otherWorkerName];
+ if (this.doIntervalsOverlap(proposedOffset, interval, otherWorker.offset, otherWorker.interval, totalInterval)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ doIntervalsOverlap(start1, length1, start2, length2, totalLength) {
+ // Simplified check for overlap between two intervals on a circular timeline
+ const end1 = (start1 + length1) % totalLength;
+ const end2 = (start2 + length2) % totalLength;
+ if (start1 <= end1) {
+ // Case 1: Interval 1 does not wrap around
+ return (start2 < end1 && end2 > start1);
+ } else {
+ // Case 2: Interval 1 wraps around
+ return (start2 < end1 || end2 > start1);
+ }
+ }
+
+
+  createSchedules() {
+    Object.keys(this.analysis).forEach(name => {
+      const worker = this.analysis[name];
+      // Calculate the initial start time based on the current time and the offset
+      const startTime = new Date(Date.now() + worker.offset);
+      // Use an object spec ({ start, rule }); RecurrenceRule has no start/rule properties
+      const spec = {
+        start: startTime,
+        rule: `*/${Math.round(worker.interval / 1000)} * * * * *` // interval in seconds
+      };
+      // Schedule the job
+      this.schedules[name] = schedule.scheduleJob(spec, worker.handler);
+    });
+  }
+
+ getAll() {
+ return this.schedules;
+ }
+
+ get(name) {
+ return this.schedules[name];
+ }
+
+  gracefulShutdown() {
+    // schedule.gracefulShutdown() is global and takes no job argument; cancel jobs individually
+    Object.values(this.schedules).forEach(job => job.cancel());
+  }
+}
diff --git a/packages/logger/index.js b/packages/logger/index.js
index 106b748e..7ed32324 100644
--- a/packages/logger/index.js
+++ b/packages/logger/index.js
@@ -8,7 +8,7 @@ export default class Logger {
constructor(name, log_level="INFO", split_logs=false) {
this.logger = createLogger?.default? createLogger.default(name): createLogger(name)
- this.log_level = new String(config.log_level? config.log_level : log_level).toUpperCase();
+ this.log_level = new String(config?.log_level? config.log_level : log_level).toUpperCase();
this.split_logs = split_logs || false
}
diff --git a/packages/nocap/adapters/default/DnsAdapterDefault/index.js b/packages/nocap/adapters/default/DnsAdapterDefault/index.js
index 0a4ee182..27261fb3 100644
--- a/packages/nocap/adapters/default/DnsAdapterDefault/index.js
+++ b/packages/nocap/adapters/default/DnsAdapterDefault/index.js
@@ -9,14 +9,16 @@ class DnsAdapterDefault {
async check_dns(){
let result, data = {}
if(this.$.results.get('network') !== 'clearnet')
- return this.$.logger.warn('DNS check skipped for url not accessible over clearnet')
+ return this.$.logger.debug('DNS check skipped for url not accessible over clearnet')
let err = false
- let url = this.$.url.replace('wss://', '').replace('ws://', '')
+ let url = this.$.url.replace('wss://', '').replace('ws://', '').replace(/\/+$/, '');
const query = `https://1.1.1.1/dns-query?name=${url}`
const headers = { accept: 'application/dns-json' }
- const response = await fetch( query, { headers } ).catch((e) => { result = { status: "error", message: e.message, data } })
+ const response = await fetch( query, { headers } ).catch((e) => { result = { status: "error", message: e.message, data } })
data = await response.json()
- if(!result)
+    if(!data?.Answer || data.Answer.length === 0 || Object.keys(data.Answer[0]).length === 0)
+ result = { status: "error", message: "No DNS Answer" }
+ else
result = { status: "success", data }
this.$.finish('dns', result)
}
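
For context, the `application/dns-json` reply from Cloudflare that the new guard inspects looks roughly like this (abridged; `relay.example.com` is a placeholder):

```js
// abridged shape of a 1.1.1.1 DoH reply; the guard errors on a missing/empty Answer
const sample = {
  Status: 0,
  Answer: [
    { name: 'relay.example.com', type: 1, TTL: 300, data: '203.0.113.7' }
  ]
}
```
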
diff --git a/packages/nocap/adapters/default/GeoAdapterDefault/index.js b/packages/nocap/adapters/default/GeoAdapterDefault/index.js
index 11f1b02e..7733ff52 100644
--- a/packages/nocap/adapters/default/GeoAdapterDefault/index.js
+++ b/packages/nocap/adapters/default/GeoAdapterDefault/index.js
@@ -9,12 +9,15 @@ import { fetch } from 'cross-fetch'
let endpoint
const ips = this.$.results.getIps('ipv4')
const ip = ips[ips?.length-1]
+ const apiKey = process.env?.IP_API_KEY
+ //todo, enable override via options
+ const fields = 'continent,continentCode,countryCode,regionName,city,lat,lon,isp,as,asname,query'
if(typeof ip !== 'string')
return this.$.finish('geo', { status: "error", message: 'No IP address. Run `dns` check first.', data: {} })
- if(this.config?.auth?.ip_api_key)
- endpoint = `https://pro.ip-api.com/json/${ip}?key=${this.config.auth.ip_api_key}`
+ if(apiKey)
+ endpoint = `https://pro.ip-api.com/json/${ip}?key=${apiKey}&fields=${fields}`
else
- endpoint = `http://ip-api.com/json/${ip}`
+ endpoint = `http://ip-api.com/json/${ip}?fields=${fields}`
const headers = { 'accept': 'application/json' }
const response = await fetch(endpoint, { headers }).catch(e => err=e)
delete response.query
diff --git a/packages/nocap/adapters/default/InfoAdapterDefault/index.js b/packages/nocap/adapters/default/InfoAdapterDefault/index.js
index 048028c5..e6b90ca2 100644
--- a/packages/nocap/adapters/default/InfoAdapterDefault/index.js
+++ b/packages/nocap/adapters/default/InfoAdapterDefault/index.js
@@ -15,7 +15,7 @@ class InfoAdapterDefault {
url.protocol = 'https:'
- this.$.timeouts.create('info', this.$.config.info_timeout, () => controller.abort())
+ this.$.timeouts.create('info', this.$.config.timeout.info, () => controller.abort())
try {
const response = await fetch(url.toString(), { method, headers, signal })
data = await response.json()
diff --git a/packages/nocap/adapters/default/SslAdapterDefault/index.js b/packages/nocap/adapters/default/SslAdapterDefault/index.js
index e2c93150..dfeefb05 100644
--- a/packages/nocap/adapters/default/SslAdapterDefault/index.js
+++ b/packages/nocap/adapters/default/SslAdapterDefault/index.js
@@ -11,11 +11,11 @@ class SslAdapterDefault {
let result, data = {}
const url = new URL(this.$.url)
const hostname = url.hostname
- const timeout = this.$.config?.ssl_timeout? this.$.config.ssl_timeout: 1000
+    const timeout = this.$.config?.timeout?.ssl? this.$.config.timeout.ssl: 1000
const sslCheckerResponse = await sslChecker(hostname, this.sslCheckerOptions(url.port)).catch( (e) => { result = { status: "error", status: "error", message: e.message, data } } )
const sslCertificateResponse = await sslCertificate.get(hostname, timeout).catch( (e) => { result = { status: "error", message: e.message, data } } )
- data.days_remaining = sslCheckerResponse.daysRemaining
- data.valid = sslCheckerResponse.valid
+    data.days_remaining = sslCheckerResponse?.daysRemaining ?? null
+    data.valid = sslCheckerResponse?.valid ?? null
data = {...data, ...sslCertificateResponse }
if(!result)
result = { status: "success", data }
diff --git a/packages/nocap/adapters/default/WebsocketAdapterDefault/index.js b/packages/nocap/adapters/default/WebsocketAdapterDefault/index.js
index e9a112f1..0c4fe55d 100644
--- a/packages/nocap/adapters/default/WebsocketAdapterDefault/index.js
+++ b/packages/nocap/adapters/default/WebsocketAdapterDefault/index.js
@@ -1,4 +1,6 @@
+import "websocket-polyfill";
import WebSocket from 'ws';
+import { WebsocketTor } from 'ws-tor'
class WebsocketAdapterDefault {
@@ -13,7 +15,14 @@ class WebsocketAdapterDefault {
* @returns promise
*/
async check_connect(deferred){
- this.$.set('ws', new WebSocket(this.$.url))
+ let $ws
+
+ // if(this.$.results.network === 'tor')
+ // $ws = new WebsocketTor(this.$.url, { socksHost: this.$.config?.tor?.host, socksPort: this.$.config?.tor?.port })
+ // else
+ $ws = new WebSocket(this.$.url)
+
+ this.$.set('ws', $ws)
this.bind_events()
return deferred
}
@@ -48,9 +57,15 @@ class WebsocketAdapterDefault {
* @returns null
*/
bind_events(){
- this.$.ws.on('open', (e) => this.$.on_open(e))
- this.$.ws.on('message', (ev) => this.handle_nostr_event(ev))
- this.$.ws.on('close', (e) => this.$.on_close(e))
+ try {
+ this.$.ws.on('open', (e) => this.$.on_open(e))
+ this.$.ws.on('message', (ev) => this.handle_nostr_event(ev))
+ this.$.ws.on('close', (e) => this.$.on_close(e))
+ this.$.ws.on('error', (...args) => this.$.on_error(...args))
+ }
+ catch(e) {
+ this.$.log.warn(e)
+ }
}
/**
diff --git a/packages/nocap/package.json b/packages/nocap/package.json
index 19028fc5..73985934 100644
--- a/packages/nocap/package.json
+++ b/packages/nocap/package.json
@@ -12,7 +12,9 @@
"fetch-h2": "3.0.2",
"get-ssl-certificate": "2.3.3",
"jest": "29.7.0",
+ "object-hash": "3.0.0",
"promise-deferred": "2.0.4",
+ "socks-proxy-agent": "8.0.2",
"ssl-checker": "2.0.8",
"ssl-validator": "3.0.0",
"vitest": "0.34.6"
diff --git a/packages/nocap/src/classes/Base.js b/packages/nocap/src/classes/Base.js
index 9e5e0e65..2aacddb0 100644
--- a/packages/nocap/src/classes/Base.js
+++ b/packages/nocap/src/classes/Base.js
@@ -10,6 +10,7 @@ import { SessionHelper } from "./SessionHelper.js";
import { TimeoutHelper } from "./TimeoutHelper.js";
import { LatencyHelper } from "./LatencyHelper.js";
import { DeferredWrapper } from "./DeferredWrapper.js";
+import { Counter } from "./Counter.js";
import AllDefaultAdapters from "@nostrwatch/nocap-all-adapters-default"
@@ -26,8 +27,8 @@ import SAMPLE_EVENT from "../data/sample_event.js"
export default class {
- constructor(url, config) {
-
+ constructor(url, config={}) {
+
this.url = url
this.ws = null //set by adapter, needed for conn. status. might be refactored.
this.$instance = null //placeholder for adapters to use for storing a pre-initialized instance
@@ -47,10 +48,12 @@ export default class {
this.latency = new LatencyHelper(this.session)
this.promises = new DeferredWrapper(this.session, this.timeouts)
this.logger = new Logger(url, this.config.logLevel)
+ this.count = new Counter(this.session, [...this.checks])
this.customChecks = {}
//
this.SAMPLE_EVENT = SAMPLE_EVENT
//
+ this.hard_fail = false
this.results.set('url', url)
this.results.set('network', parseRelayNetwork(url))
this.logger.debug(`constructor(${url}, ${JSON.stringify(config)})`)
@@ -78,17 +81,24 @@ export default class {
async check(keys, raw=true){
let result
+ if(!this.session.initial){
+ this.hard_fail = false
+ this.results.reset({ url: this.url, network: this.network })
+ this.session.create()
+ }
if(keys == "all") {
- console.log('check all')
return this.check(this.checks)
}
else if(typeof keys === 'string') {
result = await this._check(keys)
+ this.close()
}
else if(keys instanceof Array && keys.length) {
for(const key of keys){
- await this._check(key)
+ if(this.hard_fail !== true)
+ await this._check(key)
}
+ this.close()
result = this.results.raw(keys)
}
else {
@@ -108,9 +118,18 @@ export default class {
return result
}
+ maybeTimeoutReject(key){
+ return (reject) => {
+ if(this.isWebsocketKey(key))
+        return reject({ data: false, duration: -1, status: "error", message: `Websocket connection to relay timed out (after ${this.config.timeout[key]}ms)` })
+      else
+        return reject({ data: {}, duration: -1, status: "error", message: `${key} check timed out (after ${this.config.timeout[key]}ms)` })
+ }
+ }
+
async start(key){
this.logger.debug(`${key}: start()`)
- const deferred = this.addDeferred(key)
+ const deferred = await this.addDeferred(key, this.maybeTimeoutReject(key))
const adapter = this.routeAdapter(key)
if( typeof key !== 'string')
@@ -128,14 +147,12 @@ export default class {
})
.catch((precheck) => {
if(key === 'connect' && precheck.status == "error" && precheck?.result){
- this.logger.debug(`${key}: precheck rejected with cached result`)
- this.logger.warn(`Precheck found that connect check was already fulfilled, returning cached result`)
+ this.logger.debug(`${key}: Precheck found that connect check was already fulfilled, returning cached result`)
this.promises.get(key).resolve(precheck.result)
}
else if(precheck.status == "error") {
- this.logger.debug(`${key}: precheck rejected with error`)
- this.logger.err(`Error in ${key} precheck: ${precheck.error}`)
- this.promises.get(key).resolve({ [key]: false, [`${key}Latency`]: -1, ...precheck })
+ this.logger.debug(`${key} precheck failed: ${precheck.message}`)
+ this.promises.get(key).resolve({ [key]: false, [`${key}Duration`]: -1, ...precheck })
}
else {
throw new Error(`start(): precheck rejection for ${key} should not ever get here: ${JSON.stringify(precheck)}`)
@@ -145,26 +162,69 @@ export default class {
return deferred.promise
}
+ websocket_hard_fail(){
+ this.logger.debug(`websocket_hard_fail(): ${this.url}`)
+ const wschecks = ['connect', 'read', 'write']
+ wschecks.forEach(key => {
+ this.results.set(key, { data: false, duration: -1, status: "error", message: "Websocket connection failed" })
+ })
+ const promise = this.promises.get(this.current)
+ if(!promise) return this.logger.warn(`websocket_hard_fail(): No promise found for ${this.current} check on ${this.url}`)
+ promise.resolve(this.results.get(this.current))
+ this.current = null
+ }
+
async finish(key, data={}){
this.logger.debug(`${key}: finish()`)
this.current = null
this.latency.finish(key)
- const url = this.results.get('url')
- const network = this.results.get('network')
- const adapter_key = this.routeAdapter(key)
- const adapter_name = this.adapters[adapter_key].constructor.name
- const adapters = [ ...new Set( this.results.get('adapters').concat([adapter_name]) ) ]
- const checked_at = Date.now()
- data.duration = this.latency.duration(key)
- const result = { url, network, checked_at, adapters, [key]: {...data} }
+ const result = this.produce_result(key, data)
+ if(this.skip_result(key)) return
this.results.setMany(result)
this.promises.get(key).resolve(result)
this.on_change()
}
+ skip_result(key){
+ let skip = false
+ let reason
+ if(this.promises.reflect(key).state.isRejected){
+ skip = true
+ reason = 'rejected'
+ }
+ if(this.promises.reflect(key).state.isFulfilled){
+ skip = true
+ reason = 'already fulfilled'
+ }
+ if(!skip) return
+
+ this.logger.warn(`Skipping ${key} check because it was ${reason} when finish() was called`)
+ return true
+ }
+
+ produce_result(key, data={}){
+ const result = {}
+ const adapter_key = this.routeAdapter(key)
+ const adapter_name = this.adapters[adapter_key].constructor.name
+
+ result.url = this.results.get('url')
+ result.network = this.results.get('network')
+ result.adapters = [ ...new Set( this.results.get('adapters').concat([adapter_name]) ) ]
+ result.checked_at = Date.now()
+ result.checked_by = this.config.checked_by
+ data.duration = this.latency.duration(key)
+ result[key] = { ...data }
+
+ return result
+ }
+
+ isWebsocketKey(key){
+ return ['connect', 'read', 'write'].includes(key)
+ }
+
async precheck(key){
- const deferred = this.addDeferred(`precheck_${key}`)
- const needsWebsocket = ['connect', 'read', 'write'].includes(key)
+ const deferred = await this.addDeferred(`precheck_${key}`)
+ const needsWebsocket = this.isWebsocketKey(key)
const keyIsConnect = key === 'connect'
const resolvePrecheck = deferred.resolve
const rejectPrecheck = deferred.reject
@@ -182,8 +242,8 @@ export default class {
const prechecker = async () => {
this.logger.debug(`${key}: prechecker(): needs websocket: ${needsWebsocket}, key is connect: ${keyIsConnect}, connectAttempted: ${connectAttempted}`)
+
//Doesn't need websocket. Resolve precheck immediately.
-
if( !needsWebsocket ){
this.logger.debug(`${key}: prechecker(): doesn't need websocket. Continue to ${key} check`)
return resolvePrecheck()
@@ -217,12 +277,12 @@ export default class {
// this.logger.debug(`precheck(${key}):prechecker():websocket is open, key is connect`)
rejectPrecheck({ status: "error", message: 'Cannot check connect because websocket is already connected, returning cached result'})
}
-
//Websocket is not connecting, key is not connect
if( !keyIsConnect && !this.isConnected()) {
this.logger.debug(`${key}: prechecker(): websocket is not connecting, key is not connect`)
return rejectPrecheck({ status: "error", message: `Cannot check ${key}, no active websocket connection to relay` })
}
+
this.logger.debug(`${key}: Made it here without resolving or rejecting precheck. You missed something.`)
}
await prechecker()
@@ -305,9 +365,24 @@ export default class {
on_error(err){
this.cbcall('error')
this.track('relay', 'error', err)
- this?.handle_error(err)
+ if(this?.handle_error)
+ this?.handle_error(err)
}
+ /**
+ * handle_error
+ * Standard Websocket handler triggered by ws.on_error
+ * @private
+ * @returns null
+ */
+ handle_error(){
+ // this.unsubscribe()
+ // this.close()
+ this.websocket_hard_fail()
+ // this.finish(this.current, { [this.current]: false, duration: -1 }, this.promises.get(this.current).reject)
+ }
+
+
/**
* on_close
* Standard WebSocket event triggered by Adapter
@@ -329,7 +404,6 @@ export default class {
* @returns null
*/
on_event(subid, ev){
- this.unsubscribe(subid)
this.track('relay', 'event', ev.id)
if(this?.adapters?.relay?.handle_event)
this.adapters.relay.handle_event(subid, ev)
@@ -344,7 +418,7 @@ export default class {
* @returns null
*/
on_notice(notice){
- this.logger.info(notice)
+ this.logger.debug(notice)
this.track('relay', 'notice', notice)
this.cbcall('notice')
if(this?.adapters?.relay?.handle_notice)
@@ -361,9 +435,8 @@ export default class {
on_eose(eose){
this.cbcall('eose')
this.track('relay', 'eose', eose)
- this.handle_eose(eose)
if(this.promises.reflect('read').state.isPending)
- this?.logger.warn(`received EOSE event but read promise is pending`)
+ this.handle_eose(eose)
}
/**
@@ -376,9 +449,8 @@ export default class {
on_ok(ok){
this.cbcall('ok')
this.handle_ok(ok)
- this.handle_write_check(true)
if(this.promises.reflect('write').state.isPending)
- this?.logger.warn(`received OK event but write promise is pending`)
+ this.handle_write_check(true)
}
/**
@@ -391,7 +463,7 @@ export default class {
on_auth(challenge){
this.cbcall('auth', challenge)
this.track('relay', 'auth', challenge)
- this?.handle_auth(challenge)
+ this.handle_auth(challenge)
}
/**
@@ -414,7 +486,7 @@ export default class {
* @returns null
*/
on_change(){
- this.cbcall('change', this.result)
+ this.cbcall('change', this.results)
}
/**
@@ -424,7 +496,7 @@ export default class {
* @returns null
*/
handle_connect_check(data){
- this.finish('connect', { data }, this.promises.get('connect').resolve)
+ this.finish('connect', { data })
}
/**
@@ -434,7 +506,8 @@ export default class {
* @returns null
*/
handle_read_check(data){
- this.finish('read', { data }, this.promises.get('read').resolve)
+ this.unsubscribe(this.subid('read'))
+ this.finish('read', { data })
}
/**
@@ -444,9 +517,20 @@ export default class {
* @returns null
*/
handle_write_check(data){
- this.finish('write', { data }, this.promises.get('write').resolve)
+ this.finish('write', { data })
+ }
+
+ /**
+ * handle_auth
+ * Implementation specific handler triggered by Hooks proxy-handler
+ * @private
+ * @returns null
+ */
+ handle_auth(challenge){
+
}
+
/**
* handle_on
* Nostr handler called by Base proxy-handler
@@ -464,7 +548,7 @@ export default class {
* @returns null
*/
handle_eose(){
-
+ this.handle_read_check(true)
}
/**
@@ -520,10 +604,11 @@ export default class {
return this.ws?.readyState && this.ws.readyState === WebSocket.CLOSED ? true : false
}
- addDeferred(key){
+ async addDeferred(key, cb=()=>{}){
const existingDeferred = this.promises.exists(key)
- if(!existingDeferred)
- this.promises.add(key, this.config?.[`${key}_timeout`])
+ if(existingDeferred)
+ await this.promises.get(key).promise
+ this.promises.add(key, this.config?.timeout?.[key], cb)
return this.promises.get(key)
}
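
For orientation, a usage sketch of the reshaped config (the flat `*_timeout` keys are gone in favor of a nested `timeout` object; the pubkey and relay URL below are placeholders):

```js
import { Nocap } from '@nostrwatch/nocap'

const nocap = new Nocap('wss://relay.example.com', {
  checked_by: '<daemon pubkey hex>',          // stamped into each result
  timeout: { connect: 10000, read: 10000, write: 10000 }
})
const result = await nocap.check(['connect', 'read', 'write'])
console.log(result.connect, result.connect_duration)  // raw() flattens per-check data
```
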
diff --git a/packages/nocap/src/classes/Base.test.js b/packages/nocap/src/classes/Base.test.js
index 85057689..12ffd06a 100644
--- a/packages/nocap/src/classes/Base.test.js
+++ b/packages/nocap/src/classes/Base.test.js
@@ -16,14 +16,6 @@ afterAll(() => {
nocap = null
});
-beforeEach(async () => {
-
-})
-
-afterEach(async () => {
-
-})
-
describe("Nocap class", () => {
let nocap = new Nocap(url);
@@ -204,6 +196,7 @@ describe("Nocap class", () => {
const method = 'connect'
it("defaults should return connect result", async () => {
const response = await nocap.check(method)
+ console.log('check(connect)', response)
expect(response).toBeTypeOf('object');
expect(response).toHaveProperty(method);
expect(response[method]).toBeTypeOf('object');
diff --git a/packages/nocap/src/classes/Counter.js b/packages/nocap/src/classes/Counter.js
new file mode 100644
index 00000000..7120b563
--- /dev/null
+++ b/packages/nocap/src/classes/Counter.js
@@ -0,0 +1,52 @@
+export class Counter {
+ constructor($session, checks) {
+ this.checks = checks;
+ this.$session = $session;
+ this.counts = {};
+ this.init()
+ }
+
+ init(){
+ this.checks.forEach(check => {
+ this.setup()
+ this[check] = {}
+ this[check].add = (count) => this.add(check, count)
+ this[check].subtract = (count) => this.subtract(check, count)
+ this[check].get = () => this.get(check)
+ })
+ }
+
+  add(check, count) {
+    this.setup()
+    if (!this.checks.includes(check)) throw new Error(`Invalid check ${check}`);
+    if (!this.counts[this.session()][check]) this.counts[this.session()][check] = 0;
+    this.counts[this.session()][check] += count;
+  }
+
+  subtract(check, count) {
+    this.setup()
+    if (!this.checks.includes(check)) throw new Error(`Invalid check ${check}`);
+    if (!this.counts[this.session()][check]) this.counts[this.session()][check] = 0;
+    this.counts[this.session()][check] -= count;
+  }
+
+  get(check) {
+    this.setup()
+    if (!this.checks.includes(check)) throw new Error(`Invalid check ${check}`);
+    return this.counts[this.session()][check];
+  }
+
+  total() {
+    this.setup()
+    return this.checks.reduce((total, check) => total + (this.get(check) || 0), 0);
+  }
+
+  session(){
+    return this.$session.get()
+  }
+
+  setup(){
+    // counts are namespaced by session id; checks stays the static list of check names
+    if(!this.counts?.[this.session()])
+      this.counts[this.session()] = {}
+  }
+}
\ No newline at end of file
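
A usage sketch of the per-check counters (the session object here is a stand-in for `SessionHelper`):

```js
const session = { get: () => 'session-1' } // stand-in for SessionHelper
const counter = new Counter(session, ['connect', 'read', 'write'])

counter.connect.add(1)   // sugar for counter.add('connect', 1)
counter.read.add(2)
console.log(counter.connect.get()) // 1
console.log(counter.total())       // 3
```
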
diff --git a/packages/nocap/src/classes/DeferredWrapper.js b/packages/nocap/src/classes/DeferredWrapper.js
index 7239cbfe..2b87cd4b 100644
--- a/packages/nocap/src/classes/DeferredWrapper.js
+++ b/packages/nocap/src/classes/DeferredWrapper.js
@@ -11,13 +11,15 @@ export class DeferredWrapper {
const deferred = this.create(key)
if(timeout)
this.timeout.create(key, timeout, () => {
- this.reject(key, { timeout: true } )
if(timeoutCb instanceof Function) {
try {
- timeoutCb()
+ timeoutCb(deferred.reject)
}
catch(e) { this.logger.error(`error in timeout callback for ${key}: ${e.message}` ) }
}
+ else {
+ this.reject(key, { status: "error", message: `timeout of ${timeout}ms exceeded for ${key}` })
+ }
})
return deferred
}
diff --git a/packages/nocap/src/classes/SessionHelper.js b/packages/nocap/src/classes/SessionHelper.js
index 096b1ec5..27b1672d 100644
--- a/packages/nocap/src/classes/SessionHelper.js
+++ b/packages/nocap/src/classes/SessionHelper.js
@@ -4,6 +4,7 @@ import { random } from '../utils.js'
export class SessionHelper {
constructor(){
this.init()
+ this.initial = true
}
init(){
@@ -15,10 +16,11 @@ export class SessionHelper {
this.id.write = murmurhash.v3('write', this.salt)
this.id.info = murmurhash.v3('info', this.salt)
this.id.geo = murmurhash.v3('geo', this.salt)
+ this.initial = false
return this.id
}
- new(){
+ create(){
return this.init()
}
diff --git a/packages/nocap/src/classes/SessionHelper.test.js b/packages/nocap/src/classes/SessionHelper.test.js
index 13149270..d5774d13 100644
--- a/packages/nocap/src/classes/SessionHelper.test.js
+++ b/packages/nocap/src/classes/SessionHelper.test.js
@@ -9,7 +9,6 @@ describe('SessionHelper', () => {
beforeEach(() => {
sessionHelper = new SessionHelper();
- console.log(sessionHelper.salt, typeof sessionHelper.salt)
});
afterEach(() => {
@@ -25,10 +24,7 @@ describe('SessionHelper', () => {
const random2 = murmurhash.v3(random(50))
const key1 = murmurhash.v3('key', random1)
const key2 = murmurhash.v3('key', random2)
-
- console.log(random1, random2)
expect(random1 === random2).toBe(false)
- console.log(key1, key2)
expect(key1 === key2).toBe(false)
})
@@ -64,9 +60,9 @@ describe('SessionHelper', () => {
const oldIds = Object.assign({}, sessionHelper.id)
console.log(oldIds)
- sessionHelper.new();
+ sessionHelper.create();
- console.log(sessionHelper.new())
+ console.log(sessionHelper.create())
expect(sessionHelper.id.session).not.toEqual(oldIds.session);
expect(sessionHelper.id.connect).not.toEqual(oldIds.connect);
diff --git a/packages/nocap/src/classes/TimeoutHelper.js b/packages/nocap/src/classes/TimeoutHelper.js
index 99acc5dc..9915541a 100644
--- a/packages/nocap/src/classes/TimeoutHelper.js
+++ b/packages/nocap/src/classes/TimeoutHelper.js
@@ -12,6 +12,14 @@ export class TimeoutHelper {
this.setup()
}
+ get(key){
+ return this.timeouts[this.session.get()][key]
+ }
+
+ has(key){
+ return this.timeouts?.[this.session.get()]?.[key]? true : false
+ }
+
create(key, timeout=1000, timeoutCb=()=>{}){
if(!this.timeouts?.[this.session.get()])
this.timeouts[this.session.get()] = {}
@@ -20,7 +28,7 @@ export class TimeoutHelper {
try {
timeoutCb()
}
- catch(e) { this.logger.error(`error in timeout callback for ${key}: ${e.message}` ) }
+ catch(e) { throw new Error(`error in timeout callback for ${key}: ${e.message}` ) }
}
}, timeout)
}
diff --git a/packages/nocap/src/classes/Validator.js b/packages/nocap/src/classes/Validator.js
index 36c9049c..1ac3256f 100644
--- a/packages/nocap/src/classes/Validator.js
+++ b/packages/nocap/src/classes/Validator.js
@@ -37,5 +37,9 @@ export class Validator {
return acc
}, {}) }
}
+
+  reset(initialValues={}){
+    Object.assign(this, this.defaults, initialValues)
+ }
}
\ No newline at end of file
diff --git a/packages/nocap/src/interfaces/ConfigInterface.js b/packages/nocap/src/interfaces/ConfigInterface.js
index fd192143..1fb51826 100644
--- a/packages/nocap/src/interfaces/ConfigInterface.js
+++ b/packages/nocap/src/interfaces/ConfigInterface.js
@@ -2,12 +2,18 @@ import { Validator } from '../classes/Validator.js'
export const ConfigDefaults = {
logLevel: 'info',
- connectTimeout: 5000,
- readTimeout: 5000,
- writeTimeout: 5000,
- infoTimeout: 5000,
- dnsTimeout: 5000,
- geoTimeout: 5000
+ checked_by: '',
+ timeout: {
+ connect: 10000,
+ read: 10000,
+ write: 10000,
+ info: 10000,
+ dns: 2000,
+ geo: 2000,
+ ssl: 2000
+ },
+ tor: {},
+ adapterOptions: {},
}
/**
@@ -29,7 +35,7 @@ export class ConfigInterface extends Validator {
}
set(key, value){
- this._get(key, value)
+ this._set(key, value)
}
}
\ No newline at end of file
diff --git a/packages/nocap/src/interfaces/ResultInterface.js b/packages/nocap/src/interfaces/ResultInterface.js
index 5dacf280..00f73637 100644
--- a/packages/nocap/src/interfaces/ResultInterface.js
+++ b/packages/nocap/src/interfaces/ResultInterface.js
@@ -3,6 +3,7 @@ export const ResultDefaults = {
network: "",
adapters: [],
checked_at: -1,
+ checked_by: "",
connect: {},
read: {},
write: {},
@@ -18,7 +19,7 @@ export class ResultInterface extends Validator {
constructor(){
super()
Object.assign(this, ResultDefaults)
- this.header_keys = ['url', 'created_at', 'network', 'adapters', 'checked_at']
+ this.header_keys = ['url', 'network', 'adapters', 'checked_at', 'checked_by']
this.defaults = Object.freeze(ResultDefaults)
}
@@ -31,7 +32,6 @@ export class ResultInterface extends Validator {
}
else {
result = {}
- console.log(k)
for(const key of k) {
const { data, duration } = this.get(key)
result = { ...result, [key]: data, [`${key}_duration`]: duration }
@@ -41,18 +41,6 @@ export class ResultInterface extends Validator {
}
get(key){
- // switch(key){
- // case "url":
- // case "checked_at":
- // case "adapters":
- // case "network":
- // case "connect":
- // case "read":
- // case "write":
- // return this._get(key)
- // default:
- // return this._get(key).data
- // }
return this._get(key)
}
@@ -70,7 +58,7 @@ export class ResultInterface extends Validator {
}
getIps(protocol='ipv4') {
- const answer = this.get('dns')?.Answer
+ const answer = this.get('dns')?.data?.Answer
if(!answer || !answer.length)
return []
const regex = {}
diff --git a/packages/nocapd/.gitignore b/packages/nocapd/.gitignore
new file mode 100644
index 00000000..3c00e74e
--- /dev/null
+++ b/packages/nocapd/.gitignore
@@ -0,0 +1,3 @@
+node_modules
+config.y*ml
+yarn.lock
\ No newline at end of file
diff --git a/packages/nocapd/docker-compose.yaml b/packages/nocapd/docker-compose.yaml
new file mode 100644
index 00000000..ebe3d210
--- /dev/null
+++ b/packages/nocapd/docker-compose.yaml
@@ -0,0 +1,26 @@
+version: "3"
+services:
+ # nocapd:
+ # container_name: trawler
+ # restart: unless-stopped
+ # build: .
+ # volumes:
+ # - ./nocapd.config.yaml:/etc/@nostrwatch/nocapd/config.yaml
+ # - ./.lmdb:/app/.lmdb
+ # environment:
+ # DOCKER: 'yes'
+ # env_file: .env
+ cache:
+ image: redis:latest
+ restart: always
+ ports:
+ - '6379:6379'
+ volumes:
+ - ./.redis:/.data:rw
+ env_file: .env
+ command: redis-server --loglevel $REDIS_LOGLEVEL
+volumes:
+ lmdb:
+ driver: local
+ cache:
+ driver: local
\ No newline at end of file
diff --git a/packages/nocapd/package.json b/packages/nocapd/package.json
new file mode 100644
index 00000000..0a0b38e5
--- /dev/null
+++ b/packages/nocapd/package.json
@@ -0,0 +1,17 @@
+{
+ "name": "@nostrwatch/nocapd",
+ "type": "module",
+ "version": "1.0.0",
+ "main": "index.js",
+ "license": "MIT",
+ "dependencies": {
+ "@nostr-fetch/adapter-nostr-tools": "0.14.1",
+ "chalk": "5.3.0",
+ "nostr-fetch": "0.14.1",
+ "nostr-geotags": "0.0.7",
+ "object-mapper": "6.2.0"
+ },
+ "scripts": {
+ "launch": "node src/index.js"
+ }
+}
diff --git a/packages/nocapd/scripts/genkeypair.js b/packages/nocapd/scripts/genkeypair.js
new file mode 100644
index 00000000..9c1acb0b
--- /dev/null
+++ b/packages/nocapd/scripts/genkeypair.js
@@ -0,0 +1,10 @@
+/*this should only ever be used for testing!!!!*/
+import { setEnvValue } from '@nostrwatch/utils'
+
+import { generatePrivateKey, getPublicKey } from 'nostr-tools'
+
+const PRIVATE_KEY = generatePrivateKey()
+const PUBLIC_KEY = getPublicKey(PRIVATE_KEY)
+
+setEnvValue('DAEMON_PUBKEY', PUBLIC_KEY)
+setEnvValue('DAEMON_PRIVKEY', PRIVATE_KEY)
\ No newline at end of file
diff --git a/packages/nocapd/src/classes/NocapWrapper.js b/packages/nocapd/src/classes/NocapWrapper.js
new file mode 100644
index 00000000..d6ad1387
--- /dev/null
+++ b/packages/nocapd/src/classes/NocapWrapper.js
@@ -0,0 +1,91 @@
+import { NocapdQueue } from '@nostrwatch/controlflow';
+import { Nocap, DeferredWrapper } from '@nostrwatch/nocap';
+
+export default class {
+ constructor(relay){
+ this.daemon_id = process.env?.NOCAPD_DAEMON_ID? process.env.NOCAPD_DAEMON_ID: 'unsetDaemonId'
+ this.relay = relay
+ this.$queue = new NocapdQueue()
+ this.nocap = new Nocap(this.relay)
+ this.nocap.checkOnline()
+ this.retries = {}
+ this.max_retries = 3
+ this.retry_every = 5000
+ this.deferreds = new DeferredWrapper()
+ }
+
+ async connect(){
+ this.nocap.check('connect')
+ }
+
+  async run(keys){
+    if(typeof keys === 'string')
+      keys = [keys]
+    const results = {}
+    for(const key of keys){
+      if(!(this[`run_${key}_check`] instanceof Function))
+        throw new Error(`No check method for ${key}`)
+      results[key] = await this[`run_${key}_check`]()
+    }
+    return results
+  }
+
+ async run_full_check(){
+ return this.nocap.check('all')
+ }
+
+  async run_websocket_check() {
+    let results = {}
+    // for...of (not forEach) so await is legal; also avoids ASI treating [...] as an index
+    for (const key of ['connect', 'read', 'write']) {
+      results = { ...results, ...await this.nocap.check(key) }
+    }
+    return results
+  }
+
+ async run_online_check(){
+ return this.nocap.results.getMany(['connect', 'connectLatency'])
+ }
+
+ async run_read_check(){
+ return this.nocap.check('read')
+ }
+
+ async run_write_check(){
+ return this.nocap.check('write')
+ }
+
+ async run_geo_check(concurrent=false){
+ if(!concurrent)
+ await this.wait_for_inactive()
+ return this.nocap.check('geo')
+ }
+
+ async run_info_check(concurrent=false){
+ if(!concurrent)
+ await this.wait_for_inactive()
+ return this.nocap.check('info')
+ }
+
+ async run_ssl_check(concurrent=false){
+ if(!concurrent)
+ await this.wait_for_inactive()
+ return this.nocap.check('ssl')
+ }
+
+ async run_dns_check(concurrent=false){
+ if(!concurrent)
+ await this.wait_for_inactive()
+ return this.nocap.check('dns')
+ }
+
+ async wait_for_inactive(){
+ return new Promise((resolve, reject) => {
+ if(this.nocap.isActive()){
+ setTimeout(() => {
+ this.wait_for_inactive().then(resolve)
+ }, 100)
+ } else {
+ resolve()
+ }
+ })
+ }
+}
+
diff --git a/packages/nocapd/src/classes/NocapdQueues.js b/packages/nocapd/src/classes/NocapdQueues.js
new file mode 100644
index 00000000..bfb1d6c0
--- /dev/null
+++ b/packages/nocapd/src/classes/NocapdQueues.js
@@ -0,0 +1,126 @@
+export class NocapdQueues {
+ constructor(config){
+ /** @type {object} */
+ this.managers = {}
+ /** @type {BullQueue} */
+ this.queue = null
+ /** @type {BullQueueEvents} */
+ this.events = null
+ /** @type {BullWorker} */
+ this.worker = null
+ /** @type {Scheduler} */
+ this.scheduler = null
+ /** @type {object} */
+ this.cb = {}
+
+ this.pubkey = config?.pubkey? config.pubkey: null
+
+ /** @type {array} */
+ this.worker_events = ['completed', 'failed', 'progress', 'stalled', 'waiting', 'active', 'delayed', 'drained', 'paused', 'resumed']
+
+ if(!this.pubkey)
+ throw new Error(`NocapdQueues requires a pubkey`)
+ }
+
+ route(job){
+ const { name } = job
+ const daemonManager = name.split('@')[0]
+ const daemonPubkey = name.split('@')[1]
+
+ if(daemonPubkey !== this.pubkey)
+ console.warn(`[route] ${daemonPubkey} !== ${this.pubkey}`)
+
+ if(!this.managers[daemonManager])
+ throw new Error(`No manager found for ${daemonManager}`)
+
+ return this.managers[daemonManager].work(job)
+ }
+
+ route_event(event, ...args){
+ const job = args[0]
+ let name = null
+
+ if(typeof job === 'object')
+ name = job.name
+
+ else if (typeof job === 'string')
+ name = job.split(':')[0]
+
+ if(name) {
+ const daemonManager = name.split('@')[0]
+ const daemonPubkey = name.split('@')[1]
+
+ // if(daemonPubkey !== this.pubkey)
+ // return this.log.warn(`[route_event] ${daemonPubkey} !== ${this.pubkey}`)
+
+ if(!this.managers[daemonManager])
+ throw new Error(`No manager found for ${daemonManager}`)
+
+ return this.managers[daemonManager].cbcall(event, ...args)
+ }
+ else {
+ this.cbcall(event, ...args)
+ }
+
+ }
+
+ setWorker($worker){
+ this.worker = $worker
+ this.bind_events()
+ }
+
+ bind_events(){
+ // if(!this.bindEvents) return
+ this.worker_events.forEach(handler => {
+ // console.log(`bind on_${handler} event handler on ${this.worker.name}:${this.constructor.name}`)
+ this.worker.on(handler, (...args) => this.route_event(handler, ...args))
+ })
+ }
+
+ on(event, handler){
+ this.cb[event] = handler.bind(this)
+ }
+
+ cbcall(...args){
+ const handler = [].shift.call(args)
+ if(this?.[`on_${handler}`] && typeof this[`on_${handler}`] === 'function')
+ this[`on_${handler}`](...args)
+ if(typeof this.cb[handler] === 'function')
+ this.cb[handler](...args)
+ }
+
+ async populateAll(){
+ const mkeys = Object.keys(this.managers)
+ console.log(`populateAll()`, mkeys)
+ for await ( const mkey of mkeys ){
+ // this.log.debug(`populateAll() -> ${mkey}:populator()`)
+ await this.managers[mkey]._populator()
+ }
+ }
+
+ pause(q){
+ if(q)
+ return this.queue?.[q].pause()
+ Object.keys(this.queue).forEach(q => this.queue[q].pause())
+ }
+
+  start(q){
+    if(q)
+      return this.queue?.[q].resume()
+    Object.keys(this.queue).forEach(q => this.queue[q].resume())
+  }
+
+ drain(q){
+ if(q)
+ return this.queue?.[q].drain()
+ Object.keys(this.queue).forEach(q => this.queue[q].drain())
+ }
+
+ obliterate(q){
+ if(q)
+ return this.queue?.[q].obliterate()
+ Object.keys(this.queue).forEach(q => this.queue[q].obliterate())
+ }
+}
\ No newline at end of file
diff --git a/packages/nocapd/src/classes/WorkerManager.js b/packages/nocapd/src/classes/WorkerManager.js
new file mode 100644
index 00000000..8fc89bec
--- /dev/null
+++ b/packages/nocapd/src/classes/WorkerManager.js
@@ -0,0 +1,201 @@
+import hash from 'object-hash'
+
+import { Nocap } from '@nostrwatch/nocap'
+import { delay } from '@nostrwatch/utils'
+
+import chalk from 'chalk';
+
+
+export class WorkerManager {
+ constructor($q, rcache, config){
+ // if(config?.id)
+ // throw new Error('WorkerManager needs an id')
+ /** @type {NWQueue} */
+ this.$ = $q
+
+ /** @type {db} */
+ this.rcache = rcache
+
+ /** @type {object} */
+ this.cb = {}
+
+ /** @type {string} */
+ this.pubkey = process.env?.DAEMON_PUBKEY
+
+ /** @type {number} */
+ this.priority = config?.priority? config.priority: 1
+
+ /** @type {number} */
+ this.concurrency = config?.concurrency? config.concurrency: 1
+
+ this.networks = config?.networks? config.networks: ['clearnet']
+
+ this.bindEvents = true
+
+ /** @type {number} */
+ this.timeout = config?.timeout? config.timeout: 5000
+
+ this.log = config?.logger? config.logger.logger: console
+
+ /** @type {function} */
+ this.scheduler = config?.scheduler? config.scheduler.bind(this): () => { console.warn(`scheduler not defined for ${this.id}`) }
+
+ /** @type {array} */
+ this.worker_events = ['completed', 'failed', 'progress', 'stalled', 'waiting', 'active', 'delayed', 'drained', 'paused', 'resumed']
+
+ /** @type {array} */
+ this.queue_events = ['active', 'completed', 'delayed', 'drained', 'error', 'failed', 'paused', 'progress', 'resumed', 'stalled', 'waiting']
+
+ /** @type {Nocap} */
+ this.Nocap = Nocap
+
+ if(!(this.on_completed instanceof Function))
+ throw new Error('WorkerManager on_completed needs to be a function')
+
+ this.log.info(`${this.id()} initialized`)
+
+ this.interval = 24*60*60*1000 //24h
+
+ this.expires = 24*60*60*1000 //24h
+
+ this.stats = setInterval( async () => await this.counts(), 30*1000 )
+
+ this.delay = delay
+
+ this.processed = 0
+
+ this.total = 0
+ }
+
+ calculateProgress() {
+ if (this.total === 0) return "0.00%"; // Prevent division by zero
+ let percentage = (this.processed / this.total) * 100;
+ return percentage.toFixed(2) + "%";
+ }
+
+  progressMessage(url, result={}, error=false){
+    const failure = chalk.red;
+    const success = chalk.bold.green;
+    const mute = chalk.gray
+    this.log.info(
+      `[${chalk.bgBlack(this.calculateProgress())}]`,
+      `${mute(this.processed)}/${mute(this.total)}`,
+      `${url}:`,
+      result?.connect?.data? success("online"): failure("offline"),
+      error? chalk.gray.italic('error'): ''
+    )
+  }
+
+ siblingKeys(){
+ return Object.keys(this.$.managers).filter(key => key !== this.constructor.name)
+ }
+
+ siblings(){
+ const result = {}
+ this.siblingKeys().forEach( key => {
+ result[key] = this.$.managers[key]
+ })
+ return result
+ }
+
+ id(workerKey){
+ if(!workerKey)
+ workerKey = this.slug()
+ return `${workerKey}@${this.pubkey}`
+ }
+
+ slug(){
+ return this.constructor.name
+ }
+
+ async counts(){
+ const counts = await this.$.queue.getJobCounts()
+ this.log.info(`[stats] active: ${counts.active}, completed: ${ counts.completed }, failed: ${counts.failed}, prioritized: ${counts.prioritized}, delayed: ${counts.delayed}, waiting: ${counts.waiting}, paused: ${counts.paused}, total: ${counts.completed} / ${counts.active} + ${counts.waiting + counts.prioritized}`)
+ return counts
+ }
+
+ // setWorker($worker){
+ // this.$worker = $worker
+ // this.bind_events()
+ // }
+
+ cbcall(...args){
+ const handler = [].shift.call(args)
+ if(this?.[`on_${handler}`] && typeof this[`on_${handler}`] === 'function')
+ this[`on_${handler}`](...args)
+ if(typeof this.cb[handler] === 'function')
+ this.cb[handler](...args)
+ }
+
+ hasChanged(data1, data2){
+ this.log.debug(`hasChanged: ${hash(data1) !== hash(data2)}`)
+ return hash(data1) !== hash(data2)
+ }
+
+ on(event, handler){
+ this.cb[event] = handler.bind(this)
+ }
+
+ jobId(relay, workerKey){
+ return `${this.id(workerKey)}:${relay}`
+ }
+
+ async _work(job){
+ if(job.id.startsWith(this.id())) {
+ this.log.warn(`[work] ${job.id} is a ${this.constructor.name} job, running...`)
+      return this.work(job)
+ }
+ this.log.warn(`[work] ${job.id} is not a ${this.constructor.name} job, passing to next worker`)
+ }
+
+  async _populator(){
+    this.total = 0
+    this.processed = 0
+    await this.populator()
+  }
+
+ async addRelayJobs(relays, workerKey){
+
+ for await ( const relay of relays ){
+ await this.addRelayJob({ relay }, workerKey)
+ }
+ const c = await this.counts()
+ this.total = c.prioritized + c.waiting
+ }
+
+ async addRelayJob(jdata, workerKey){
+ // if(jdata?.relay) {
+ // if(!this.networks.includes(parseRelayNetwork(jdata.relay)))
+ // return this.log.info(`Skipping ${this.constructor.name} check for ${jdata.relay} because it is not in config.nocap.networks (default: clearnet only)`)
+ // }
+ const jobOpts = {
+ priority: this.priority,
+ removeOnComplete: {
+ age: 60*10,
+ },
+ removeOnFail: {
+ age: 60*10,
+ }
+ }
+ if(!workerKey)
+ workerKey = this.constructor.name
+ this.log.debug(`Adding job for ${workerKey}: ${JSON.stringify(jdata)}`)
+ return this.$.queue.add( this.id(workerKey), jdata, { jobId: this.jobId(jdata.relay, workerKey), ...jobOpts})
+ }
+
+ async populator(){
+ this.log.debug('Populator not defined')
+ const relays = this.rcache.relay.get.allIds()
+ relays.forEach(relay => { this.$.queue.add(this.constructor.name, { relay: relay, checks: [this.id] }) })
+ }
+
+ // async on_completed(job, rvalue) {
+ // if(typeof rvalue !== 'object') return
+ // if(rvalue?.skip === true) return this.log.debug(`${this.constructor.name} check skipped for ${job.data.relay}`)
+ // const { result } = rvalue
+ // this.log.debug(`DS check complete for ${job.data.relay}: ${JSON.stringify(result)}`)
+ // const dnsId = await this.rcache.check.dns.insert(result)
+ // const relayUpdate = { url: result.url, info: { ref: dnsId, changed_at: Date.now() } }
+ // await this.rcache.relay.get.one(result.url)
+ // await this.rcache.relay.patch(relayUpdate)
+ // }
+}
\ No newline at end of file
diff --git a/packages/nocapd/src/daemon.js b/packages/nocapd/src/daemon.js
new file mode 100644
index 00000000..5f2ab3ec
--- /dev/null
+++ b/packages/nocapd/src/daemon.js
@@ -0,0 +1,102 @@
+import schedule from 'node-schedule'
+
+import relaycache from '@nostrwatch/relaycache'
+import { NocapdQueue, BullMQ, Scheduler } from '@nostrwatch/controlflow'
+import { RedisConnectionDetails } from '@nostrwatch/utils'
+
+import { NocapdQueues } from './classes/NocapdQueues.js'
+import { parseRelayNetwork, relayId, capitalize, loadConfig } from "@nostrwatch/utils"
+
+// import { AllManager } from './managers/all.js'
+// import { WelcomeManager } from './managers/welcome.js'
+// import { WebsocketManager } from './managers/websocket.js'
+// import { GeoManager } from './managers/geo.js'
+// import { DnsManager } from './managers/dns.js'
+// import { InfoManager } from './managers/info.js'
+// import { SslManager } from './managers/ssl.js'
+
+import { bootstrap } from '@nostrwatch/seed'
+
+import Logger from '@nostrwatch/logger'
+
+const { QueueEvents, Worker } = BullMQ
+const log = new Logger('nocapd')
+const rcache = relaycache(process.env.NWCACHE_PATH || './.lmdb')
+
+let config
+
+const scheduleJob = (manager) =>{
+  // use an object spec ({ start, rule }); RecurrenceRule has no start/rule properties
+  const spec = {
+    start: new Date(),
+    rule: `*/${Math.round(manager.frequency / 1000)} * * * * *` // frequency in seconds
+  }
+  return schedule.scheduleJob(spec, () => manager.populator())
+}
+
+const initManagers = async ($q, config) => {
+ const managers = {}
+ console.log(`initManagers()`, `config.managers`, config.managers)
+ for await ( const Manager of config.managers ) {
+ const mpath = `./managers/${Manager}.js`
+ const imp = await import(mpath)
+ try {
+ const mname = imp[Manager].name
+ const $manager = new imp[Manager]($q, rcache, { logger: new Logger(mname), pubkey: process.env.DAEMON_PUBKEY })
+ managers[mname] = $manager
+ console.log( `$manager.constructor.name`, $manager.constructor.name )
+ console.log( Object.keys(managers) )
+ }
+ catch(e){
+ log.err(`Error initializing ${Manager}: ${e.message}`)
+ }
+ }
+ console.log(`initManagers()`, Object.keys(managers))
+ return managers
+}
+
+const initWorkers = async (config) => {
+  if(config?.managers?.length === 0 || !(config?.managers instanceof Array))
+    throw new Error('config.managers needs to be an array of WorkerManager names')
+ const $q = new NocapdQueues({ pubkey: process.env.DAEMON_PUBKEY })
+ const { $Queue:$NocapdQueue, $QueueEvents:$NocapdQueueEvents } = NocapdQueue()
+ $q.queue = $NocapdQueue
+ await $q.queue.pause()
+ await $q.queue.drain()
+ $q.events = $NocapdQueueEvents
+ $q.managers = await initManagers($q, config)
+ const $worker = new Worker($q.queue.name, $q.route.bind($q), { concurrency: 10 } )
+ await $worker.pause()
+ $q.setWorker($worker)
+ await $q.populateAll()
+ await $q.queue.resume()
+ return $q
+}
+
+const enabledWorkerManagers = () => {
+ const eman = []
+  for( const manager of Object.keys(config?.nocapd?.checks || {}) ) {
+ if(config?.nocapd?.checks?.[manager]?.enabled === true)
+ eman.push(`${capitalize(manager)}Manager`)
+ }
+ return eman
+}
+
+export const Nocapd = async () => {
+ config = await loadConfig()
+ if(rcache.relay.count.all() === 0){
+ let relays = await bootstrap('nocapd')
+ console.log(`found ${relays.length} relays`)
+ relays = relays
+ .map(r => { return { url: r, network: parseRelayNetwork(r), online: null, geo: [], attributes: [] } })
+ const persisted = await rcache.relay.batch.insertIfNotExists(relays)
+ console.log('persisted:', persisted.length)
+ }
+ const $q = await initWorkers({
+ managers: enabledWorkerManagers() || [],
+ })
+ return {
+ stop: () => {
+ console.log('stopping')
+ },
+ $q
+ }
+}
\ No newline at end of file
diff --git a/packages/nocapd/src/index.js b/packages/nocapd/src/index.js
new file mode 100644
index 00000000..d1a1bdc7
--- /dev/null
+++ b/packages/nocapd/src/index.js
@@ -0,0 +1,19 @@
+import { Nocapd } from './daemon.js';
+
+const nocapd = Nocapd()
+
+async function gracefulShutdown(signal) {
+ console.log(`Received ${signal}, closing application...`);
+ // await nocapd.stop()
+ process.exit(0);
+}
+
+process.on('uncaughtException', async (error) => {
+ console.error('Uncaught Exception:', error);
+ // await gracefulShutdown('uncaughtException');
+});
+
+process.on('unhandledRejection', async (reason, promise) => {
+ console.error('Unhandled Rejection at:', promise, 'reason:', reason);
+ await gracefulShutdown('unhandledRejection');
+});
\ No newline at end of file
diff --git a/packages/nocapd/src/managers/AllManager.js b/packages/nocapd/src/managers/AllManager.js
new file mode 100644
index 00000000..5ad9e568
--- /dev/null
+++ b/packages/nocapd/src/managers/AllManager.js
@@ -0,0 +1,187 @@
+import mapper from 'object-mapper'
+import ngeotags from 'nostr-geotags'
+
+import { RetryManager } from '@nostrwatch/controlflow'
+import { parseRelayNetwork, lastCheckedId, delay } from '@nostrwatch/utils'
+import Publish from '@nostrwatch/publisher'
+
+import { WorkerManager } from '../classes/WorkerManager.js'
+
+const publish30066 = new Publish.Kind30066()
+
+export class AllManager extends WorkerManager {
+ constructor($, rcache, config){
+ super($, rcache, config)
+ this.interval = 60*1000 //checks for expired items every...
+ this.timeout = 9*1000
+ this.timeoutBuffer = 1000
+ this.priority = 10
+ this.retry = new RetryManager('nocapd', 'check', this.rcache.relay.get.all())
+ }
+
+ cacheId(url){
+ return lastCheckedId(this.id, url)
+ }
+
+ async populator(){
+ this.log.debug(`${this.id()}:populator()`)
+ await this.retry.init()
+ const relaysUnchecked = await this.getUncheckedRelays()
+ const relaysExpired = await this.retry.getExpiredRelays(this.cacheId.bind(this))
+ let relays = [...new Set([...relaysUnchecked, ...relaysExpired])]
+ relays = relays.map(r=>r.url).filter(relay => this.networks.includes(parseRelayNetwork(relay)))
+ this.log.info(`expired: ${relaysExpired.length}, unchecked: ${relaysUnchecked.length}, total: ${relays.length}`)
+ await this.$.worker.pause()
+ await this.addRelayJobs(relays)
+ this.log.info('Waiting for 5s...')
+ await delay(5000)
+ await this.$.worker.resume()
+ }
+
+ async work(job){
+ const error = (err) => { this.log.error(`Error running websocket check for ${job.data.relay}: ${err.message}`) }
+ try {
+ this.log.debug(`Running websocket check for ${job.data.relay}`)
+ const { relay:url } = job.data
+ const dpubkey = this.pubkey
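+      // split the 9s timeout budget evenly across the connect/read/write phases (~3s each)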
+ const nocapOpts = {
+ timeout: {
+ connect: Math.floor(this?.timeout/3),
+ read: Math.floor(this?.timeout/3),
+ write: Math.floor(this?.timeout/3)
+ },
+ checked_by: dpubkey
+ }
+ const nocapd = new this.Nocap(url, nocapOpts)
+ const result = await nocapd.check('all').catch(error)
+ if( !result?.connect?.data )
+ return { result: false }
+ return { result }
+ }
+ catch(err) {
+ this.processed++
+ error(err)
+ return { result: false }
+ }
+ }
+
+ async on_completed(job, rvalue){
+ const { relay:url } = job.data
+ const { result } = rvalue
+ const { checked_at } = result
+ this.processed++
+ this.progressMessage(url, result)
+ if(!result)
+ return this.log.debug(`Nocap complete (all) check failed for ${url}`)
+ this.retry.setRetries( url, true )
+ await this.setLastChecked( url, checked_at )
+ // this.log.debug(`Nocap complete (all) check complete for ${url}: connect: ${result?.connect?.data}, read: ${result?.read?.data}, write: ${result?.write?.data}`)
+ result.retries = this.retry.getRetries(url)
+ const event30066Data = event30066DataFromResult( result )
+ await publish30066.one(event30066Data)
+ // const event10066Data = event10066DataFromResult( result )
+ // await publish10066.one(event10066Data)
+ }
+
+ async on_failed(job, err){
+ const { relay:url } = job.data
+ // console.log('url:onfailed', url)
+ this.log?.debug(`Websocket check failed for ${job.data.relay}: ${JSON.stringify(err)}`)
+ this.retry.setRetries(url, false)
+ this.processed++
+ this.progressMessage(url, null, true)
+ }
+
+ async getUncheckedRelays(){
+    let unchecked = await this.rcache.relay.get.all()?.filter( relay => relay.online == null )
+ if(this.networks.length)
+ unchecked = unchecked?.filter( relay => this.networks.includes(relay.network) )
+ return unchecked?.length? unchecked: []
+ }
+
+ async setLastChecked(url, date=Date.now()){
+ await this.rcache.cachetime.set( lastCheckedId('online',url), date )
+ }
+
+ async setLastPublished(url, date=Date.now()){
+ await this.rcache.cachetime.set( lastCheckedId('online',url), date )
+ }
+}
+
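+// Strips the parts of a nocap result that are irrelevant to the given check type,
+// e.g. truncatedResult(result, 'websocket') keeps connect/read/write but drops
+// info, dns, geo and ssl.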
+const truncatedResult = (result, type) => {
+ const commonFields = ['connect', 'read', 'write'];
+ const fieldsToRemove = {
+ 'websocket': ['info', 'dns', 'geo', 'ssl'],
+ 'dns': ['info', 'geo', 'ssl', ...commonFields],
+ 'info': ['geo', 'ssl', 'dns', ...commonFields],
+ 'geo': ['ssl', 'dns', 'info', ...commonFields],
+ 'ssl': ['dns', 'info', 'geo', ...commonFields]
+ };
+ const res = { ...result };
+ fieldsToRemove[type].forEach(field => {
+ if(res?.[field])
+ delete res[field]
+ });
+ return res;
+};
+
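+// Flattens a nocap result into the event data consumed by publish30066.one().
+// Illustrative shape only: { url: 'wss://relay.example/', online: true,
+// geo: [...], attributes: ['ssl-valid', 'nip-1'] }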
+const event30066DataFromResult = result => {
+ const eventData = {}
+ const attributes = []
+
+ const geo = transformGeoResult(result.geo?.data) || {}
+ const isGeo = Object.keys(geo)?.length > 0
+
+ const info = result.info?.data || {}
+ const isInfo = Object.keys(info)?.length > 0
+
+ const ssl = result.ssl?.data || {}
+ const isSsl = Object.keys(ssl)?.length > 0
+
+ eventData.url = result.url
+ eventData.online = result.connect.data
+
+  if(result.retries > 0)
+    eventData.retries = result.retries
+
+ if(isGeo)
+ eventData.geo = ngeotags(geo, { iso31662: true })
+
+ if(isInfo){
+ if(info?.limitations?.payment_required === true)
+ attributes.push('payment-required')
+ if(info?.limitations?.auth_required === true)
+ attributes.push('auth-required')
+ if(info?.supported_nips instanceof Array)
+ info.supported_nips.forEach(nip => attributes.push(`nip-${nip}`))
+ }
+
+ if(isSsl)
+ attributes.push(ssl?.valid === true? 'ssl-valid' :'ssl-invalid')
+
+  if(isGeo){
+    if(geo?.as)
+      attributes.push(geo.as)
+    if(geo?.ip)
+      attributes.push(geo.ip)
+  }
+
+ if(attributes.length)
+ eventData.attributes = attributes
+
+ return eventData
+}
+
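+// Renames the geo check's ip-api style fields to the keys nostr-geotags expects,
+// e.g. { city: 'Berlin', query: '1.2.3.4' } becomes { cityName: 'Berlin', ip: '1.2.3.4' }.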
+const transformGeoResult = geo => {
+ const map = {
+ "as": "as",
+ "city": "cityName",
+ "countryCode": "countryCode",
+ "regionName": "regionName",
+ "continent": "contentName",
+ "continentCode": "continentCode",
+ "lat": "lat",
+ "lon": "lon",
+ "query": "ip",
+ }
+ return mapper(geo, map)
+}
\ No newline at end of file
diff --git a/packages/nocapd/src/managers/dns.js b/packages/nocapd/src/managers/dns.js
new file mode 100644
index 00000000..3d3af748
--- /dev/null
+++ b/packages/nocapd/src/managers/dns.js
@@ -0,0 +1,53 @@
+import { WorkerManager } from '../classes/WorkerManager.js'
+import transform from '@nostrwatch/transform'
+import { Nocap } from '@nostrwatch/nocap'
+
+const { RelayCheckResultDns } = transform
+
+export class DnsManager extends WorkerManager {
+ constructor($q, rdb, config){
+ super($q, rdb, config)
+ this.id = 'dns'
+    this.interval = 24*60*60*1000 //24 hours
+    this.expires = 6*24*60*60*1000 //6 days
+ this.priority = 30
+
+ }
+
+ async populator(){
+ const { Relay } = this.rdb.schemas
+ const relays = this.rdb.relay.get.null('dns', ['url']).map(r=>r.url)
+    this.log.info(`Found ${relays.length} new dns jobs`)
+ relays.forEach(relay => {
+ const job = { relay }
+ this.addJob(job)
+ })
+ }
+
+
+ async work(job){
+    this.log.debug(`Running dns check for ${job.data.relay}`)
+ const { relay } = job.data;
+ const nocapd = new Nocap(relay);
+ const dnsOld = this.rdb.checks.dns.get.one(relay.url)
+ const result = await nocapd.check('dns')
+    if(!this.hasChanged(dnsOld?.data, result.dns))
+ return { skip: true }
+ return { result }
+ }
+
+ async on_completed(job, rvalue) {
+ if(typeof rvalue !== 'object') return
+    if(rvalue?.skip === true) return this.log.debug(`Dns check skipped for ${job.data.relay}`)
+    const { result } = rvalue
+    this.log.debug(`DNS check complete for ${job.data.relay}: ${JSON.stringify(result)}`)
+    const dnsId = await this.rdb.check.dns.insert(result)
+    const relayUpdate = { url: result.url, dns: { ref: dnsId, changed_at: Date.now() } }
+ await this.rdb.relay.get.one(result.url)
+ await this.rdb.relay.patch(relayUpdate)
+ }
+
+ async on_failed(job, err){
+    this.log.info(`DNS check failed for ${job.data.relay}: ${JSON.stringify(err)}`)
+ }
+};
\ No newline at end of file
diff --git a/packages/nocapd/src/managers/geo.js b/packages/nocapd/src/managers/geo.js
new file mode 100644
index 00000000..412e9790
--- /dev/null
+++ b/packages/nocapd/src/managers/geo.js
@@ -0,0 +1,32 @@
+import { WorkerManager } from '../classes/WorkerManager.js'
+
+export class GeoManager extends WorkerManager {
+ constructor($q, rdb, config){
+ super($q, rdb, config)
+ this.id = 'geo'
+ this.interval = 24*60*60*1000 //1 day
+    this.expires = 6*24*60*60*1000 //6 days
+ this.priority = 40
+ }
+ async populator(){
+ this.log.info('Populating geo jobs')
+ const { RelayCheckDns } = this.rdb.schemas
+    const dnsNew = [...this.rdb.$.select(['url']).from( RelayCheckDns ).where( { RelayCheckDns: { last_checked: (v) => v < Date.now()-this.expires } } )].flat()
+    this.log.info(`Found ${dnsNew.length} new geo jobs`)
+ new Set([...dnsNew]).forEach(relay => {
+ const job = { relay: relay, checks: ['dns', 'geo'], persists: ['geo'] }
+      this.addJob(job)
+ })
+ }
+ async work(job){
+ this.log.info(`Running geo check for ${job.data.relay.url}`)
+ const { relay, checks } = job.data;
+ const nocapd = new this.Nocap(relay);
+    const result = await nocapd.check(checks);
+    return { result };
+ }
+
+  async on_completed(job, rvalue){
+
+ }
+};
diff --git a/packages/nocapd/src/managers/info.js b/packages/nocapd/src/managers/info.js
new file mode 100644
index 00000000..2153070f
--- /dev/null
+++ b/packages/nocapd/src/managers/info.js
@@ -0,0 +1,71 @@
+import { WorkerManager } from '../classes/WorkerManager.js'
+
+export class InfoManager extends WorkerManager {
+
+ constructor($q, nwc, config){
+ super($q, nwc, config)
+ this.interval = 5*60*1000 //checks for expired items every...
+ this.expires = 6*60*60*1000 //6 hours
+ this.timeout = 15*1000
+ this.timeoutBuffer = 1000
+ this.priority = 20
+ }
+
+ async populator(){
+ const { Relay } = this.nwc.schemas
+ const relays = this.nwc.cachetime.expired('info')
+ this.log.info(`Found ${relays.length} new info jobs`)
+ relays.forEach(relay => {
+ const job = { relay }
+ this.addJob(job)
+ })
+ }
+
+ async work(job){
+ const { relay } = job.data;
+ const infoOldId = this.nwc.relay.get.one(relay)?.info?.ref
+ let infoOld = {}
+ if(infoOldId) {
+ const record = this.nwc.checks.info.get(infoOldId)
+ infoOld = record?.data
+ }
+ const nocap = new this.Nocap(relay);
+ const infoCheck = await nocap.check('info')
+ const infoNew = infoCheck?.info?.data
+ if(!this.hasChanged(infoNew, infoOld))
+ return { skip: true, result: { url: relay } }
+ return { result: infoCheck }
+ }
+
+ async on_completed(job, rvalue){
+ if(typeof rvalue !== 'object') return
+ const { result } = rvalue
+    const persist = rvalue?.skip !== true
+    let infoId = null
+    if(persist){
+      this.log.debug(`Info check complete for ${job.data.relay}: ${JSON.stringify(result)}`)
+      infoId = await this.nwc.check.info.insert(result)
+ }
+ const record = await this.nwc.relay.get.one(result.url)
+ if(record.info === null || persist) {
+ await this.nwc.relay.patch({
+ url: result.url,
+ info: {
+ name: result?.info?.data?.name || null,
+ description: result?.info?.data?.description || null,
+ software: result?.info?.data?.software || null,
+ version: result?.info?.data?.version || null,
+ supported_nips: result?.info?.data?.supported_nips || [],
+ limitations: result?.info?.data?.limitations || {},
+ ref: infoId || null,
+ changed_at: infoId? Date.now(): null
+ }
+ })
+ }
+ }
+
+ async on_failed(job, err){
+ this.log.info(`Info check failed for ${job.data.relay}: ${JSON.stringify(err)}`)
+ }
+
+}
\ No newline at end of file
diff --git a/packages/nocapd/src/managers/ssl.js b/packages/nocapd/src/managers/ssl.js
new file mode 100644
index 00000000..b2faf242
--- /dev/null
+++ b/packages/nocapd/src/managers/ssl.js
@@ -0,0 +1,35 @@
+import { WorkerManager } from '../classes/WorkerManager.js'
+
+export class SslManager extends WorkerManager {
+ constructor($q, rdb, config){
+ super($q, rdb, config)
+ this.id = 'ssl'
+    this.frequency = 24*60*60*1000 //24 hours
+ this.priority = 50
+ }
+ async populator(){
+ // Implementation to be provided later
+ }
+  async work(job){
+ this.log.info(`Running ssl check for ${job.data.relay.url}`);
+ const { relay, checks } = job.data;
+ const nocapd = new this.Nocap(relay);
+ const result = await nocapd.check(['ssl'], { timeout: { ssl: this.timeout } });
+ return { result };
+ }
+
+ async on_completed(job, rvalue) {
+ if(typeof rvalue !== 'object') return
+    if(rvalue?.skip === true) return this.log.debug(`SSL check skipped for ${job.data.relay}`)
+    const { result } = rvalue
+    this.log.debug(`SSL check complete for ${job.data.relay}: ${JSON.stringify(result)}`)
+    const sslId = await this.rdb.check.ssl.insert(result)
+    const relayUpdate = { url: result.url, ssl: { ref: sslId, changed_at: Date.now() } }
+    await this.rdb.relay.get.one(result.url)
+ await this.rdb.relay.patch(relayUpdate)
+ }
+
+ async on_failed(job, err){
+    this.log.info(`SSL check failed for ${job.data.relay}: ${JSON.stringify(err)}`)
+ }
+};
\ No newline at end of file
diff --git a/packages/nocapd/src/managers/websocket.js b/packages/nocapd/src/managers/websocket.js
new file mode 100644
index 00000000..2e65e254
--- /dev/null
+++ b/packages/nocapd/src/managers/websocket.js
@@ -0,0 +1,60 @@
+import { WorkerManager } from '../classes/WorkerManager.js'
+
+export class WebsocketManager extends WorkerManager {
+ constructor($q, rdb, config){
+ super($q, rdb, config)
+ this.interval = 60*1000 //checks for expired items every...
+ this.expires = 60*60*1000 //1 hour
+ this.timeout = 30*1000
+ this.timeoutBuffer = 1000
+ this.priority = 10
+ }
+
+ async populator(){
+ const { Relay } = this.rdb.schemas
+ const relays =
+ [...this.rdb.$
+ .select(['url'])
+ .from( Relay )
+ .where({ Relay: (R) => R.last_checked < (new Date() - this.expires)})
+ ].flat()
+ this.log.info(`Found ${relays.length} new websocket jobs`)
+ relays.map(r=>r.url).forEach(relay => {
+ this.addJob({ relay })
+ })
+ }
+
+ async work(job){
+ this.log.debug(`Running websocket check for ${job.data.relay}`)
+ const { relay } = job.data
+ const dpubkey = this.pubkey
+ const nocapd = new this.Nocap(relay)
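+    // split the 30s timeout budget evenly across the connect/read/write phases (~10s each)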
+ const nocapOpts = {
+ timeout: {
+ connect: Math.floor(this?.timeout/3),
+ read: Math.floor(this?.timeout/3),
+ write: Math.floor(this?.timeout/3)
+ },
+ checked_by: dpubkey
+ }
+ const result = await nocapd.check(['connect', 'read', 'write'], nocapOpts)
+ // nocapd.on('change', (res) => console.log('changed!', res))
+ return { result }
+ }
+
+ async on_completed(job, rvalue){
+ const { result } = rvalue
+ this.log.info(`Websocket check complete for ${job.data.relay}: connect: ${result.connect.data}, read: ${result.read.data}, write: ${result.write.data}`)
+ // console.log(`@ insert (${result.url})`)
+ this.rdb.check.websocket.insert(result)
+ const relayUpdate = { url: result.url, checked_at: Date.now() }
+ if(result.connect.data === true)
+ relayUpdate.last_seen = Date.now()
+    await this.rdb.relay.get.one(result.url)
+ await this.rdb.relay.patch(relayUpdate)
+ }
+
+ async on_failed(job, err){
+ this.log.error(`Websocket check failed for ${job.data.relay}: ${JSON.stringify(err)}`)
+ }
+}
\ No newline at end of file
diff --git a/packages/nocapd/src/managers/welcome.js b/packages/nocapd/src/managers/welcome.js
new file mode 100644
index 00000000..4e45e07b
--- /dev/null
+++ b/packages/nocapd/src/managers/welcome.js
@@ -0,0 +1,38 @@
+
+import { WorkerManager } from '../classes/WorkerManager.js'
+import { ParseSelect, RelayRecord } from '@nostrwatch/relaycache'
+const parseSelect = ParseSelect(RelayRecord, "Relay")
+
+export class WelcomeManager extends WorkerManager {
+ constructor(parent, rdb, config){
+ super(parent, rdb, config)
+ // this.id = 'welcome'
+ this.interval = 60*1000 //1m
+    this.expires = 60*1000 //1m
+ this.concurrency = 1
+ this.timeout = 10000
+ this.bindEvents = false
+ this.priority = 5
+ }
+
+ async work(job){
+ // console.log(job.data)
+ //welcomer doesn't produce any jobs.
+    console.warn(`[work] Welcomer only produces jobs for other workers, so this should never fire!`)
+ }
+
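+  // Fans each never-checked relay out to every sibling manager's queue;
+  // siblingKeys() and addJob(job, managerKey) are assumed to come from WorkerManager.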
+ async populator(){
+ const { Relay } = this.rdb.schemas
+ const select = parseSelect('url')
+ const relaysNew = [...this.rdb.$.select( select ).from( Relay ).where( { Relay: { last_checked: (v) => v < 0 } } )].flat()
+ this.log.info(`Found ${relaysNew.length} new welcome jobs`)
+ relaysNew.map(r=>r.url).forEach(relay => {
+ this.siblingKeys().forEach( async managerKey => await this.addJob( { relay }, managerKey ) )
+ })
+ }
+
+ async on_completed(job, result){
+ // console.log(result)
+ console.warn(`[on_completed] Welcomer only produces jobs for other workers, so this should never fire!`)
+ }
+}
\ No newline at end of file
diff --git a/packages/proxy/tor/Dockerfile b/packages/proxy/tor/Dockerfile
new file mode 100644
index 00000000..be92aafb
--- /dev/null
+++ b/packages/proxy/tor/Dockerfile
@@ -0,0 +1,65 @@
+FROM alpine
+
+# Install tor and privoxy
+RUN apk --no-cache --no-progress upgrade && \
+    apk --no-cache --no-progress add bash curl privoxy shadow tini tor tzdata && \
+ file='/etc/privoxy/config' && \
+ sed -i 's|^\(accept-intercepted-requests\) .*|\1 1|' $file && \
+ sed -i '/^listen/s|127\.0\.0\.1||' $file && \
+ sed -i '/^listen.*::1/s|^|#|' $file && \
+ sed -i 's|^\(logfile\)|#\1|' $file && \
+ sed -i 's|^#\(log-messages\)|\1|' $file && \
+ sed -i 's|^#\(log-highlight-messages\)|\1|' $file && \
+    sed -i '/forward *localhost\//a forward-socks5t / 127.0.0.1:9050 .' $file && \
+ sed -i '/^forward-socks5t \//a forward 172.16.*.*/ .' $file && \
+ sed -i '/^forward 172\.16\.\*\.\*\//a forward 172.17.*.*/ .' $file && \
+ sed -i '/^forward 172\.17\.\*\.\*\//a forward 172.18.*.*/ .' $file && \
+ sed -i '/^forward 172\.18\.\*\.\*\//a forward 172.19.*.*/ .' $file && \
+ sed -i '/^forward 172\.19\.\*\.\*\//a forward 172.20.*.*/ .' $file && \
+ sed -i '/^forward 172\.20\.\*\.\*\//a forward 172.21.*.*/ .' $file && \
+ sed -i '/^forward 172\.21\.\*\.\*\//a forward 172.22.*.*/ .' $file && \
+ sed -i '/^forward 172\.22\.\*\.\*\//a forward 172.23.*.*/ .' $file && \
+ sed -i '/^forward 172\.23\.\*\.\*\//a forward 172.24.*.*/ .' $file && \
+ sed -i '/^forward 172\.24\.\*\.\*\//a forward 172.25.*.*/ .' $file && \
+ sed -i '/^forward 172\.25\.\*\.\*\//a forward 172.26.*.*/ .' $file && \
+ sed -i '/^forward 172\.26\.\*\.\*\//a forward 172.27.*.*/ .' $file && \
+ sed -i '/^forward 172\.27\.\*\.\*\//a forward 172.28.*.*/ .' $file && \
+ sed -i '/^forward 172\.28\.\*\.\*\//a forward 172.29.*.*/ .' $file && \
+ sed -i '/^forward 172\.29\.\*\.\*\//a forward 172.30.*.*/ .' $file && \
+ sed -i '/^forward 172\.30\.\*\.\*\//a forward 172.31.*.*/ .' $file && \
+ sed -i '/^forward 172\.31\.\*\.\*\//a forward 10.*.*.*/ .' $file && \
+ sed -i '/^forward 10\.\*\.\*\.\*\//a forward 192.168.*.*/ .' $file && \
+ sed -i '/^forward 192\.168\.\*\.\*\//a forward 127.*.*.*/ .' $file && \
+ sed -i '/^forward 127\.\*\.\*\.\*\//a forward localhost/ .' $file && \
+ echo 'AutomapHostsOnResolve 1' >>/etc/tor/torrc && \
+ echo 'ControlPort 9051' >>/etc/tor/torrc && \
+ echo 'ControlSocket /etc/tor/run/control' >>/etc/tor/torrc && \
+ echo 'ControlSocketsGroupWritable 1' >>/etc/tor/torrc && \
+ echo 'CookieAuthentication 1' >>/etc/tor/torrc && \
+ echo 'CookieAuthFile /etc/tor/run/control.authcookie' >>/etc/tor/torrc && \
+ echo 'CookieAuthFileGroupReadable 1' >>/etc/tor/torrc && \
+ echo 'DNSPort 5353' >>/etc/tor/torrc && \
+ echo 'DataDirectory /var/lib/tor' >>/etc/tor/torrc && \
+ echo 'ExitPolicy reject *:*' >>/etc/tor/torrc && \
+ echo 'Log notice stderr' >>/etc/tor/torrc && \
+ echo 'RunAsDaemon 0' >>/etc/tor/torrc && \
+ echo 'SocksPort 0.0.0.0:9050 IsolateDestAddr' >>/etc/tor/torrc && \
+ echo 'TransPort 0.0.0.0:9040' >>/etc/tor/torrc && \
+ echo 'User tor' >>/etc/tor/torrc && \
+ echo 'VirtualAddrNetworkIPv4 10.192.0.0/10' >>/etc/tor/torrc && \
+ mkdir -p /etc/tor/run && \
+ chown -Rh tor. /var/lib/tor /etc/tor/run && \
+ chmod 0750 /etc/tor/run && \
+ rm -rf /tmp/*
+
+COPY torproxy.sh /usr/bin/
+
+EXPOSE 8118 9050 9051
+
+HEALTHCHECK --interval=60s --timeout=15s --start-period=20s \
+ CMD curl -sx localhost:8118 'https://check.torproject.org/' | \
+ grep -qm1 Congratulations
+
+VOLUME ["/etc/tor", "/var/lib/tor"]
+
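+# Build/run sketch (image tag is an example):
+#   docker build -t torproxy . && docker run -d -p 8118:8118 -p 9050:9050 torproxy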
+ENTRYPOINT ["/sbin/tini", "--", "/usr/bin/torproxy.sh"]
\ No newline at end of file
diff --git a/packages/proxy/tor/docker-compose.yaml b/packages/proxy/tor/docker-compose.yaml
new file mode 100644
index 00000000..c2c091d6
--- /dev/null
+++ b/packages/proxy/tor/docker-compose.yaml
@@ -0,0 +1,8 @@
+version: "3"
+services:
+ privoxy:
+ image: dperson/torproxy
+ restart: always
+ ports:
+ - '8118:8118'
+ - '9050:9050'
\ No newline at end of file
diff --git a/packages/proxy/tor/tor.log b/packages/proxy/tor/tor.log
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/packages/proxy/tor/tor.log
@@ -0,0 +1 @@
+
diff --git a/packages/proxy/tor/tor.sh b/packages/proxy/tor/tor.sh
new file mode 100755
index 00000000..329c60fa
--- /dev/null
+++ b/packages/proxy/tor/tor.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+
+# Proxy settings
+# proxy_address="184.170.248.5" # Replace with your proxy IP
+# proxy_port="4145" # Replace with your proxy port
+proxy_address="127.0.0.1" # Replace with your proxy IP
+proxy_port="9050"
+
+LOGFILE="./tor.log"
+
+file_path="websockets" # Replace with the path to your file
+
+echo "" > $LOGFILE
+
+# Function to extract address and port
+extract_address_port_protocol() {
+ local full_address=$1
+ local address port
+
+ # Remove the trailing slash from the address
+ address=$(echo $full_address | sed -e 's/\/$//')
+
+ # Check if a port is specified in the URL
+ if [[ $address =~ :([0-9]+)$ ]]; then
+ # Extract port from the address
+ port=${BASH_REMATCH[1]}
+ # Remove the port from the address
+ address=${address%:$port}
+ else
+ # Set default port based on the protocol
+ if [[ $address == *"wss://"* ]]; then
+ port=443
+ else
+ port=80
+ fi
+ fi
+
+ echo "$address $port"
+}
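+# e.g. extract_address_port_protocol "ws://example.onion:4848/" prints "ws://example.onion 4848";
+# with no explicit port, wss:// defaults to 443 and ws:// to 80.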
+
+if [[ ! -f $file_path ]]; then
+ echo "File not found: $file_path"
+ exit 1
+fi
+
+# Loop through each onion address in the file
+while IFS= read -r full_onion || [[ -n "$full_onion" ]]; do
+ # Extract address and port
+ read onion_url onion_port <<< $(extract_address_port_protocol "$full_onion")
+ echo "Attempting to connect to $onion_url on port $onion_port"
+
+ # Use websocat with timeout to attempt the connection and log the result
+    timeout 30 websocat --socks5 ${proxy_address}:${proxy_port} -t "$onion_url" 2>&1 | tee -a ./websocket.log; echo "Connection attempt finished."
+
+ # Brief pause to allow for connection stabilization and to avoid rapid reconnection attempts
+ sleep 5
+done < "$file_path"
+
+# while IFS= read -r full_onion; do
+# # Extract address and port
+# read onion_url onion_port <<< $(extract_address_port_protocol "$full_onion")
+# echo "Attempting to connect to $onion_url on port $onion_port"
+
+# # Use websocat with timeout to attempt the connection
+# timeout 30 websocat --socks5 127.0.0.1:9050 -t "$onion_url" || true
+# exit_status=$?
+
+# if [ $exit_status -eq 0 ]; then
+# echo "$(date) - Successfully opened WebSocket connection to $onion_url" >> $LOGFILE
+# else
+# echo "$(date) - Failed to open WebSocket connection to $onion_url with exit status $exit_status" >> $LOGFILE
+# fi
+
+# # Brief pause to allow for connection stabilization and to avoid rapid reconnection attempts
+# sleep 5
+# done < "$file_path"
+
+echo "All connections attempted. Check ./tor.log for details."
\ No newline at end of file
diff --git a/packages/proxy/tor/websocket.log b/packages/proxy/tor/websocket.log
new file mode 100644
index 00000000..c9070a6e
--- /dev/null
+++ b/packages/proxy/tor/websocket.log
@@ -0,0 +1,177 @@
+Fri Dec 1 17:53:05 CET 2023 - Failed to open WebSocket connection to ws://bitcoinr6de5lkvx4tpwdmzrdfdpla5sya2afwpcabjup2xpi5dulbad.onion
+Fri Dec 1 17:53:36 CET 2023 - Failed to open WebSocket connection to ws://bitcoinr6de5lkvx4tpwdmzrdfdpla5sya2afwpcabjup2xpi5dulbad.onion
+Fri Dec 1 17:54:12 CET 2023 - Failed to open WebSocket connection to ws://bitcoinr6de5lkvx4tpwdmzrdfdpla5sya2afwpcabjup2xpi5dulbad.onion
+Fri Dec 1 17:55:47 CET 2023 - Failed to open WebSocket connection to ws://bitcoinr6de5lkvx4tpwdmzrdfdpla5sya2afwpcabjup2xpi5dulbad.onion with exit status 1
+Fri Dec 1 17:57:46 CET 2023 - WebSocket connection attempt to ws://bitcoinr6de5lkvx4tpwdmzrdfdpla5sya2afwpcabjup2xpi5dulbad.onion exited with status 0
+Fri Dec 1 17:59:58 CET 2023 - WebSocket connection attempt to ws://bitcoinr6de5lkvx4tpwdmzrdfdpla5sya2afwpcabjup2xpi5dulbad.onion exited with status 0
+websocat: SOCKS: host unreachable
+websocat: error running
+[WARN websocat::line_peer] Sending possibly incomplete line.
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
+["NOTICE","could not parse command"]
diff --git a/packages/proxy/tor/websockets b/packages/proxy/tor/websockets
new file mode 100644
index 00000000..0a886897
--- /dev/null
+++ b/packages/proxy/tor/websockets
@@ -0,0 +1,170 @@
+ws://bitcoinr6de5lkvx4tpwdmzrdfdpla5sya2afwpcabjup2xpi5dulbad.onion/
+ws://au4bcjeijqf2v45raa5hg36ifo3i2ezn62t5r264p6fhechy4vqx33qd.onion/
+ws://na3lu7fraf6olbz63arznnqcniaszdlm5x75ud5rzpza3o6q5phy6gid.onion/
+ws://vwy4pf523dmib4nny6l5slkghfx6yf3a2erfe4dygaa44hvyyhj4g2id.onion:4848/
+ws://wagvwfrdrikrqzp7h3b5lwl6btyuttu7mqpeji35ljzq36ovzgjhsfqd.onion/
+wss://zosypufrikjkm4ommyfi3pkwerpvfdvjg5nhczvykpwero2ridd7rnyd.onion/
+ws://rl4o6gi74z6kvgwhnnbjnvjnap25tchh2ragrrkqqlukuyecfamg2did.onion/
+ws://vy2klfczk2tca4pkgdrvqrt4kmlvcnqz6cmwszrohb2yl2bk3xhgztyd.onion/
+ws://cuqjqfuqxqk4ktcpbi57fihcp3qbrbno4ztnfxxm6fquuzhkni6tsvyd.onion/
+ws://q3zaylwjjhq77yzx34lbydz26szzjljberwetkjgxgsapcekrpjzsmqd.onion/
+wss://oi3vzygmuyvf2wbls2eqagflcdkp5hkw2orowakvveeunqbvwrw2jaad.onion/
+ws://rtdkv5ff4qxxh6ipobcgoekhqcpuouvnqbefwhfwx7yutnyfznticdad.onion/
+ws://l3ioni3gotlxqvbbrkbbhdid2xjrkcifieqt6shv5rjkjujwyxdwwkyd.onion/
+wss://5laki7ronqaju5yrck73big7mf7q3bctqlyzpuw7qsjqhl7ynisltqad.onion/
+wss://oszxnul6sljymfoykzfwuhegzfi57vqlvgowxb3zc4773wcpp322cpid.onion/
+ws://oarnx6xdrq5mygfdrbmzsvh3is3holefpz2x4qwbopwcicwd63gcivid.onion/
+wss://izj3isbk3pmade74ontdijodhehsytnw2iokdhh6k3flk4mq2pau6sid.onion/
+wss://7ab7qqbj2dw3pjnkoskgsfn4ikqc7orwnkpmcfjmeobw63kf4zgykjid.onion/
+ws://5dzvuefllevkhk7miqynaviguedxfnofrayu2xwfwtlkdg4radjdlyqd.onion/
+ws://mzpvksiq2j4ac5ug7oh3xtmztrdsrtigt3mcrmvjqcsw6rxcntmhu6ad.onion/
+ws://oif67vbszaijw742iueskyhs46dnnfcyjk6eskotfynkkswvzgcevzid.onion/
+ws://mcja3k7s7godsfe6n5cji5tvh3nbhacbb2kl7j5sisias4s3uls5cjid.onion/
+ws://e45sscni6e6y64bmla3yznokekcco2jfldvh5ptkbd3ljqqj23zb5hid.onion/
+wss://fnykm3jhukof4m3hc4lsdzcju2raiwekjfxvk6v6wnw32h6oheerwrad.onion/
+ws://dk62d2acqzfhya224lqzm4nknkathamoimjdmgm3ndbrkrocwej2kuyd.onion/
+ws://vavursybkbgfyow7nnst5jnqsj2xyteusf3zeerbjdizq6y7h25v4syd.onion:5050/
+wss://w2do75kvolcihgcsad2ftpswl2z2e6eckrvzeajyfrzpzdr7rc37x4yd.onion/nostrrelay/cyberpunkhardware
+ws://btcys3jbk3ao3d5pevhttjfmx5g3chvmdf6c4gyqxdaqvh74erqdchad.onion/
+ws://fnykm3jhukof4m3hc4lsdzcju2raiwekjfxvk6v6wnw32h6oheerwrad.onion/
+ws://ilmsol5ri2uc2i5zmtj4gafsezrey5u6kysk7byafc3txewer5p6u2ad.onion/
+ws://bp3ga2unzykgfxajixhwqwm57r2uqovzv5ok5hmubcaozyhy5mxhpcad.onion/
+wss://5dzvuefllevkhk7miqynaviguedxfnofrayu2xwfwtlkdg4radjdlyqd.onion/
+ws://h463uge4enisyfcgitx34mc6lmymkob5fpyh4u34nbsaqmki2ffkroad.onion/
+wss://n3k6eji4sqcwpsb6wzsqkwtapvis3buoiagbh2jzpc6h6mziblu7dnqd.onion/
+ws://7si6co27cvaw5yjyx6asvxfmaw5ah2arywwgrem4y5svi5ntskoeb5id.onion/
+wss://skzzn6cimfdv5e2phjc4yr5v7ikbxtn5f7dkwn5c7v47tduzlbosqmqd.onion/
+wss://2g2jzcfgq5lcrceuq23lmya2drm3ku5qmqimr3bvu3amol55vidctrad.onion/
+ws://h3avzcsyhvpi2mtmh5d3yqhz65cwsmbdlud37hk5gbnxi2gkajwoczqd.onion/
+wss://ypmwkd7mqlcxt3dg4ersmenoiqkcn3jyv6gnwuzrtiweg2zhjovojhqd.onion/
+ws://vo3m7gv2xsjwzmby6aox5rgunwzkhzqbid57dgc5x6pf7rliemesbuid.onion/
+wss://ecbsla2ad6bukfuprhp6fsl4nmk73zikyeveumuhfo37qwcbbldvgkid.onion/nostrrelay/bookit.relay
+wss://iq2nbjtlihqbvskgx37rq6kwxkz42jxe4q5ulfewh3zkxgywflbvo6yd.onion:4848/
+wss://o6ga6lxnax2z7pgkkenifollohkrv55r36mdzdtofi7d5yyif2f4o5yd.onion:5050/
+wss://o6ga6lxnax2z7pgkkenifollohkrv55r36mdzdtofi7d5yyif2f4o5yd.onion:5051/
+wss://btcqspp5dl4rlgl5pomcyv3odfeki7a5zrmjoekyu5vsoqz5bth4e7yd.onion/
+ws://fnxwipsg3lfzij64lvjgmutvkkpd7eo2mr2khxkofyywf3vsvbk73jad.onion/
+wss://zseqg6ldkdvumzr73jkjfsrozej47b6jmy7kcmuhd7e7u23ud2kh2zad.onion/nostrrelay/ipsd5w4x
+ws://niqpwkaxsdfw5cnkxgek76qncrvzmfluqhbaehi4hnetywbywykesvid.onion/
+ws://y7czvmhzcsmlglpsg4c2tyrlxf6o26ri4bqsd3pfpkyzl64inxvaqead.onion/
+ws://z4mf4irie7bo3l6c24lqu5stvgm5mfl5vuqjdc6mfhmddzzvcutvaqid.onion/
+ws://7fvtb63nbejaiuacmvllb3qatmkydtjxlxkvi3z7zsjoijcinyc3b6id.onion/
+ws://7jfgu2smh6lvw2wfixartnjcvfgfk47m5elv2y4oimkpizkrn5saz3ad.onion/
+ws://fihkwz47fairbjpuatg3hrfbsdrq35327idadcdr34ufvzmpdq2ib4qd.onion/
+ws://jqyfgvquh7n23zwdvargvjfdcshlamgjdqoyayha67wqzkfybl3lkyqd.onion/
+wss://xodpdwcomyfwnhnwja674roe4gcuov5psdyrrkexe4ay7qizn2xtkpid.onion/
+ws://rrvgi3fsztko2kfjyabd4celdayxljf3xrs2x6m2j5oma5tsxwymwhqd.onion/
+wss://2pbkpndvpeebljfvjew6auq63lndzszqnntct5aqfmazslerzxe75kad.onion/
+ws://cmuqxreb6oma3x3iq3zh62im7w4266uo3txac6e75qtlfqs7scwqyqyd.onion/
+wss://7otrzettas4b6cw6kdp7w2gjpoapkt6qgbkwq7elguw4ujvoj2ueagad.onion/
+ws://z2qr7h3532kpbhb2y5g4odwsbm3ydvk6dee3bx2bym4ome7n5y6phtid.onion/
+ws://ljjrqq6frnm56oeowjzqfzfniwqqpohyg4l6adawekgh4vligfcg2lyd.onion/
+ws://fre3yqrltwefuiu32hc573qhteu55j7rdfubzaqkqyoawconz62v76id.onion/
+wss://dgi4mb7antpcmrx4rynm6xq52xzt5duvxa4iwucq4mszgpz6smrjajqd.onion/
+wss://cmuqxreb6oma3x3iq3zh62im7w4266uo3txac6e75qtlfqs7scwqyqyd.onion/
+ws://2pbkpndvpeebljfvjew6auq63lndzszqnntct5aqfmazslerzxe75kad.onion/
+ws://n3k6eji4sqcwpsb6wzsqkwtapvis3buoiagbh2jzpc6h6mziblu7dnqd.onion/
+wss://iq2nbjtlihqbvskgx37rq6kwxkz42jxe4q5ulfewh3zkxgywflbvo6yd.onion/
+ws://6amyhf3sjvgxe5qzbx4xn52pcnqresdmi7szxurp6umkvz6mthxjdcad.onion/
+wss://7nj3e5opqjcpyaoxi56c5yvgzc3b3dmubbk4c6nrdq5lnzn64g5jwuad.onion/
+ws://jdqldw6hffnjdymzo5bikc75xuc7kg5zex23yuhczyacqcth5hxw2sid.onion/
+ws://aymg4xipavyuiyfnzm74xrd64aagylbjfmt64z246x25zawl622dp3ad.onion/
+wss://dnppj4kopczovvzvpzmihv2iwe5wt3gbrxjnltjc2zdjpttrdz4owpad.onion/
+wss://7raqve3vlhisvgihvlihwehmj4jt5r3ue2jf5vwwwrmelxmoygpdusid.onion/
+ws://2gs647qsmbui4ipmcivlkrbuhyx53apnhiy2tsedwgnqx2lgkjruujad.onion/
+ws://2g2jzcfgq5lcrceuq23lmya2drm3ku5qmqimr3bvu3amol55vidctrad.onion/
+wss://lsnmrd36ktdf34iwt2yt26to37fe5veibwkfcdsjwd7be3lezk6u7rid.onion/
+ws://fkoylst3427fhppg5exfqgcncfl77pacc4whox3w3fxcp5gaxexv46yd.onion/
+ws://i5vwqx2m4ofabmjo2oedqqcxh2sxyr7eaqhw44xp4dsmbuio2tkcfxqd.onion/
+wss://d463rbo7dgbfuxvvxpory2og2etl4gttfzmqcixdq7rpts47lpgolkyd.onion/
+wss://lufi62up3tn2khr4tmbw5cltyg5qmeen6ydgw4blu75zklrqzootbmad.onion/
+wss://3gkpphcfwb6w5iq6axnmlbvr7pz2t37uy4ofocyijzttzrbz4jy43fid.onion/
+ws://b6ctguuizhc24ufke7qnci7mmjyqiweobxckuhdxbd7kei4jlhilzjyd.onion/
+ws://jz2l2bf6f6wssdqwkg7ogthkc5i3ymyiwkaz3tbhff6ro3h3zqddekyd.onion/
+ws://ng4jk6yiqgfczo4wyxszuj7w6jok3fptehu533o3mlzs3vph3dvjfdid.onion/
+wss://eilztymyqseral57oyise5ai4jhrrwntet62nzdw2bxri25x2atmjpad.onion/
+wss://jgqaglhautb4k6e6i2g34jakxiemqp6z4wynlirltuukgkft2xuglmqd.onion/
+ws://rc3wsp47gcshm27jwcx4ruuu6sbw4jdykqjkw37l5bp3nr2g5zmx7tad.onion/
+wss://bitcoinr6de5lkvx4tpwdmzrdfdpla5sya2afwpcabjup2xpi5dulbad.onion/
+wss://w2do75kvolcihgcsad2ftpswl2z2e6eckrvzeajyfrzpzdr7rc37x4yd.onion/nostrrelay/cyberpunkhardwarepublic
+ws://wcl2meyp236fa3dmfzfyq6aacbdoixrlocb6zozjs6xklxizschj2did.onion/
+ws://ggnqqbsrrvl3s3rbo2zmdgdrezrwiws5y5iwnpgnucw27yjb2vccriid.onion/
+ws://d463rbo7dgbfuxvvxpory2og2etl4gttfzmqcixdq7rpts47lpgolkyd.onion/
+ws://inbwt4hlpkosan2tp5udt5m3ihdi6l3g6gcfokxc5ayxv4jdkbegm4id.onion/
+ws://5fjh7va2fdzzjzbrm7crih7l34dvcvkrp2j46puiw4etared3evwq5ad.onion/
+wss://hrxqtouyp7j3yqg4hsfde2hmjtkxltmyu64y3suk3dttcwjkdbapc4qd.onion:5051/
+ws://fnqdhz3df33da6wxg7jskvumd5rjn3nknln6ecun7uwwysc7vkwkjgid.onion/
+wss://ngkdank5vxgnpthkm2fx2syyekp632bvqzlb5dtqlwbkdxodfgtgkayd.onion/
+ws://izj3isbk3pmade74ontdijodhehsytnw2iokdhh6k3flk4mq2pau6sid.onion/
+ws://7nj3e5opqjcpyaoxi56c5yvgzc3b3dmubbk4c6nrdq5lnzn64g5jwuad.onion/
+ws://btcqspp5dl4rlgl5pomcyv3odfeki7a5zrmjoekyu5vsoqz5bth4e7yd.onion/
+wss://vavursybkbgfyow7nnst5jnqsj2xyteusf3zeerbjdizq6y7h25v4syd.onion:5051/
+wss://m275iixwsnldfh7govpgoimd4djkoy4lh6pvmodqy7ryilebc3qo4yid.onion/nostrclient/api/v1/relay
+ws://4ynamw47z4fiyusrqo75q6wzncyqni3ut6io5qm5sze5nblhgxlclgid.onion/
+ws://3vp46rj4ahp5ghcng3ukjoewi76c7eerg5hve3rj7ekbj5dbmv3mcaqd.onion/
+wss://ng4jk6yiqgfczo4wyxszuj7w6jok3fptehu533o3mlzs3vph3dvjfdid.onion/
+wss://6phpk6vlfctkqxmei2zou6zxaaa5ccbt2zqfbt75cxbfsfwgnqlmi7qd.onion/
+ws://esftusen4dwdc5avmoujgkv6s3l4ir54d7mtathnwxlswdbg4gbrs4qd.onion/
+wss://kpa4k6acxzjv2m2p72keftbpaymwpq2h67jqnin3d4y3djxyheuifoqd.onion/
+ws://ufyirz4f5sn5gbsx4mfabooievm3lxcx6fnsh4udzx7silws6kbpypid.onion/
+ws://mnebodcczpct6xz3jd56a2kurv265bu3jpdvzclqj3thpozhaox7rhid.onion/
+ws://lsnmrd36ktdf34iwt2yt26to37fe5veibwkfcdsjwd7be3lezk6u7rid.onion/
+ws://ccdt5ch6lg66i7gsc6yuu5de2uumemqdhqfj4ys5tmipuczs4r5xe2ad.onion/
+wss://d6egak3woofrixu26gr3utb5qezhkktavsuwlrfqaauu55lmpudxudqd.onion:5051/
+ws://53snncs7vegargpaardbxjnii2oan3xpmbeaf6czwoqa2axz5mvbsjid.onion/
+wss://4rcmjk2q45msnlbiipqa5nkinknu37qyczmngfvfk763yunabpsykmqd.onion/
+ws://7jdx65mkmffhghgihv26wuan6t2rl3u7ua65jgbtuvva226aeqhvqoad.onion/
+ws://oxtrdevav64z64yb7x6rjg4ntzqjhedm5b5zjqulugknhzr46ny2qbad.onion/
+wss://rwuyq3f3cvazsurkplwuzh3nvkeet7mkwenjkyr5erydr2fajxjresyd.onion:2121/
+ws://xaq4u4zuvmlgkdfjpkljli5lvecyobluu2565y772mtfxv3edwbk7eqd.onion/
+wss://mnjwoopwhao3yq72jttwafuxfhcrgmu7dbld33wfae3hou54e2dey3ad.onion/
+wss://yttmdhxezyfu2mjzbtynxw3gpjpg5qql6aazphiajlz24zd7omwr5dqd.onion:5051/
+ws://f4xyivffubak4tlyg3jmnd2lumrq4cxgwr2ce3xyln4ldfxtypnqhkid.onion/
+ws://uec2cmjauzufrtlq6wq6l2ujfncdvo3suezz423gsvz5xvhehm2mcgid.onion/
+ws://oszxnul6sljymfoykzfwuhegzfi57vqlvgowxb3zc4773wcpp322cpid.onion/
+ws://gbpcbjx2scm6i7i46djgdjibnxomhme65iqfuwt7nqum5ye2c4m7y2id.onion/
+wss://rbeovcraoafmtoungwtclo7pj5oalvkactksdlmtipt2mjmw4zzhvtyd.onion/
+ws://nlztwzceccoo3j7g4gqxl5ebf762aiqo7bepsxqr55wqecka4udi6xqd.onion/
+wss://63ragcfwb5xhoe5gfflazfyrde3qjdo73cblhhmnbviizowdo2q5haid.onion:5051/
+ws://ebnqcyniiqlcnod4ml4q2rgyflcndgazo77biszf2kperxxgnwdm27id.onion/
+ws://rwpmex54eltx5vpv63fbbjrk46sy7sumwowuxks4o66re4act6gygsid.onion/
+ws://uizdc3nrqznwngo5dsc6a5wvblpp2iksnsqjrvlrykijg27ixzssedad.onion/
+ws://thohd2q34agprpzybcmgrj7aky5tapuwkinj3zbiowe6k5asnnas7bad.onion/
+wss://ib3kefulyelfjvvw6ooxj4gkbdkgt7f4ovb4mukhs2qxuaychpbayjid.onion/
+wss://gx25hcmidmt522ofm6wdmbhzg6i4p5ygp4ovyxbgsql4ylrwjweibyyd.onion/
+wss://xbcpao7bl7ucgkcq2qicu4vhyormo3i3qiavrjpuio3ffxdyerbmopyd.onion:5051/
+ws://ehpe44o7vcsew7bigqcalpsmxz7j6xa67ctnxywz5is422jqtqax5oad.onion/
+ws://xjpv5c4sh2vtqy63ywvukyjajdn26pms6robbuy57iyyn557ibfwafid.onion/
+ws://6phpk6vlfctkqxmei2zou6zxaaa5ccbt2zqfbt75cxbfsfwgnqlmi7qd.onion/
+ws://2jsnlhfnelig5acq6iacydmzdbdmg7xwunm4xl6qwbvzacw4lwrjmlyd.onion/
+ws://6vnnhyxo2pkayko7pbn5njadaslozibrvctm6fxgusjn5oxpgjo747ad.onion/
+ws://eilztymyqseral57oyise5ai4jhrrwntet62nzdw2bxri25x2atmjpad.onion/
+ws://5lxvoghtsjhmj5qntlpk6bxyyoxpwapf6xl4lztde7dm34lujuznenyd.onion/
+ws://jgqaglhautb4k6e6i2g34jakxiemqp6z4wynlirltuukgkft2xuglmqd.onion/
+ws://lproxnb5xpeomugwmbhxywrihu46cimm2ikutcuvzkg4d7gvw5nn7sqd.onion/
+ws://4rcmjk2q45msnlbiipqa5nkinknu37qyczmngfvfk763yunabpsykmqd.onion/
+wss://lchdldwpnkzugmipaxkzo5dazeblieegp5txcueryn7eqyjbpawaprid.onion/
+ws://if4nmnw5vasc4yxw557zqxacmc3dc5z43zv6uzhcjzgrgr777onnzxyd.onion/
+ws://jcx4nuimw4q6lbgiadj65qjtvoikc2cgf3vxjjp5om7eaz62t2cdtlyd.onion/
+ws://amfenmirzxbvq5zy726sg2tdmoyc4i7tabzkh3lobhjn7vfiyvmcw3id.onion/
+ws://qusvtpzrxszpneissbvu3n43cvsr6jil3qc6igtd7mufdckuz7bkkyad.onion/
+ws://lufi62up3tn2khr4tmbw5cltyg5qmeen6ydgw4blu75zklrqzootbmad.onion/
+wss://u5epuanp2fbie4phw6zekzna6zotvsffji4td4ee7iwgwdxlz4kwqqad.onion:5051/
+ws://orallvr4ngnokmvowuyz52azezotrj52m3563vsaohhwrbmb7vkyqbqd.onion/
+wss://fnqdhz3df33da6wxg7jskvumd5rjn3nknln6ecun7uwwysc7vkwkjgid.onion/
+ws://4765b6q6tcglrc7hxfpk63tftwa52d43zxszal3eolqd6qyfuztup4yd.onion/
+ws://moauxczxwvggzqhtgjjyjqzmdmnwoxaa7mmgk5veljylj6yev4lqddad.onion/
+ws://vxlw4rlg7go34ol43g4gxbvfu4txdzjauquvnbptzwjflezs3vik55id.onion/
+ws://lpxyewdhgf4p26yjxl65e4in45q5qrtvyfn5bxgekl5gbytgdjjtd6yd.onion/
+wss://2jsnlhfnelig5acq6iacydmzdbdmg7xwunm4xl6qwbvzacw4lwrjmlyd.onion/
+ws://6gygg7t2ngtl6lvipwgbjwledrr6hnfuhufgijct3v425ba32n2scbid.onion/
+wss://oxtfxsmduqihxdzpti3bxzmudcsqm3tbrkxuh3lagnxhzxc364lwzhid.onion/
+ws://5begxlssgzlquewmbrvhdu7efzwqoomlgt6pymftdtbdiy5ypnjizjyd.onion/
+wss://5fjh7va2fdzzjzbrm7crih7l34dvcvkrp2j46puiw4etared3evwq5ad.onion/
+ws://tf2yfiqfzc2ybazi7ugmtub25gzjn3le7fetbajnrhdxtmecr2fqm7ad.onion/
+ws://lzybsdznklr56a5qnvj6qy45xx7qeh73ckv6yrww5ehxofinqn7kydqd.onion/
+ws://gqn3cjgqs5rwg6ib4z7bnbdlxczucod6p2yxmjslr2s2euqtguikk5yd.onion/
+ws://2b2bd5zc6lxzc4s3ufjgaw5zibexghmerwuh3ikqcwmvfjpzw5r5m6id.onion/
+ws://f5g46kofzifr32tgdr4m7otehkvsxefxh74uxci6izgj2kuvmg2t3yyd.onion/
+ws://3gkpphcfwb6w5iq6axnmlbvr7pz2t37uy4ofocyijzttzrbz4jy43fid.onion/
+wss://iyk7ziux7hilpybtb5dwd33bokzxmjeaxe7zsdwtywjmw7gqwi4z6eyd.onion/
\ No newline at end of file
diff --git a/packages/publisher/index.js b/packages/publisher/index.js
new file mode 100644
index 00000000..02d658bb
--- /dev/null
+++ b/packages/publisher/index.js
@@ -0,0 +1,11 @@
+export { Publisher } from './src/Publisher.js'
+export { Kind30066 } from './src/kinds/Kind30066.js'
+export { Kind10066 } from './src/kinds/Kind10066.js'
+
+import { Kind30066 } from './src/kinds/Kind30066.js'
+import { Kind10066 } from './src/kinds/Kind10066.js'
+
+export default {
+ Kind30066,
+ Kind10066
+}
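+
+// Usage sketch (assumes DAEMON_PUBKEY/DAEMON_PRIVKEY and config.publisher.to_relays are set):
+//   import Publish from '@nostrwatch/publisher'
+//   await new Publish.Kind30066().one({ url: 'wss://relay.example/', online: true })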
\ No newline at end of file
diff --git a/packages/publisher/package.json b/packages/publisher/package.json
new file mode 100644
index 00000000..27f595bf
--- /dev/null
+++ b/packages/publisher/package.json
@@ -0,0 +1,13 @@
+{
+ "name": "@nostrwatch/publisher",
+ "version": "0.0.1",
+ "type": "module",
+ "description": "Library for publishing nostr.watch relay status and publisher registration events",
+ "main": "index.js",
+ "repository": "http://github.com/sandwichfarm/nostr-watch",
+ "author": "sandwich ",
+ "license": "MIT",
+ "dependencies": {
+ "nostr-tools": "1.17.0"
+ }
+}
diff --git a/packages/publisher/src/Publisher.js b/packages/publisher/src/Publisher.js
new file mode 100644
index 00000000..f88b09d2
--- /dev/null
+++ b/packages/publisher/src/Publisher.js
@@ -0,0 +1,95 @@
+import { validateEvent, verifySignature, getSignature, getEventHash, SimplePool } from 'nostr-tools'
+import Logger from '@nostrwatch/logger'
+import { loadConfig, chunkArray } from '@nostrwatch/utils'
+
+const config = await loadConfig()
+
+export class Publisher {
+
+ constructor(){
+ this.logger = new Logger('publisher')
+ }
+
+ async many(relays){
+ this.logger.debug(`many(): attempting to publish ${relays.length} events to ${JSON.stringify(config.publisher.to_relays)} relays`)
+ if(!(relays instanceof Array)) throw new Error('many(): relays must be an array')
+ const relaysChunks = chunkArray(relays, 50)
+ let count = 0
+ for await ( const chunk of relaysChunks ) {
+ let signedEvents = []
+      this.logger.info(`many(): publishing ${chunk.length} events from chunk ${++count}/${relaysChunks.length}`)
+ for ( const relay of chunk ) {
+ const unsignedEvent = this.generateEvent(relay)
+ signedEvents.push(this.signEvent(unsignedEvent))
+ }
+ await this.publishEvents(signedEvents)
+ }
+ }
+
+ async one(relay){
+ if(!relay?.url) throw new Error('one(): relay must have a url property')
+ if(!config.publisher?.to_relays) throw new Error('one(): config.publisher.to_relays is not configured')
+ this.logger.debug(`one(): attempting to publish event for relay ${relay.url} to ${JSON.stringify(config.publisher?.to_relays)} relays`)
+ const unsignedEvent = this.generateEvent(relay)
+ const signedEvent = this.signEvent(unsignedEvent)
+ await this.publishEvent(signedEvent)
+ this.logger.debug(`one(): published event`)
+ }
+
+ tpl(kind=1){
+ return {
+ pubkey: process.env.DAEMON_PUBKEY,
+ kind,
+ content: "",
+ tags: [],
+ created_at: Math.round(Date.now()/1000)
+ }
+ }
+
+ generateEvent(){
+ this.logger.warn('generateEvent(): has not been implemented by subclass, generating a blank event')
+ return this.tpl(30066)
+ }
+
+ generateEvents(relays){
+ const unsignedEvents = []
+ relays.forEach( relay => {
+ unsignedEvents.push(this.generateEvent(relay))
+ })
+ return unsignedEvents
+ }
+
+ signEvent(unsignedEvent){
+ const signedEvent = unsignedEvent
+ signedEvent.id = getEventHash(signedEvent)
+ signedEvent.sig = getSignature(signedEvent, process.env.DAEMON_PRIVKEY)
+ const valid = validateEvent(signedEvent) && verifySignature(signedEvent)
+ if(!valid)
+      throw new Error('signEvent(): event does not validate')
+ // if(signedEvent.tags.filter( tag => tag[0]==='s' && tag[1]==='online' ).length > 0) console.log(signedEvent)
+ return signedEvent
+ }
+
+ signEvents(unsignedEvents){
+ const signedEvents = []
+ unsignedEvents.forEach( event => {
+ signedEvents.push(this.signEvent(event))
+ })
+ return signedEvents
+ }
+
+ async publishEvent(signedEvent){
+ const pool = new SimplePool();
+ const relays = config.publisher.to_relays
+ let pubs = pool.publish(relays, signedEvent)
+ return Promise.all( pubs )
+ }
+
+ async publishEvents(signedEvents){
+ let publishes = []
+ for await ( const signedEvent of signedEvents ) {
+ publishes.push( await this.publishEvent(signedEvent) )
+ }
+ return publishes
+ }
+}
\ No newline at end of file
diff --git a/packages/publisher/src/kinds/Kind10066.js b/packages/publisher/src/kinds/Kind10066.js
new file mode 100644
index 00000000..c4e1e2ad
--- /dev/null
+++ b/packages/publisher/src/kinds/Kind10066.js
@@ -0,0 +1,36 @@
+import { Publisher } from '../Publisher.js'
+
+export class Kind10066 extends Publisher {
+ constructor(){
+ super()
+ }
+
+ generateEvent(publisher){
+    const eventTpl = this.tpl(10066)
+ const tags = []
+
+ const { url, kinds, parameters, geo, } = publisher
+
+    if(kinds)
+      kinds.forEach( kind => tags.push(['kind', String(kind)]) )
+
+ if(url)
+ tags.push(['url', url, ])
+
+ if(parameters instanceof Object)
+ tags.push(['parameters', JSON.stringify(parameters)])
+
+ if(geo)
+ if(typeof geo === 'string')
+ tags.push(['g', geo])
+      else if(Array.isArray(geo))
+ geo.forEach( g => tags.push(['g', g]) )
+
+ const event = {
+ ...eventTpl,
+ tags
+ }
+
+ return event
+ }
+}
\ No newline at end of file
diff --git a/packages/publisher/src/kinds/Kind11166.js b/packages/publisher/src/kinds/Kind11166.js
new file mode 100644
index 00000000..5f9efdea
--- /dev/null
+++ b/packages/publisher/src/kinds/Kind11166.js
@@ -0,0 +1,7 @@
+import { Publisher } from '../Publisher.js'
+
+export class Kind11166 extends Publisher {
+ constructor(){
+ super()
+ }
+}
\ No newline at end of file
diff --git a/packages/publisher/src/kinds/Kind30066.js b/packages/publisher/src/kinds/Kind30066.js
new file mode 100644
index 00000000..e85d52cc
--- /dev/null
+++ b/packages/publisher/src/kinds/Kind30066.js
@@ -0,0 +1,39 @@
+import { Publisher } from '../Publisher.js'
+
+export class Kind30066 extends Publisher {
+ constructor(){
+ super()
+ }
+
+ generateEvent(relay){
+ if(!relay?.url)
+ throw new Error('generateEvent(): relay must have a url property')
+ const eventTpl = this.tpl(30066)
+ const tags = []
+ tags.push(['d', relay?.url])
+
+    if(typeof relay?.online === 'boolean')
+      tags.push(['s', relay.online? 'online' : 'offline'])
+
+ if(relay?.network)
+ tags.push(['n', relay?.network])
+
+ if(relay?.geo)
+ if(typeof relay?.geo === 'string')
+ tags.push(['g', relay?.geo])
+      else if(Array.isArray(relay?.geo))
+ relay?.geo.forEach( geo => tags.push(['g', geo]) )
+
+ if(relay?.attributes)
+ relay?.attributes.forEach( attribute => tags.push(['t', attribute]) )
+
+ if(relay?.retries)
+      tags.push(['retries', String(relay.retries)])
+
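+    // Illustrative only: for an online clearnet relay the tags might be
+    // [['d','wss://relay.example/'], ['s','online'], ['n','clearnet'], ['t','ssl-valid']]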
+ const event = {
+ ...eventTpl,
+ tags
+ }
+ return event
+ }
+}
\ No newline at end of file
diff --git a/packages/redis/bullboard.js b/packages/redis/bullboard.js
new file mode 100644
index 00000000..bc0192e4
--- /dev/null
+++ b/packages/redis/bullboard.js
@@ -0,0 +1,38 @@
+const { createBullBoard } = require('@bull-board/api');
+const { BullMQAdapter } = require('@bull-board/api/bullMQAdapter');
+const { FastifyAdapter } = require('@bull-board/fastify');
+const { Queue: QueueMQ, Worker } = require('bullmq');
+const fastify = require('fastify');
+
+const sleep = (t) => new Promise((resolve) => setTimeout(resolve, t * 1000));
+
+const redisOptions = {
+ port: 6379,
+ host: 'localhost',
+ password: '',
+ tls: false,
+};
+
+const queueMQ = new QueueMQ('NocapdQueue', { connection: redisOptions }); // BullMQ queues require a name; 'NocapdQueue' is illustrative
+
+const serverAdapter = new FastifyAdapter()
+
+createBullBoard({
+ queues: [new BullMQAdapter(queueMQ)],
+ serverAdapter,
+ options: {
+ uiConfig: {
+ boardTitle: 'My BOARD',
+ boardLogo: {
+ path: 'https://cdn.my-domain.com/logo.png',
+ width: '100px',
+ height: 200,
+ },
+ miscLinks: [{text: 'Logout', url: '/logout'}],
+ favIcon: {
+ default: 'static/images/logo.svg',
+ alternative: 'static/favicon-32x32.png',
+ },
+ },
+ },
+});
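+
+// Minimal server bootstrap sketch; the port and '/ui' base path are assumptions,
+// not part of the original example:
+const run = async () => {
+  const app = fastify();
+  serverAdapter.setBasePath('/ui');
+  app.register(serverAdapter.registerPlugin(), { prefix: '/ui', basePath: '/ui' });
+  await app.listen({ port: 3000, host: '0.0.0.0' });
+  console.log('bull-board UI available at http://localhost:3000/ui');
+};
+run();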
\ No newline at end of file
diff --git a/packages/relaydb.new/.gitignore b/packages/relaydb.new/.gitignore
new file mode 100644
index 00000000..39a8a19a
--- /dev/null
+++ b/packages/relaydb.new/.gitignore
@@ -0,0 +1,4 @@
+node_modules
+yarn.lock
+config/config.json
+.pg
\ No newline at end of file
diff --git a/packages/relaydb.new/Dockerfile b/packages/relaydb.new/Dockerfile
new file mode 100644
index 00000000..2491e0fd
--- /dev/null
+++ b/packages/relaydb.new/Dockerfile
@@ -0,0 +1,17 @@
+# Use the official Node.js 20 image
+FROM node:20
+
+# Set the working directory inside the container
+WORKDIR /usr/src/app
+
+# Copy package.json and package-lock.json to the container
+COPY package*.json ./
+
+# Install app dependencies
+RUN npm install
+
+# Copy the source code into the container
+COPY src/ ./src/
+
+# Specify the entrypoint for the application
+CMD ["node", "src/daemon.js"]
diff --git a/packages/relaydb.new/docker-compose.yaml b/packages/relaydb.new/docker-compose.yaml
new file mode 100644
index 00000000..a6c90a8d
--- /dev/null
+++ b/packages/relaydb.new/docker-compose.yaml
@@ -0,0 +1,19 @@
+version: '3.8'
+services:
+ relaydb:
+ image: postgres:latest
+ ports:
+ - "5432:5432"
+ env_file:
+ - .env
+ volumes:
+ - .pg:/var/lib/postgresql/data
+
+ # nodejs_app:
+ # build: . # Build the Node.js app using the Dockerfile in the current directory
+ # ports:
+ # - "3000:3000" # Map your app's port to a host port if needed
+ # depends_on:
+ # - relaydb # Ensure that the PostgreSQL container is started before the Node.js app
+ # env_file:
+ # - .env # You can specify environment variables here if needed
diff --git a/packages/relaydb.new/package.json b/packages/relaydb.new/package.json
new file mode 100644
index 00000000..100f74cf
--- /dev/null
+++ b/packages/relaydb.new/package.json
@@ -0,0 +1,18 @@
+{
+ "name": "@nostrwatch/relaydb",
+ "version": "0.0.1",
+ "main": "src/index.js",
+ "type": "module",
+ "scripts": {
+ "launch": "node src/daemon.js",
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "dependencies": {
+ "pg": "8.11.3",
+ "sequelize": "6.35.0",
+ "sequelize-pg-utilities": "2.0.2"
+ },
+ "devDependencies": {
+ "sequelize-test-helpers": "1.4.3"
+ }
+}
diff --git a/packages/relaydb.new/src/associations/AssociateCheckMeta.js b/packages/relaydb.new/src/associations/AssociateCheckMeta.js
new file mode 100644
index 00000000..4ff9a21e
--- /dev/null
+++ b/packages/relaydb.new/src/associations/AssociateCheckMeta.js
@@ -0,0 +1,35 @@
+import CheckMeta from '../models/CheckMeta.js';
+import InfoMeta from '../models/MetaInfo.js';
+import DnsMeta from '../models/MetaDns.js';
+import GeoMeta from '../models/MetaGeo.js';
+import SslMeta from '../models/MetaSsl.js';
+
+const associateCheckMetaWith = (Checkable) => {
+ const alias = `checkable${Checkable.name}`;
+
+ CheckMeta.belongsTo(Checkable, {
+ foreignKey: 'meta_id',
+ constraints: false,
+ as: alias,
+ scope: {
+ meta_type: Checkable.name
+ }
+ });
+ Checkable.hasMany(CheckMeta, {
+ foreignKey: 'meta_id',
+ constraints: false,
+ scope: {
+ meta_type: Checkable.name
+ },
+ as: `${alias}Metas`
+ });
+}
+
+
+export default () => {
+ associateCheckMetaWith(InfoMeta);
+ associateCheckMetaWith(GeoMeta);
+ associateCheckMetaWith(DnsMeta);
+ associateCheckMetaWith(SslMeta);
+}
+
diff --git a/packages/relaydb.new/src/associations/AssociateRelay.js b/packages/relaydb.new/src/associations/AssociateRelay.js
new file mode 100644
index 00000000..02107c6b
--- /dev/null
+++ b/packages/relaydb.new/src/associations/AssociateRelay.js
@@ -0,0 +1,31 @@
+import { Relay, Relayip, Checkmeta, Checkstatus, Info, Dns, Geo, Ssl } from '../models/index.js';
+
+export default () => {
+ //Checks
+ Checkmeta.belongsTo(Relay, { foreignKey: 'relay_id' });
+ Relay.hasMany(Checkmeta, { foreignKey: 'relay_id' });
+
+ Relay.belongsTo(Relay, { foreignKey: 'parent_id' });
+ Relay.hasOne(Relay, { foreignKey: 'parent_id' });
+
+ Checkstatus.belongsTo(Relay, { foreignKey:'relay_id' });
+ Relay.hasMany(Checkstatus, { foreignKey:'relay_id' });
+
+ //Meta
+ Info.belongsTo(Relay, { foreignKey:'relay_id' });
+ Relay.hasMany(Info, { foreignKey:'relay_id' });
+
+ Dns.belongsTo(Relay, { foreignKey:'relay_id' });
+ Relay.hasMany(Dns, { foreignKey:'relay_id' });
+
+ Geo.belongsTo(Relay, { foreignKey:'relay_id' });
+ Relay.hasMany(Geo, { foreignKey:'relay_id' });
+
+ Ssl.belongsTo(Relay, { foreignKey:'relay_id' });
+ Relay.hasMany(Ssl, { foreignKey:'relay_id' });
+
+ // Join Tables
+ Relayip.belongsTo(Relay, { foreignKey: 'relay_id' });
+ Relay.hasMany(Relayip, { foreignKey: 'relay_id' });
+}
+
diff --git a/packages/relaydb.new/src/associations/index.js b/packages/relaydb.new/src/associations/index.js
new file mode 100644
index 00000000..97ec9e3d
--- /dev/null
+++ b/packages/relaydb.new/src/associations/index.js
@@ -0,0 +1,7 @@
+import AssociateCheckMeta from './AssociateCheckMeta.js'
+import AssociateRelay from './AssociateRelay.js'
+
+export default () => {
+ AssociateCheckMeta()
+ AssociateRelay()
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/config/config.json b/packages/relaydb.new/src/config/config.json
new file mode 100644
index 00000000..eb1d894e
--- /dev/null
+++ b/packages/relaydb.new/src/config/config.json
@@ -0,0 +1,19 @@
+{
+ "development": {
+ "username": "postgres",
+ "password": "postgres",
+ "database": "postgres",
+ "dialect": "postgres",
+ "benchmark": false
+ },
+ "test": {
+ "username": "my-test-user",
+ "password": "my-test-password",
+ "database": "relaydb-test"
+ },
+ "production": {
+ "username": "nw-prod",
+ "password": "my-production-password",
+ "database": "relaydb-prod"
+ }
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/config/config.sample.json b/packages/relaydb.new/src/config/config.sample.json
new file mode 100644
index 00000000..292a7fa0
--- /dev/null
+++ b/packages/relaydb.new/src/config/config.sample.json
@@ -0,0 +1,12 @@
+{
+ "development": {
+ "username": "nw-dev",
+ "password": "password",
+ "database": "relaydb-dev"
+ },
+ "test": {
+ "username": "nw-test",
+ "password": "password",
+ "database": "relaydb-test"
+ }
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/connect/index.js b/packages/relaydb.new/src/connect/index.js
new file mode 100644
index 00000000..40f64cf6
--- /dev/null
+++ b/packages/relaydb.new/src/connect/index.js
@@ -0,0 +1,20 @@
+import dotenv from 'dotenv';
+import postgres from './postgres.js';
+
+dotenv.config();
+
+const dialect = process.env.DB_DIALECT || 'postgres';
+const dialects = {
+ postgres: postgres
+};
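+
+// Only postgres is wired up here; a hypothetical DB_DIALECT=mysql would need a
+// matching entry (and connection module) added to the map above.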
+
+const sequelize = dialects[dialect]()
+
+try {
+ await sequelize.authenticate();
+ console.log('Connection has been established successfully.');
+} catch (error) {
+  throw new Error(`Unable to connect to the database: ${error.message}`, { cause: error });
+}
+
+export default sequelize;
\ No newline at end of file
diff --git a/packages/relaydb.new/src/connect/postgres.js b/packages/relaydb.new/src/connect/postgres.js
new file mode 100644
index 00000000..e9dae583
--- /dev/null
+++ b/packages/relaydb.new/src/connect/postgres.js
@@ -0,0 +1,28 @@
+import Sequelize from 'sequelize';
+import { configure } from 'sequelize-pg-utilities'
+import config from '../config/config.json' assert {type: 'json'};
+
+const { name, user, password, options } = configure(config, "nostrwatch")
+
+console.log('postgres config:', name, user, options) // avoid logging the password
+
+const sequelize = () => new Sequelize(
+ name,
+ user,
+ password,
+ {
+ // host: process.env.POSTGRES_HOST,
+ // port: process.env.POSTGRES_PORT,
+ // dialect: 'postgres',
+ // logging: process.env.NODE_ENV === 'development',
+ // define: {
+ // charset: 'utf8',
+ // dialectOptions: {
+ // collate: 'utf8_general_ci'
+ // }
+ // },
+ ...options
+ }
+);
+
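+// Note: exported as a factory rather than an instance, so a Sequelize
+// connection is only constructed when connect/index.js selects this dialect.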
+export default sequelize;
\ No newline at end of file
diff --git a/packages/relaydb.new/src/daemon.js b/packages/relaydb.new/src/daemon.js
new file mode 100644
index 00000000..c5ce26e7
--- /dev/null
+++ b/packages/relaydb.new/src/daemon.js
@@ -0,0 +1,5 @@
+import run from "./daemon/index.js"
+
+const $worker = await run()
+
+// $worker.on('completed', job => console.log(job, 'completed'))
\ No newline at end of file
diff --git a/packages/relaydb.new/src/daemon/index.js b/packages/relaydb.new/src/daemon/index.js
new file mode 100644
index 00000000..9c984f41
--- /dev/null
+++ b/packages/relaydb.new/src/daemon/index.js
@@ -0,0 +1,41 @@
+import rdb from '../index.js'
+import { SyncQueue } from '@nostrwatch/controlflow'
+import { RedisConnectionDetails } from '@nostrwatch/utils'
+import { Relay } from '../models/index.js'
+
+const sync = SyncQueue();
+await sync.$Queue.drain()
+
+await rdb.connect.sync()
+
+export const generateModulePath = (jobData) => {
+ const { action, condition, type, batch } = jobData
+  if(!action || !type) throw new Error("Job data must include both an `action` and a `type`")
+  const batchStr = batch? "batch-": ""
+  const conditionStr = condition? `-${condition.toLowerCase()}`: ""
+  return `${process.env.PWD}/src/daemon/jobs/${type.toLowerCase()}-${batchStr}${action.toLowerCase()}${conditionStr}.js`
+}
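+
+// e.g. { type: 'relay', action: 'create', condition: 'ifnotexists', batch: true }
+// resolves to `src/daemon/jobs/relay-batch-create-ifnotexists.js`; jobs without
+// a condition (e.g. status-create.js) simply omit the suffix.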
+
+export default async () => {
+  const handlers = {}; // cache job handlers by type so each module is only imported once
+  const worker = async (job) => {
+    const { data } = job;
+    let { roundtrip, payload } = data;
+    let result
+ try {
+ if (!handlers[data.type]){
+ const { default: work } = await import(generateModulePath(data));
+ handlers[data.type] = work
+ }
+ result = await handlers[data.type](payload);
+ }
+ catch(err) {
+ console.log(err);
+ }
+    if(result?.length > 0) console.log(`${await Relay.count()} total`)
+ return {
+ roundtrip,
+ result
+ };
+ };
+ return new sync.Worker(sync.$Queue.name, worker, { concurrency: 1, connection: RedisConnectionDetails() });
+};
\ No newline at end of file
diff --git a/packages/relaydb.new/src/daemon/jobs/meta-create-ifnotexists.js b/packages/relaydb.new/src/daemon/jobs/meta-create-ifnotexists.js
new file mode 100644
index 00000000..b0ed1dd3
--- /dev/null
+++ b/packages/relaydb.new/src/daemon/jobs/meta-create-ifnotexists.js
@@ -0,0 +1,61 @@
+import hash from 'object-hash'
+
+import sequelize from '../../connect/index.js'
+import { Checkmeta, Info, Dns, Geo, Ssl } from '../../models/index.js'
+// import { insertIfNotExists } from '../../helpers/index.js'
+import { BaseHelpers } from '../../helpers/BaseHelpers.js'
+
+import { relayId } from '@nostrwatch/utils'
+
+const route = (type) => {
+ switch(type){
+ case 'info': return Info
+ case 'dns': return Dns
+ case 'geo': return Geo
+ case 'ssl': return Ssl
+ }
+}
+
+export default async (payload) => {
+ if(!payload.url) return { "status": "error", "message": "Normalized relay url was not provided in job data." }
+ if(!payload.type) return { "status": "error", "message": "No `type` provided in job data. (info, dns, geo, ssl...)" }
+ if(!payload.data) return { "status": "error", "message": "No `data` provided in job data. (nocap result for respective type)" }
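+
+  // Example payload (sketch; fields assumed from the checks above):
+  //   { url: 'wss://relay.example.com/', type: 'info', checked_by: '<publisher id>',
+  //     status: 'success', message: '', data: { ...nocap result for the given type } }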
+
+  let { url, type, checked_by:publisher_id, data, status, message } = payload
+ let $meta, $checkmeta
+
+  const Model = route(type)
+  const ModelHelpers = new BaseHelpers(Model)
+ const relay_id = relayId(url)
+ const id = hash(data)
+
+ const $txn = await sequelize.transaction();
+
+ try {
+    $meta = await ModelHelpers.createIfNotExists({
+      id,
+      relay_id,
+      data
+    }, { transaction: $txn })
+
+    if(!$meta) {
+      await $txn.rollback()
+      return []
+    }
+
+    $checkmeta = await Checkmeta.create({
+      relay_id,
+      publisher_id,
+      status,
+      message,
+      meta_type: type,
+      meta_id: $meta.id,
+    }, { transaction: $txn });
+
+    await $txn.commit();
+ }
+ catch(err){
+ await $txn.rollback();
+ return []
+ }
+
+ return [$meta.id, $checkmeta.id]
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/daemon/jobs/relay-batch-create-ifnotexists.js b/packages/relaydb.new/src/daemon/jobs/relay-batch-create-ifnotexists.js
new file mode 100644
index 00000000..7885ec1e
--- /dev/null
+++ b/packages/relaydb.new/src/daemon/jobs/relay-batch-create-ifnotexists.js
@@ -0,0 +1,9 @@
+import { Relay } from '../../models/index.js'
+import { RelayHelpers } from '../../helpers/RelayHelpers.js'
+
+const $RelayHelpers = new RelayHelpers(Relay)
+
+export default async (records) => {
+ const $rows = await $RelayHelpers.batch.createIfNotExists(records)
+ return $rows.map($row => $row.id)
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/daemon/jobs/relay-create-ifnotexists.js b/packages/relaydb.new/src/daemon/jobs/relay-create-ifnotexists.js
new file mode 100644
index 00000000..41734036
--- /dev/null
+++ b/packages/relaydb.new/src/daemon/jobs/relay-create-ifnotexists.js
@@ -0,0 +1,15 @@
+import { relayId } from '@nostrwatch/utils'
+
+import { Relay } from '../../models/index.js'
+import { RelayHelpers } from '../../helpers/RelayHelpers.js'
+
+const $RelayHelpers = new RelayHelpers(Relay)
+
+export default async (payload) => {
+ if(!payload.url) throw new Error(`No Relay URL provided in payload: ${JSON.stringify(payload)}`)
+
+ const { url, data } = payload
+
+  const $row = await $RelayHelpers.createIfNotExists({ ...data, id: relayId(url) })
+  return $row ? [$row.id] : []
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/daemon/jobs/relay-create.js b/packages/relaydb.new/src/daemon/jobs/relay-create.js
new file mode 100644
index 00000000..8e9af0eb
--- /dev/null
+++ b/packages/relaydb.new/src/daemon/jobs/relay-create.js
@@ -0,0 +1,9 @@
+import { relayId } from '@nostrwatch/utils'
+import { Relay } from '../../models/index.js'
+
+export default async (payload) => {
+  if(!payload.url) throw new Error("No Relay URL provided in payload")
+  let { url, data } = payload
+  const $row = await Relay.create({ ...data, id: relayId(url) })
+  return [$row.id]
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/daemon/jobs/status-create.js b/packages/relaydb.new/src/daemon/jobs/status-create.js
new file mode 100644
index 00000000..51a57037
--- /dev/null
+++ b/packages/relaydb.new/src/daemon/jobs/status-create.js
@@ -0,0 +1,55 @@
+import mapper from 'object-mapper'
+import { relayId } from '@nostrwatch/utils'
+import sequelize from '../../connect/index.js'
+import { Checkstatus, Relay } from '../../models/index.js'
+
+export default async (payload) => {
+  if(!payload.url) throw new Error("No Relay URL provided in payload")
+
+  //status checks only need the data, because websocket payloads are streamlined.
+  let { data } = payload
+
+ const $txn = await sequelize.transaction();
+
+ const map = {
+ 'url': 'relay_id',
+ 'checked_by': 'publisher_id',
+ 'connect.data': 'connect',
+ 'read.data': 'read',
+ 'write.data': 'write',
+ 'connect.duration': 'connectDuration',
+ 'read.duration': 'readDuration',
+    'write.duration': 'writeDuration'
+ }
+
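+  // e.g. a nocap result { url, checked_by, connect: { data: true, duration: 120 }, ... }
+  // maps to { relay_id, publisher_id, connect: true, connectDuration: 120, ... }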
+  const recordMapped = mapper(data, map)
+  recordMapped.relay_id = relayId(recordMapped.relay_id)
+  // object-mapper maps paths only, so derive the total duration by hand
+  recordMapped.duration = (recordMapped.connectDuration || 0) + (recordMapped.readDuration || 0) + (recordMapped.writeDuration || 0)
+
+ let $status
+
+ try {
+    $status = await Checkstatus.create({ ...recordMapped }, { transaction: $txn })
+ const now = new Date()
+
+    const $relay = await Relay.findOne({ where: { id: recordMapped.relay_id } })
+
+ let relayQuery = { last_checked: now }
+
+ if($relay.first_seen === null)
+ relayQuery = { ...relayQuery, first_seen: now }
+
+ if(recordMapped.connect)
+ relayQuery = { ...relayQuery, last_seen: now }
+
+    await Relay.update(relayQuery, { where: { id: recordMapped.relay_id }, transaction: $txn })
+
+ await $txn.commit();
+ }
+  catch(err){
+    await $txn.rollback();
+    return []
+  }
+
+ return [$status.id]
+}
diff --git a/packages/relaydb.new/src/daemon/routines/find-and-relate-ips.js b/packages/relaydb.new/src/daemon/routines/find-and-relate-ips.js
new file mode 100644
index 00000000..e69de29b
diff --git a/packages/relaydb.new/src/daemon/routines/infer-relay-relationships.js b/packages/relaydb.new/src/daemon/routines/infer-relay-relationships.js
new file mode 100644
index 00000000..e69de29b
diff --git a/packages/relaydb.new/src/helpers/BaseHelpers.js b/packages/relaydb.new/src/helpers/BaseHelpers.js
new file mode 100644
index 00000000..a504a3f4
--- /dev/null
+++ b/packages/relaydb.new/src/helpers/BaseHelpers.js
@@ -0,0 +1,53 @@
+export class BaseHelpers {
+ constructor(model){
+ this.model = model
+ this.count = count(this)
+ this.batch = batch(this)
+ }
+
+  async createIfNotExists(record, options={}){
+    const existingRecord = await this.model.findOne({
+      where: { id: record.id }
+    });
+    if(existingRecord) return false
+    const insertedRecord = await this.model.create(record, {
+      ...options,
+      ignoreDuplicates: true,
+      returning: true,
+    });
+    return insertedRecord
+  }
+}
+
+const batch = function(self){
+ const { model } = self
+ return {
+ createIfNotExists: async function(records, options) {
+ let insertedRecords = []
+ try {
+ const ids = records.map(record => record.id);
+ const existingRecords = await model.findAll({
+ where: { id: ids }
+ });
+ const existingIds = new Set(existingRecords.map(record => record.id));
+ const newRecords = records.filter(record => !existingIds.has(record.id));
+ insertedRecords = await model.bulkCreate(newRecords, {
+ ...options,
+ ignoreDuplicates: true,
+ returning: true,
+ });
+ }
+ catch(e){ console.warn(e) }
+ return insertedRecords
+ }
+ }
+}
+
+const count = function(self){
+ const { model } = self
+ return {
+ all: async function() {
+ return model.count()
+ }
+ }
+}
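+
+// Usage sketch:
+//   const helpers = new BaseHelpers(Relay)          // any Sequelize model
+//   await helpers.createIfNotExists({ id, ... })    // one record; false if it already exists
+//   await helpers.batch.createIfNotExists(records)  // bulk variant, skips existing ids
+//   await helpers.count.all()                       // total rows for the model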
\ No newline at end of file
diff --git a/packages/relaydb.new/src/helpers/RelayHelpers.js b/packages/relaydb.new/src/helpers/RelayHelpers.js
new file mode 100644
index 00000000..99f1294d
--- /dev/null
+++ b/packages/relaydb.new/src/helpers/RelayHelpers.js
@@ -0,0 +1,7 @@
+import { BaseHelpers } from './BaseHelpers.js'
+
+export class RelayHelpers extends BaseHelpers {
+ constructor(model){
+ super(model)
+ }
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/helpers/index.js b/packages/relaydb.new/src/helpers/index.js
new file mode 100644
index 00000000..cb56c511
--- /dev/null
+++ b/packages/relaydb.new/src/helpers/index.js
@@ -0,0 +1,32 @@
+export const batchInsertIfNotExists = async (model, records, options={}) => {
+ const ids = records.map(record => record.id);
+ const existingRecords = await model.findAll({
+ where: { id: ids }
+ });
+ const existingIds = new Set(existingRecords.map(record => record.id));
+ const newRecords = records.filter(record => !existingIds.has(record.id));
+ const insertedRecords = await model.bulkCreate(newRecords, {
+ ...options,
+ ignoreDuplicates: true,
+ returning: true,
+ });
+ return insertedRecords
+}
+
+export const insertIfNotExists = async (model, record, options={}) => {
+ const existingRecord = await model.findOne({
+ where: { id: record.id }
+ });
+ if(existingRecord) return false
+ const insertedRecord = await model.create(record, {
+ ...options,
+ ignoreDuplicates: true,
+ returning: true,
+ });
+ return insertedRecord
+}
+
+export default {
+ batchInsertIfNotExists,
+ insertIfNotExists
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/index.js b/packages/relaydb.new/src/index.js
new file mode 100644
index 00000000..bf8654b3
--- /dev/null
+++ b/packages/relaydb.new/src/index.js
@@ -0,0 +1,24 @@
+import sequelize from './connect/index.js'
+import models from './models/index.js'
+import helpers from './helpers/index.js'
+
+import initdb from './init.js'
+const initRes = await initdb(10)
+
+console.log('created new:', initRes.dbNew)
+console.log('message:', initRes.message)
+
+try {
+ await sequelize.sync();
+ console.log('Database synchronized.');
+} catch (e) {
+ console.error('Error during Sequelize synchronization:', e);
+ throw e;
+}
+
+export default {
+ sequelize,
+ connect: sequelize,
+ models,
+ helpers,
+}
\ No newline at end of file
diff --git a/packages/relaydb.new/src/init.js b/packages/relaydb.new/src/init.js
new file mode 100644
index 00000000..8c17736a
--- /dev/null
+++ b/packages/relaydb.new/src/init.js
@@ -0,0 +1,18 @@
+import { makeInitialiser } from 'sequelize-pg-utilities'
+import config from './config/config.json' assert {type: 'json'};
+
+const init = makeInitialiser(config)
+
+const start = async () => {
+ try {
+    const result = await init()
+ console.log(result.message)
+
+ // now do whatever else is needed to start your server
+ } catch (err) {
+ console.error('Could not start server', err)
+ process.exit(1)
+ }
+}
+
+export default init
\ No newline at end of file
diff --git a/packages/relaydb.new/src/migrations/.gitkeep b/packages/relaydb.new/src/migrations/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/packages/relaydb.new/src/models/AppMeta.js b/packages/relaydb.new/src/models/AppMeta.js
new file mode 100644
index 00000000..f4d9e239
--- /dev/null
+++ b/packages/relaydb.new/src/models/AppMeta.js
@@ -0,0 +1,19 @@
+import { Model, DataTypes } from 'sequelize'
+import sequelize from '../connect/index.js'
+
+class AppMeta extends Model {}
+
+AppMeta.init({
+ key: {
+ type: DataTypes.INTEGER,
+ primaryKey: true
+ },
+ value: {
+ type: DataTypes.STRING
+ }
+}, {
+ sequelize,
+ modelName: 'AppMeta'
+});
+
+export default AppMeta
\ No newline at end of file
diff --git a/packages/relaydb.new/src/models/CheckMeta.js b/packages/relaydb.new/src/models/CheckMeta.js
new file mode 100644
index 00000000..badb0053
--- /dev/null
+++ b/packages/relaydb.new/src/models/CheckMeta.js
@@ -0,0 +1,50 @@
+import { Model, DataTypes } from 'sequelize';
+import sequelize from '../connect/index.js'
+
+class Checkmeta extends Model {}
+
+Checkmeta.init({
+ id: {
+ type: DataTypes.INTEGER,
+ primaryKey: true,
+ autoIncrement: true
+ },
+ relay_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+    references: {
+ model: "Relays",
+ key: 'id'
+ }
+ },
+ publisher_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+    references: {
+ model: "Publishers",
+ key: 'id'
+ }
+ },
+ status: {
+ type: DataTypes.ENUM,
+ values: ['success', 'error', 'forward'],
+ },
+ message: {
+ type: DataTypes.STRING,
+ defaultValue: ''
+ },
+ meta_type: {
+ type: DataTypes.ENUM,
+ values: ['info', 'geo', 'dns', 'ssl'],
+ allowNull: true
+ },
+ meta_id: {
+ type: DataTypes.INTEGER,
+ allowNull: true
+ }
+}, {
+ sequelize: sequelize
+});
+
+export default Checkmeta;
diff --git a/packages/relaydb.new/src/models/CheckStatus.js b/packages/relaydb.new/src/models/CheckStatus.js
new file mode 100644
index 00000000..98292958
--- /dev/null
+++ b/packages/relaydb.new/src/models/CheckStatus.js
@@ -0,0 +1,60 @@
+import { Model, DataTypes } from 'sequelize';
+import sequelize from '../connect/index.js'
+
+class Checkstatus extends Model {}
+
+Checkstatus.init({
+ id: {
+ type: DataTypes.INTEGER,
+ primaryKey: true,
+ autoIncrement: true
+ },
+ relay_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+    references: {
+ model: "Relays",
+ key: 'id'
+ }
+ },
+ publisher_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+    references: {
+ model: "Publishers",
+ key: 'id'
+ }
+ },
+ connect: {
+ type: DataTypes.BOOLEAN,
+ defaultValue: false
+ },
+ read: {
+ type: DataTypes.BOOLEAN,
+ defaultValue: false
+ },
+ write: {
+ type: DataTypes.BOOLEAN,
+ defaultValue: false
+ },
+ connectDuration: {
+ type: DataTypes.INTEGER,
+ defaultValue: 0
+ },
+ readDuration: {
+ type: DataTypes.INTEGER,
+ defaultValue: 0
+ },
+ writeDuration: {
+ type: DataTypes.INTEGER,
+ defaultValue: 0
+ },
+ duration: {
+ type: DataTypes.INTEGER,
+ defaultValue: 0
+ }
+}, {
+ sequelize: sequelize
+});
+
+export default Checkstatus;
diff --git a/packages/relaydb.new/src/models/Ip.js b/packages/relaydb.new/src/models/Ip.js
new file mode 100644
index 00000000..a3e1690f
--- /dev/null
+++ b/packages/relaydb.new/src/models/Ip.js
@@ -0,0 +1,35 @@
+import { Model, DataTypes } from 'sequelize'
+import sequelize from '../connect/index.js'
+
+class Ip extends Model {}
+
+Ip.init({
+ id: {
+ type: DataTypes.INTEGER,
+ primaryKey: true,
+ unique: true
+ },
+ ip: {
+ type: DataTypes.STRING,
+ unique: true
+ },
+ type: {
+ type: DataTypes.ENUM,
+ values: ['ipv4', 'ipv6']
+ }
+}, {
+ sequelize,
+ modelName: 'Ip',
+ indexes: [
+ {
+ unique: true,
+ fields: ['ip']
+ },
+ {
+ unique: false,
+ fields: ['type']
+ }
+ ]
+});
+
+export default Ip
\ No newline at end of file
diff --git a/packages/relaydb.new/src/models/MetaDns.js b/packages/relaydb.new/src/models/MetaDns.js
new file mode 100644
index 00000000..e2838b07
--- /dev/null
+++ b/packages/relaydb.new/src/models/MetaDns.js
@@ -0,0 +1,34 @@
+import { Model, DataTypes } from 'sequelize'
+import sequelize from '../connect/index.js'
+
+class Dns extends Model {}
+
+Dns.init({
+ id: {
+ type: DataTypes.STRING, //hash
+ primaryKey: true
+ },
+ relay_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+ references: {
+ model: 'Relays',
+ key: 'id'
+ }
+ },
+ data: {
+ type: DataTypes.JSONB,
+ defaultValue: {}
+ },
+}, {
+ sequelize,
+ modelName: 'Dns',
+ indexes: [
+ {
+ fields: ['data'],
+ using: 'GIN'
+ }
+ ]
+});
+
+export default Dns
\ No newline at end of file
diff --git a/packages/relaydb.new/src/models/MetaGeo.js b/packages/relaydb.new/src/models/MetaGeo.js
new file mode 100644
index 00000000..8229cf0d
--- /dev/null
+++ b/packages/relaydb.new/src/models/MetaGeo.js
@@ -0,0 +1,34 @@
+import { Model, DataTypes } from 'sequelize'
+import sequelize from '../connect/index.js'
+
+class Geo extends Model {}
+
+Geo.init({
+ id: {
+ type: DataTypes.STRING, //geohash
+ primaryKey: true
+ },
+ relay_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+ references: {
+ model: 'Relays',
+ key: 'id'
+ }
+ },
+ data: {
+ type: DataTypes.JSONB,
+ defaultValue: {}
+ }
+}, {
+ sequelize,
+ modelName: 'Geo',
+ indexes: [
+ {
+ unique: true,
+ fields: ['id']
+ }
+ ]
+});
+
+export default Geo
\ No newline at end of file
diff --git a/packages/relaydb.new/src/models/MetaInfo.js b/packages/relaydb.new/src/models/MetaInfo.js
new file mode 100644
index 00000000..d2c7d77c
--- /dev/null
+++ b/packages/relaydb.new/src/models/MetaInfo.js
@@ -0,0 +1,45 @@
+import { Sequelize, Model, DataTypes } from 'sequelize'
+import sequelize from '../connect/index.js'
+
+class Info extends Model {}
+
+Info.init({
+ id: {
+ type: DataTypes.STRING, //hash
+ primaryKey: true
+ },
+ relay_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+ references: {
+ model: 'Relays',
+ key: 'id'
+ }
+ },
+ data: {
+ type: DataTypes.JSONB,
+ defaultValue: {}
+ },
+}, {
+ sequelize,
+ indexes: [
+ {
+ name: 'index_software_on_data',
+ fields: [Sequelize.literal("((data ->> 'software')::text)")]
+ },
+ {
+ name: 'index_version_on_data',
+ fields: [Sequelize.literal("((data ->> 'version')::text)")]
+ },
+ {
+ name: 'index_payment_required_on_data',
+ fields: [Sequelize.literal("((data -> 'limitation' ->> 'payment_required')::boolean)")]
+ },
+ {
+ fields: ['data'],
+ using: 'GIN'
+ }
+ ]
+});
+
+export default Info
\ No newline at end of file
diff --git a/packages/relaydb.new/src/models/MetaSsl.js b/packages/relaydb.new/src/models/MetaSsl.js
new file mode 100644
index 00000000..b3cfac4b
--- /dev/null
+++ b/packages/relaydb.new/src/models/MetaSsl.js
@@ -0,0 +1,36 @@
+import { Model, DataTypes } from 'sequelize'
+import sequelize from '../connect/index.js'
+
+class Ssl extends Model {}
+
+Ssl.init(
+ {
+ // Model attributes are defined here
+ id: {
+ type: DataTypes.STRING, //hash
+ primaryKey: true
+ },
+ relay_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+ references: {
+ model: 'Relays',
+ key: 'id'
+ }
+ },
+ data: {
+ type: DataTypes.JSONB,
+ defaultValue: {}
+ },
+ }, {
+ sequelize,
+ indexes: [
+ {
+ fields: ['data'],
+ using: 'GIN'
+ }
+ ]
+ }
+);
+
+export default Ssl
\ No newline at end of file
diff --git a/packages/relaydb.new/src/models/Publisher.js b/packages/relaydb.new/src/models/Publisher.js
new file mode 100644
index 00000000..85e44705
--- /dev/null
+++ b/packages/relaydb.new/src/models/Publisher.js
@@ -0,0 +1,31 @@
+import { Model, DataTypes } from 'sequelize'
+import sequelize from '../connect/index.js'
+
+class Publisher extends Model {}
+
+Publisher.init({
+ id: {
+ type: DataTypes.STRING,
+ primaryKey: true
+ },
+ slug: {
+ type: DataTypes.STRING,
+ allowNull: false,
+ unique: true
+ },
+ name: {
+ type: DataTypes.STRING,
+ allowNull: true
+ },
+ geo_id: {
+ type: DataTypes.STRING,
+    references: {
+ model: 'Geos',
+ key: 'id'
+ }
+ },
+}, {
+ sequelize
+});
+
+export default Publisher
\ No newline at end of file
diff --git a/packages/relaydb.new/src/models/Relay.js b/packages/relaydb.new/src/models/Relay.js
new file mode 100644
index 00000000..e32a7700
--- /dev/null
+++ b/packages/relaydb.new/src/models/Relay.js
@@ -0,0 +1,78 @@
+import { Model, DataTypes } from 'sequelize';
+import sequelize from '../connect/index.js'
+
+class Relay extends Model {}
+
+Relay.init({
+ id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+ primaryKey: true
+ },
+ url: {
+ type: DataTypes.STRING,
+ allowNull: false,
+ unique: true
+ },
+ network: {
+ type: DataTypes.ENUM,
+ values: ['clearnet', 'tor', 'i2p', 'cjdns'],
+ allowNull: false
+ },
+ type: {
+ type: DataTypes.ENUM,
+ values: ['general', 'proxy', 'aggregate', 'personal', 'archive', 'specialized'],
+ defaultValue: 'general'
+ },
+ first_seen: {
+ type: DataTypes.DATE,
+ defaultValue: null
+ },
+ last_seen: {
+ type: DataTypes.DATE,
+ defaultValue: null
+ },
+ dead: {
+ type: DataTypes.BOOLEAN,
+ allowNull: true,
+ defaultValue: false
+ },
+ parent_id: {
+ type: DataTypes.STRING,
+ allowNull: true,
+ references: {
+ model: 'Relays',
+ key: 'id'
+ }
+ }
+}, {
+ sequelize: sequelize,
+ indexes: [
+ {
+ unique: true,
+ fields: ['url']
+ },
+ {
+ unique: false,
+ fields: ['network']
+ },
+ {
+ unique: false,
+ fields: ['last_seen']
+ },
+ {
+ unique: false,
+ fields: ['first_seen']
+ },
+ {
+ unique: false,
+ fields: ['dead']
+ },
+ {
+ unique: false,
+ fields: ['parent_id']
+ }
+ ]
+});
+
+export default Relay;
diff --git a/packages/relaydb.new/src/models/RelayIp.js b/packages/relaydb.new/src/models/RelayIp.js
new file mode 100644
index 00000000..bf297a65
--- /dev/null
+++ b/packages/relaydb.new/src/models/RelayIp.js
@@ -0,0 +1,32 @@
+import { Model, DataTypes } from 'sequelize'
+import sequelize from '../connect/index.js'
+
+class Relayip extends Model {}
+
+Relayip.init({
+ id: {
+ type: DataTypes.INTEGER,
+ autoIncrement: true,
+ primaryKey: true
+ },
+ relay_id: {
+ type: DataTypes.STRING,
+ allowNull: false,
+ references: {
+ model: 'Relays',
+ key: 'id'
+ }
+ },
+ ip_id: {
+ type: DataTypes.INTEGER,
+ allowNull: false,
+ references: {
+ model: 'Ips',
+ key: 'id'
+ }
+ },
+}, {
+ sequelize
+});
+
+export default Relayip;
diff --git a/packages/relaydb.new/src/models/index.js b/packages/relaydb.new/src/models/index.js
new file mode 100644
index 00000000..bcbec47b
--- /dev/null
+++ b/packages/relaydb.new/src/models/index.js
@@ -0,0 +1,43 @@
+// export { default as AppMeta } from './AppMeta.js'
+export { default as Checkmeta } from './CheckMeta.js'
+export { default as Checkstatus } from './CheckStatus.js'
+// export { default as ExecutionTimestamp } from './ExecutionTimestamp.js'
+export { default as Ip } from './Ip.js'
+export { default as Dns } from './MetaDns.js'
+export { default as Geo } from './MetaGeo.js'
+export { default as Info } from './MetaInfo.js'
+export { default as Ssl } from './MetaSsl.js'
+export { default as Publisher } from './Publisher.js'
+export { default as Relay } from './Relay.js'
+export { default as Relayip } from './RelayIp.js'
+// export { default as User } from './User.js'
+// export { default as UserRelayList } from './UserRelayList.js'
+
+// import AppMeta from './AppMeta.js';
+import Checkmeta from './CheckMeta.js';
+import Checkstatus from './CheckStatus.js';
+// import ExecutionTimestamp from './ExecutionTimestamp.js';
+import Ip from './Ip.js';
+import Dns from './MetaDns.js';
+import Geo from './MetaGeo.js';
+import Info from './MetaInfo.js';
+import Ssl from './MetaSsl.js';
+import Publisher from './Publisher.js';
+import Relay from './Relay.js';
+import Relayip from './RelayIp.js';
+
+import associations from '../associations/index.js'
+associations()
+
+export default {
+ Checkmeta,
+ Checkstatus,
+ Ip,
+ Dns,
+ Geo,
+ Info,
+ Ssl,
+ Publisher,
+ Relay,
+ Relayip
+};
diff --git a/packages/relaydb.new/src/seeds/.gitkeep b/packages/relaydb.new/src/seeds/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/packages/relaydb/defaults.js b/packages/relaydb/defaults.js
new file mode 100644
index 00000000..8b69dded
--- /dev/null
+++ b/packages/relaydb/defaults.js
@@ -0,0 +1,12 @@
+export const RelayRecord = {
+ url: '',
+ network: '',
+ websocket: null,
+ info: null,
+ geo: null,
+ dns: null,
+ ssl: null,
+ last_checked: -1,
+ first_seen: -1,
+ last_seen: -1
+}
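+
+// Spread as a base when (re)inserting relays so every record shares the same
+// shape, e.g. relaydb.relay.patch({ ...RelayRecord, url }) in the migrations.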
\ No newline at end of file
diff --git a/packages/relaydb/index.js b/packages/relaydb/index.js
index da9b08b4..64a54313 100644
--- a/packages/relaydb/index.js
+++ b/packages/relaydb/index.js
@@ -1,9 +1,10 @@
// import { open } from 'lmdb'
import { withExtensions } from "lmdb-oql";
-import schemas from "./schemas.js";
+import { defineSchemas, schemas } from "./schemas.js";
import RelayMixin from "./mixins/relay.js"
-import CheckMixin from "./mixins/check.js"
+import RetryMixin from "./mixins/retry.js"
+import ChecksMixin from "./mixins/checks.js"
import InfoMixin from "./mixins/info.js"
import CacheTimeMixin from "./mixins/cachetime.js"
import StatMixin from "./mixins/stat.js"
@@ -11,7 +12,6 @@ import ServiceMixin from "./mixins/service.js"
import NoteMixin from "./mixins/note.js";
import Logger from "@nostrwatch/logger"
-const logger = new Logger('lmdb')
let open;
@@ -28,21 +28,28 @@ if (typeof window !== 'undefined') {
class DbWrapper {
constructor(dbPath, opts={}){
this.$ = withExtensions(open(dbPath, opts));
- this.$ = schemas(this.$);
+ this.$ = defineSchemas(this.$);
+ this.initialized = false
+ this.schemas = schemas
+ this.logger = new Logger('lmdb')
}
- addSchema(cl) {
+ addHelpers(cl) {
const key = cl.name.toLowerCase().replace("mixin","")
if(!cl)
throw new Error("Missing schema class")
if(this?.[key])
- throw new Error("Mixin already added")
+ this.logger.warn(`Mixin already added: ${key}`)
+ // throw new Error("Mixin already added")
this[key] = new cl(this)
if(this[key]?.init)
- this[key].init()
+ this[key].init()
}
}
-let db
+let db
+
+export { RelayRecord } from './defaults.js'
+export { ParseSelect } from "./utils.js";
export default (dbPath, opts={}) => {
if(!db) {
@@ -50,12 +57,16 @@ export default (dbPath, opts={}) => {
if(!db?.$)
throw new Error("Failed to initialize LMDB database")
}
- db.addSchema(ServiceMixin)
- db.addSchema(RelayMixin)
- db.addSchema(CheckMixin)
- db.addSchema(InfoMixin)
- db.addSchema(CacheTimeMixin)
- db.addSchema(StatMixin)
- db.addSchema(NoteMixin)
+ if(db.initialized) return db
+ db.addHelpers(ServiceMixin)
+ db.addHelpers(RelayMixin)
+ db.addHelpers(RetryMixin)
+ db.addHelpers(ChecksMixin)
+ db.addHelpers(InfoMixin)
+ db.addHelpers(CacheTimeMixin)
+ db.addHelpers(StatMixin)
+ db.addHelpers(NoteMixin)
+ db.initialized = true
return db
-}
\ No newline at end of file
+}
+
diff --git a/packages/relaydb/migrations/find_weird_urls.js b/packages/relaydb/migrations/find_weird_urls.js
new file mode 100644
index 00000000..051da0dc
--- /dev/null
+++ b/packages/relaydb/migrations/find_weird_urls.js
@@ -0,0 +1,103 @@
+/**
+ * Find Weird URLs:
+ * -----------------------
+ * Scans all relay records for malformed URLs (doubled protocols,
+ * hostnames without dots, trailing dots) and deletes them.
+ */
+
+
+import Relaydb from '../index.js'
+import dotenv from 'dotenv'
+dotenv.config()
+
+const dbpath = process.env.NWCACHE_PATH
+
+const relaydb = Relaydb(dbpath? dbpath : './.lmdb')
+
+console.log(relaydb.$.env.stat())
+
+function isDuplicatedURL(url) {
+ // Use a regular expression to match URLs with protocol colon stripped
+ const protocolStrippedRegex = /^([a-zA-Z0-9_-]+:\/\/)(.*)$/;
+ const match = url.match(protocolStrippedRegex);
+
+ if (match) {
+ const strippedURL = match[2]; // Get the part after the protocol
+ const regex = new RegExp(strippedURL.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g');
+ const matches = url.match(regex);
+
+ // Check if there are more than one match, indicating duplication
+ return matches && matches.length > 1;
+ }
+
+ return false;
+}
+
+function hasForwardSlashInHostname(urlString) {
+ try {
+ const url = new URL(urlString);
+ const hostname = url.hostname;
+ return hostname.includes('/');
+ } catch (error) {
+ // Handle invalid URLs or other errors
+ console.error('Error:', error);
+ return false;
+ }
+}
+
+const chunkArray = function(arr, chunkSize) {
+ if (chunkSize <= 0) {
+ throw new Error("Chunk size must be greater than 0.");
+ }
+ const result = [];
+ for (let i = 0; i < arr.length; i += chunkSize) {
+ const chunk = arr.slice(i, i + chunkSize);
+ result.push(chunk);
+ }
+ return result;
+}
+
+// const relays = await relaydb.relay.get.allIds()
+console.log('before', relaydb.relay.count.all())
+const relays = await relaydb.relay.get.all()
+
+const chunks = chunkArray(relays, 100)
+
+let count = 0
+for(const chunk of chunks){
+  for await (const relay of chunk){
+    const test = /^(ws:\/\/http|wss:\/\/http|wss:\/\/ws|wss:\/\/wss)/.test(relay.url)
+      || !relay.url.includes('.')
+      || relay.url.endsWith('.')
+      || new URL(relay.url).hostname.endsWith('.')
+
+    if(!test)
+      continue
+
+    count++
+    const deleted = await relaydb.relay.delete(relay.url)
+    console.log('deleted', deleted, relay.url)
+  }
+}
+console.log('deleted', count)
+
+console.log('after', relaydb.relay.count.all())
diff --git a/packages/relaydb/migrations/fix_cachetime_kv.js b/packages/relaydb/migrations/fix_cachetime_kv.js
new file mode 100644
index 00000000..7a55d3a3
--- /dev/null
+++ b/packages/relaydb/migrations/fix_cachetime_kv.js
@@ -0,0 +1,52 @@
+
+import Relaydb from '../index.js'
+import dotenv from 'dotenv'
+dotenv.config()
+
+const dbpath = process.env.NWCACHE_PATH
+
+const relaydb = Relaydb(dbpath? dbpath : './.lmdb')
+
+console.log(relaydb.$.env.stat())
+
+const chunkArray = function(arr, chunkSize) {
+ if (chunkSize <= 0) {
+ throw new Error("Chunk size must be greater than 0.");
+ }
+ const result = [];
+ for (let i = 0; i < arr.length; i += chunkSize) {
+ const chunk = arr.slice(i, i + chunkSize);
+ result.push(chunk);
+ }
+ return result;
+}
+
+// relaydb.cachetime.init()
+const kv = await relaydb.cachetime.get.allIds()
+
+const chunks = chunkArray(kv, 100)
+
+console.log(chunks)
+process.exit()
+
+
+// TODO: the actual fix/re-key pass for these cachetime entries is not yet implemented.
diff --git a/packages/relaydb/migrations/fix_note_indices.js b/packages/relaydb/migrations/fix_note_indices.js
index 3333c206..08bb0a5c 100644
--- a/packages/relaydb/migrations/fix_note_indices.js
+++ b/packages/relaydb/migrations/fix_note_indices.js
@@ -9,10 +9,13 @@
* remove them.
*/
+import dotenv from 'dotenv'
import lmdb from '../index.js'
const db = lmdb('/Users/sandwich/Develop/nostr-watch/packages/trawler/lmdb/nw.mdb')
+dotenv.config()
+
const chunkArray = function(arr, chunkSize) {
if (chunkSize <= 0) {
throw new Error("Chunk size must be greater than 0.");
@@ -33,16 +36,12 @@ export default async () => {
const chunks = chunkArray(notes, 500)
- console.log(chunks.length)
-
let count = 0
for(const chunk of chunks){
- console.log('CHUNKS', chunks.length)
for await (const { key, value:note } of chunk){
- console.log('SETTING:', key)
try {
const NOTE = { id: note.id, created_at: note.created_at, kind: note.kind, pubkey: note.pubkey, content: note.content, sig: note.sig, tags: note.tags }
- console.log('SET:', `#${count++}`, await db.note.set.one(NOTE))
+ await db.note.set.one(NOTE)
}
catch(e){ console.log('ERROR:', note.id) }
}
diff --git a/packages/relaydb/migrations/fix_relay_records.js b/packages/relaydb/migrations/fix_relay_records.js
new file mode 100644
index 00000000..7e04b160
--- /dev/null
+++ b/packages/relaydb/migrations/fix_relay_records.js
@@ -0,0 +1,65 @@
+/**
+ * Fix Relay Records:
+ * -----------------------
+ * Reinserts every relay record on top of the RelayRecord defaults so
+ * all records share a consistent shape.
+ */
+
+
+import Relaydb from '../index.js'
+import { RelayRecord } from '../index.js'
+import dotenv from 'dotenv'
+dotenv.config()
+
+const dbpath = process.env.NWCACHE_PATH
+
+const relaydb = Relaydb(dbpath? dbpath : './.lmdb')
+
+console.log(relaydb.$.env.stat())
+
+const chunkArray = function(arr, chunkSize) {
+ if (chunkSize <= 0) {
+ throw new Error("Chunk size must be greater than 0.");
+ }
+ const result = [];
+ for (let i = 0; i < arr.length; i += chunkSize) {
+ const chunk = arr.slice(i, i + chunkSize);
+ result.push(chunk);
+ }
+ return result;
+}
+
+const relays = await relaydb.relay.get.all()
+
+const chunks = chunkArray(relays, 100)
+
+let count = 0
+for(const chunk of chunks){
+  for await (const relay of chunk){
+    const url = new URL(relay.url).toString()
+    try {
+      const RELAYRECORD = {
+        ...RelayRecord,
+        url: url,
+        network: relay.network,
+      }
+      console.log('SET:', `#${count++}`, await relaydb.relay.insert(RELAYRECORD))
+    }
+    catch(e){ console.log('ERROR:', `${relay.url}: ${e}`) }
+  }
+}
diff --git a/packages/relaydb/migrations/normalize_relay_urls.js b/packages/relaydb/migrations/normalize_relay_urls.js
new file mode 100644
index 00000000..bde37a0b
--- /dev/null
+++ b/packages/relaydb/migrations/normalize_relay_urls.js
@@ -0,0 +1,64 @@
+/**
+ * Normalize Relay URLs:
+ * -----------------------
+ * Re-patches every relay record with its URL passed through `new URL()`
+ * so stored URLs share a consistent, normalized form.
+ */
+
+
+import Relaydb from '../index.js'
+import dotenv from 'dotenv'
+dotenv.config()
+
+const dbpath = process.env.NWCACHE_PATH
+
+const relaydb = Relaydb(dbpath? dbpath : './.lmdb')
+
+console.log(relaydb.$.env.stat())
+
+const chunkArray = function(arr, chunkSize) {
+ if (chunkSize <= 0) {
+ throw new Error("Chunk size must be greater than 0.");
+ }
+ const result = [];
+ for (let i = 0; i < arr.length; i += chunkSize) {
+ const chunk = arr.slice(i, i + chunkSize);
+ result.push(chunk);
+ }
+ return result;
+}
+
+// const relays = await relaydb.relay.get.allIds()
+console.log(relaydb.relay.count.all())
+const relays = await relaydb.relay.get.all()
+
+const chunks = chunkArray(relays, 100)
+
+let count = 0
+for(const chunk of chunks){
+  for await (const relay of chunk){
+    const url = new URL(relay.url).toString()
+    try {
+      console.log('SET:', `#${count++}`, await relaydb.relay.patch({ url }))
+    }
+    catch(e){ console.log('ERROR:', `${relay.url}: ${e}`) }
+  }
+}
diff --git a/packages/relaydb/migrations/remove_url_hash.js b/packages/relaydb/migrations/remove_url_hash.js
new file mode 100644
index 00000000..e7447ba0
--- /dev/null
+++ b/packages/relaydb/migrations/remove_url_hash.js
@@ -0,0 +1,71 @@
+/**
+ * Remove URL Hash:
+ * -----------------------
+ * Finds relay records whose URL carries a hash fragment, deletes them,
+ * and re-adds the record with the fragment stripped.
+ */
+
+
+import Relaydb from '../index.js'
+import dotenv from 'dotenv'
+dotenv.config()
+
+const dbpath = process.env.NWCACHE_PATH
+
+const relaydb = Relaydb(dbpath? dbpath : './.lmdb')
+
+import { RelayRecord } from '../index.js'
+
+console.log(relaydb.$.env.stat())
+
+const chunkArray = function(arr, chunkSize) {
+ if (chunkSize <= 0) {
+ throw new Error("Chunk size must be greater than 0.");
+ }
+ const result = [];
+ for (let i = 0; i < arr.length; i += chunkSize) {
+ const chunk = arr.slice(i, i + chunkSize);
+ result.push(chunk);
+ }
+ return result;
+}
+
+// const relays = await relaydb.relay.get.allIds()
+console.log(relaydb.relay.count.all())
+const relays = await relaydb.relay.get.all()
+
+const chunks = chunkArray(relays, 100)
+
+for(const chunk of chunks){
+  for await (const relay of chunk){
+    let url = new URL(relay.url)
+    if(!url.hash)
+      continue
+
+    console.log('exists', relay.url, relaydb.relay.id(relay.url), await relaydb.$.get(relaydb.relay.id(relay.url)))
+    url.hash = ''
+
+    await relaydb.relay.delete(relay.url)
+    await relaydb.relay.patch({...RelayRecord, url: url.toString() })
+  }
+}
diff --git a/packages/relaydb/mixins/app.js b/packages/relaydb/mixins/app.js
deleted file mode 100644
index b085c53d..00000000
--- a/packages/relaydb/mixins/app.js
+++ /dev/null
@@ -1,5 +0,0 @@
-export default class AppMixin {
- constructor(db) {
- this.db = db;
- }
-}
\ No newline at end of file
diff --git a/packages/relaydb/mixins/cachetime.js b/packages/relaydb/mixins/cachetime.js
index 83a2992d..efecbfeb 100644
--- a/packages/relaydb/mixins/cachetime.js
+++ b/packages/relaydb/mixins/cachetime.js
@@ -1,20 +1,50 @@
-import { Relay } from "../schemas.js"
-import { operators } from "lmdb-oql";
-import { cacheTimeId, now } from "../utils.js"
+import { operators as ops, IDS } from "lmdb-oql";
+import { cacheTimeId, now, ParseSelect, helperHandler } from "../utils.js"
import { CacheTime } from "../schemas.js"
-const { $eq, $gte } = operators
-
export default class CacheTimeMixin {
- constructor(db) {
+ constructor(db, schema) {
this.db = db;
+ this.get = this.__get()
+ this.schema = CacheTime
+ this.parseSelect = ParseSelect({k: null, v: null}, this.nameClean())
}
- async get(key){
- return this.db.$.get(cacheTimeId(key))
- }
+ // get(key){
+ // return this.db.$.get(this.id(key))?.v
+ // }
async set(key, value=now()){
- return this.db.$.put(cacheTimeId(key), new CacheTime({k: key, v: value}))
+ return this.db.$.put(this.id(key), new CacheTime({k: key, v: value}))
+ }
+
+ id(key){
+ return key? `CacheTime@${key}`: `CacheTime@`
+ }
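+
+  // e.g. id('lastTrawl') -> 'CacheTime@lastTrawl'; with no key, the bare
+  // 'CacheTime@' prefix doubles as the match-all selector used by __get() below.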
+
+ nameClean(){
+ return this.constructor.name.replace('Mixin','')
+ }
+
+ __get(){
+ const fns = {
+ all: (select=null, where=null) => {
+ select = this.parseSelect(select)
+ if(!where)
+ where = { [this.nameClean()]: { '#': this.id() } }
+        return [...this.db.$.select(select).from( this.schema ).where( where )] || []
+ },
+ allIds: () => {
+ const where = { [this.nameClean()]: { '#': this.id() } }
+ return [...this.db.$.select(IDS).from( this.schema ).where( where )] || []
+ },
+ one: (key, select=null) => {
+ select = this.parseSelect(select)
+ if(!key.includes(this.id()))
+ key = this.id(key)
+ return this.db.$.get( key )?.v
+ }
+ }
+ return helperHandler(fns)
}
}
\ No newline at end of file
diff --git a/packages/relaydb/mixins/check.js b/packages/relaydb/mixins/check.js
deleted file mode 100644
index ddf44614..00000000
--- a/packages/relaydb/mixins/check.js
+++ /dev/null
@@ -1,5 +0,0 @@
-export default class CheckMixin {
- constructor(db) {
- this.db = db;
- }
-}
\ No newline at end of file
diff --git a/packages/relaydb/mixins/checks.js b/packages/relaydb/mixins/checks.js
new file mode 100644
index 00000000..3af53053
--- /dev/null
+++ b/packages/relaydb/mixins/checks.js
@@ -0,0 +1,105 @@
+import { ParseSelect, helperHandler} from '../utils.js'
+import { RelayCheckWebsocket, RelayCheckInfo, RelayCheckDns, RelayCheckGeo, RelayCheckSsl } from '../schemas.js'
+import { operators, IDS } from "lmdb-oql";
+
+const { $isDefined } = operators
+
+import transform from '@nostrwatch/transform'
+// const { $eq, $gte, $and, $isDefined, $type, $isUndefined, $includes, $in, $nin, $matches } = operators
+
+export default class CheckMixin {
+ constructor(db) {
+ this.db = db;
+ this.init()
+ }
+
+ init(){
+    ['websocket', 'info', 'dns', 'geo', 'ssl'].forEach(check => {
+      this[check] = {}
+      this[check].get = check_get(this, check)
+      this[check].insert = check_insert(this, check)
+    })
+ }
+
+ validate(RelayObj){
+ // console.log(RelayObj)
+ if(!RelayObj?.url)
+ throw new Error("Relay object must have a url property")
+ }
+}
+
+const check_insert = (self, key) => {
+ const Schema = inferSchema(key)
+ // const schema = Schema.name
+ return (DataObj) => {
+ self.validate(DataObj)
+ const RdbDataObj = maybeTransform(DataObj, key)
+ return self.db.$.put(null, new Schema(RdbDataObj))
+ }
+}
+
+const check_get = (self, key) => {
+ const Schema = inferSchema(key)
+ const schema = Schema.name
+ const transformer = new transform[key]()
+ const parseSelect = ParseSelect(transformer.toJson(), transformer.constructor.name)
+
+ const fns = {
+ one(relayUrl, select=null){
+ select = parseSelect( select )
+ return [...self.db.$.select(select).from( Schema ).where( { [schema]: { 'relay_id': self.db.relay.id(relayUrl) } } )][0] || false
+ },
+ mostRecent(relayUrl, select=null) {
+ select = parseSelect( select )
+ return [...self.db.$.select( select ).from( Schema ).where({ [schema]: { '#': `${schema}@` } })][0] || false
+ },
+ all(select=null) {
+ select = parseSelect( select )
+ // return [...this.db.$.select(select).from( Relay ).where({ Relay: { url: (value) => value?.length } })] || []
+      return [...self.db.$.select( select ).from( Schema ).where({ [schema]: { '#': `${schema}@` } })] || []
+ // return [...self.$.select( select ).from( Schema ).where({ [schema]: { relay_id: $isDefined() } })] || []
+ },
+ allIds(){
+      const result = fns.all(IDS).flat()
+ return result || []
+ }
+ }
+ return helperHandler(fns)
+}
+
+const maybeTransform = (data, key) => {
+ // console.log(data)
+ //relaydb data format
+ if(data?.relay_id && data.relay_id !== null && data.relay_id.length)
+ return data
+
+ // console.log(key, Object.keys(transform))
+
+ const ToRdbData = new transform[key]()
+
+ //event
+ if(data?.sig && data?.tags && data?.pubkey)
+ return ToRdbData.fromEvent(data)
+
+ //nocap
+ if(data?.adapters instanceof Array)
+ return ToRdbData.fromNocap(data)
+
+ throw new Error(`Data provided for ${key} did not match any known formats: ${JSON.stringify(data)}`)
+}
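+
+// e.g. a signed nostr event ({ sig, tags, pubkey, ... }) routes through fromEvent(),
+// a nocap result ({ adapters: [...], ... }) through fromNocap(), and data already
+// carrying a relay_id is assumed to be in relaydb form and passed through as-is.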
+
+const inferSchema = (key) => {
+ switch(key){
+ case 'info':
+ return RelayCheckInfo
+ case 'dns':
+ return RelayCheckDns
+ case 'geo':
+ return RelayCheckGeo
+ case'ssl':
+ return RelayCheckSsl
+ case 'websocket':
+ default:
+ return RelayCheckWebsocket
+ }
+}
\ No newline at end of file
diff --git a/packages/relaydb/mixins/relay.js b/packages/relaydb/mixins/relay.js
index a6279d68..b0c5b26c 100644
--- a/packages/relaydb/mixins/relay.js
+++ b/packages/relaydb/mixins/relay.js
@@ -1,12 +1,18 @@
-import { Relay, Info } from "../schemas.js"
+import { schemas } from "../schemas.js"
import { operators, IDS } from "lmdb-oql";
-import { relayId, now } from "../utils.js"
+import { relayId, ParseSelect, helperHandler } from "../utils.js"
-const { $eq, $gte, $and, $isDefined, $type, $isUndefined, $includes, $in, $nin, $matches } = operators
+const { Relay, RelayCheckWebsocket, RelayCheckInfo } = schemas
+const { $eq, $gte, $and, $isNull, $isDefined, $type, $isUndefined, $includes, $in, $nin, $matches } = operators
import Logger from "@nostrwatch/logger"
+
const logger = new Logger('lmdb:relay')
+import { RelayRecord } from "../defaults.js"
+
+const parseSelect = ParseSelect(RelayRecord, "Relay")
+
import { ResultInterface as ResultType } from "@nostrwatch/nocap";
export default class RelayMixin {
@@ -15,19 +21,18 @@ export default class RelayMixin {
}
init(){
+ this.batch = relay_batch(this.db)
this.get = relay_get(this.db)
this.count = relay_count(this.db)
this.is = relay_is(this.db)
- this.has = relay_has(this.db)
- this.requires = relay_requires(this.db)
- this.supports = relay_supports(this.db)
- this.limits = relay_limits(this.db)
- this.batch = relay_batch(this.db)
+ // this.has = relay_has(this.db)
+ // this.requires = relay_requires(this.db)
+ // this.supports = relay_supports(this.db)
+ // this.limits = relay_limits(this.db)
}
async insert(RelayObj){
- if(!RelayObj?.url)
- throw new Error("Relay object must have a url property")
+ this.validate(RelayObj)
return this.db.$.put(relayId(RelayObj.url), new Relay(RelayObj))
}
@@ -37,18 +42,25 @@ export default class RelayMixin {
}
async update(RelayObj) {
- if(!RelayObj?.url)
- throw new Error("Relay object must have a url property")
- const _old = this.$.get(relayId(RelayObj.url))
- if(!_old)
+ this.validate(RelayObj)
+ const current = this.db.$.get(relayId(RelayObj.url))
+ if(!current)
throw new Error(`Cannot update because ${RelayObj.url} does not exist`)
- const new_ = {..._old, ...RelayObj}
- return this.insert(new_)
+ return this.insert({...RelayObj})
+ }
+
+ async patch(RelayFieldsObj) {
+ this.validate(RelayFieldsObj)
+ const current = await this.db.$.get(relayId(RelayFieldsObj.url))
+ if(!current)
+ throw new Error(`Cannot patch because ${RelayFieldsObj.url} does not exist`)
+ RelayFieldsObj.url = new URL(RelayFieldsObj.url).toString()
+ if(current?.['#']) delete current['#']
+ return this.insert({...current, ...RelayFieldsObj})
}
async upsert(RelayObj) {
- if(!RelayObj?.url)
- throw new Error("Relay object must have a url property")
+ this.validate(RelayObj)
if( await this.exists(RelayObj.url) )
return this.update(RelayObj)
return this.insert(RelayObj)
@@ -61,6 +73,11 @@ export default class RelayMixin {
return this.db.$.remove(relayId(url))
}
+ validate(RelayObj){
+ if(!RelayObj?.url)
+ throw new Error("Relay object must have a url property")
+ }
+
async select(select=null, where=null) {
return [...this.db.$.select(select).from(Relay).where(where)]
}
@@ -76,6 +93,10 @@ export default class RelayMixin {
retention(relayUrl) {
return this.get.one(relayUrl)?.retention
}
+
+ id(relayUrl) {
+ return relayId(relayUrl)
+ }
}
const relay_batch = (db) => {
@@ -86,10 +107,11 @@ const relay_batch = (db) => {
if(!(RelayObjs instanceof Array))
throw new Error("Relay.batch: Must be an array")
const result = []
- //process records in series. This is important for cache time and reduction/elimination of commit errors.
for await (const RelayObj of RelayObjs) {
try {
- result.push(await db.relay[key](RelayObj))
+      const id = await db.relay[key](RelayObj)
+ if(typeof id !== 'undefined')
+ result.push(id)
}
catch(e) { logger.warn(e) }
}
@@ -99,76 +121,55 @@ const relay_batch = (db) => {
return fns
}
-const parseSelect = (key) => {
- const $ResultType = new ResultType()
- if(!key)
- key = Object.keys($ResultType).filter(key => typeof key !== 'function' && key !== 'defaults')
- if(key instanceof Object && !(key instanceof Array))
- return key
- if(key == 'id')
- key = '#'
- if(typeof key === 'string')
- key = [key]
- const select = { Relay: {} }
- for (const k of key) {
- select.Relay[k] = (value,{root}) => { root[k]=value; }
- }
- return select
-}
-
const relay_limits = (db) => {
- const fn = {
- db,
- country(relayUrl, country_code){
- if(!country_code)
- return logger.warn(`Country code is required`)
- return this.countries.includes(country_code)
- },
- countries(relayUrl){
- return this.db.relay.get.one(relayUrl)?.relay_countries
- },
- }
- return handler(fn)
+ // const fn = {
+ // country(relayUrl, country_code){
+ // if(!country_code)
+ // throw new Error(`Country code is required (example: US)`)
+ // return this.countries.includes(country_code)
+ // },
+ // countries(relayUrl){
+ // return db.relay.get.one(relayUrl)?.relay_countries
+ // },
+ // }
+ // return helperHandler(fn)
}
const relay_is = (db) => {
const fn = {
- db,
online(relayUrl) {
- return this.db.relay.get.one(relayUrl)?.connect
- },
- readable(relayUrl) {
- return this.db.relay.get.one(relayUrl)?.read
- },
- writable(relayUrl) {
- return this.db.relay.get.one(relayUrl)?.write
+ return db.relay.get.one(relayUrl)?.online
},
- dead(relayUrl) {
- return this.db.relay.get.one(relayUrl)?.dead
- },
- public(relayUrl) {
- return !this.db.relay.requires.payment(relayUrl) && !this.db.relay.requires.auth(relayUrl)
- }
+ // readable(relayUrl) {
+ // return db.relay.get.one(relayUrl)?.read
+ // },
+ // writable(relayUrl) {
+ // return db.relay.get.one(relayUrl)?.write
+ // },
+ // dead(relayUrl) {
+ // return db.relay.get.one(relayUrl)?.dead
+ // },
+ // public(relayUrl) {
+ // return !db.relay.requires.payment(relayUrl) && !db.relay.requires.auth(relayUrl)
+ // }
}
- return handler(fn)
+ return helperHandler(fn)
}
const relay_requires = (db) => {
const fn = {
- db,
auth(relayUrl) {
- return this.db.relay.get.one(relayUrl)?.auth
+ return db.relay.get.one(relayUrl)?.auth
},
payment(relayUrl) {
- return this.db.relay.has.limitation(relayUrl, 'payment_required')
+ return db.relay.has.limitation(relayUrl, 'payment_required')
},
}
- return handler(fn)
+ return helperHandler(fn)
}
const relay_has = (db) => {
const fn = {
- db,
limitation(relayUrl, key) {
const record = this.db.relay.get.info(relayUrl)?.limitation
if(!key)
@@ -178,17 +179,16 @@ const relay_has = (db) => {
return record?.[key]
},
}
- return handler(fn)
+ return helperHandler(fn)
}
const relay_supports = (db) => {
const fn = {
- db,
nip(relay, nip){
if(relay instanceof String)
- return this.db.relay.get.one(relayUrl)?.info?.supported_nips?.[nip]
+      return db.relay.get.one(relay)?.info?.supported_nips?.[nip]
else
- return this.db.relay.all(null, {Relay: { '#': relayId(relay), info: { supported_nips: $includes(nip) }}})
+ return db.relay.all(null, {Relay: { '#': relayId(relay), info: { supported_nips: $includes(nip) }}})
},
nips(relay=null, nips=[], supportsAll=false){
if(!(nips instanceof Array))
@@ -208,11 +208,11 @@ const relay_supports = (db) => {
else
return supports
},
- nips_many(relay, nips, supportsAll=false){
+ nips_many(relay, nips, selectKeys, supportsAll=false){
const supports = {}
let select = null
if(selectKeys)
- select = parseSelect('url')
+ select = parseSelect(selectKeys)
nips.forEach(nip => {
supports[nip]=this.db.relay.all(select, { Relay: { info: (value) => value?.supported_nips?.[nip] }})
})
@@ -223,21 +223,29 @@ const relay_supports = (db) => {
const commonUrls = urlsFromEachKey.reduce((a, b) => new Set([...a].filter(x => b.has(x))));
return Array.from(commonUrls);
}
+ return supports
}
}
- return handler(fn)
+ return helperHandler(fn)
}
const relay_get = (db) => {
const fns = {
- db,
- one(relayUrl) {
- return this.db.$.get(relayId(relayUrl)) || false
+ one(relay) {
+ if(typeof relay !== 'string')
+ throw new Error("Relay.get.one(): Argument must be a string")
+ if(!relay.startsWith('Relay@'))
+ relay = relayId(relay)
+ return db.$.get(relay) || false
+ },
+ many(relayUrls) {
+ return relayUrls.map(relayUrl => this.one(relayUrl))
},
all(select=null, where=null) {
select = parseSelect(select)
- // return [...this.db.$.select(select).from( Relay ).where({ Relay: { url: (value) => value?.length } })] || []
- return [...this.db.$.select(select).from( Relay ).where({ Relay: { '#': 'Relay@' } })] || []
+ if(!where)
+ where = { Relay: { '#': 'Relay@' } }
+ return [...db.$.select(select).from( Relay ).where(where)] || []
},
allIds(){
const result = this.all(IDS).flat()
@@ -245,43 +253,49 @@ const relay_get = (db) => {
},
online(select=null) {
select = parseSelect(select)
- return [...this.db.$.select(select).from( Relay ).where({ Relay: { connect: $matches(true) } })] || []
+ return [...db.$.select(select).from( Relay, RelayCheckWebsocket ).where({ Relay: { online: true } })] || []
},
network(network, select=null) {
select = parseSelect(select)
- return [...this.db.$.select(select).from( Relay ).where({ Relay: { network } })] || []
+ return [...db.$.select(select).from( Relay ).where({ Relay: { network } })] || []
},
public(select=null) {
select = parseSelect(select)
- return [...this.db.$
- .select(select)
- .from( Relay )
- .where({ Relay: { info: (value) => !value?.payment_required || value.payment_required === false }})
- ] || []
+ return db.check.info.get.all(select).filter( rci => !rci?.data?.limitation || !rci?.data?.limitation?.payment_required || rci.data.limitation.payment_required === false )
},
paid(select=null) {
select = parseSelect(select)
- return [...this.db.$
- .select(select)
- .from( Relay )
- .where({ Relay: { info: (value) => value?.payment_required && value.payment_required === true }})
- ] || []
+      const paymentRequired = db.check.info.get.all().filter( rci => rci?.data?.limitation?.payment_required === true )
+      const relayIds = paymentRequired.map( rci => rci.relay_id )
+      return this.many(relayIds)
},
dead(select=null) {
select = parseSelect(select)
+
// const toBeAlive = now() - config?.global?.relayAliveThreshold || timestring('30d')
// return [...this.db.$.select(select).from(Relay).where({ Relay: { last_seen: $gte(toBeAlive) } })] || []
},
supportsNip(nip, select=null) {
select = parseSelect(select)
- return [...this.db.$.select(select).from(Relay).where({ Relay: { supported_nips: (value) => value.includes(nip) } })] || []
+ return [...db.$.select(select).from(Relay).where({ Relay: { supported_nips: (value) => value.includes(nip) } })] || []
},
doesNotSupportNip(nip, select=null) {
- return [...this.db.$.select(select).from(Relay).where({ Relay: { supported_nips: (value) => !value.includes(nip) } })] || []
+ return [...db.$.select(select).from(Relay).where({ Relay: { supported_nips: (value) => !value.includes(nip) } })] || []
+ },
+ null(key, select=null){
+ if(typeof key !== 'string')
+ throw new Error("Relay.get.null(): Argument must be a string")
+ select = parseSelect(select)
+ // return [...db.$.select(select).from(Relay).where({ Relay: { [key]: (k)=>k==null } })] || []
+ return db.relay.get.all(select).filter((r)=>r[key]==null)
}
}
-
const validator = (...args) => {
// const relayUrl = args[0]
// const key = args[1]
@@ -295,7 +309,7 @@ const relay_get = (db) => {
return true
}
- return handler(fns, validator)
+ return helperHandler(fns, validator)
}
const relay_count = (db) => {
@@ -310,21 +324,4 @@ const relay_count = (db) => {
delete fns.one // not a count
fns.all = fns.allIds // alias
return fns
-}
-
-const handler = (fn, validator=null) => {
- const _ = (..._args) => {
- const fnkey = _args[0]
- const args = Array.from(_args).slice(1)
- if(validator && !validator(...args))
- return
- return fn[fnkey](...args)
- }
-
- const $fns = {}
- Object.keys(fn).forEach(fnkey => {
- if(fn[fnkey] instanceof Function)
- $fns[fnkey] = (...args) => _(fnkey, ...args)
- })
- return $fns
}
\ No newline at end of file
diff --git a/packages/relaydb/mixins/retry.js b/packages/relaydb/mixins/retry.js
new file mode 100644
index 00000000..421bd6ee
--- /dev/null
+++ b/packages/relaydb/mixins/retry.js
@@ -0,0 +1,41 @@
+import { operators } from "lmdb-oql";
+import { Retry } from "../schemas.js"
+
+const { $eq, $gte } = operators
+
+export default class RetryMixin {
+ constructor(db) {
+ this.db = db;
+ }
+
+ id(key){
+ return `Retry@${key}`
+ }
+
+ inferKey(key){
+ return key.includes('Retry@')? key: this.id(key)
+ }
+
+ get(key){
+ key = this.inferKey(key)
+ let value = this.db.$.get(key)?.v
+ return value? value: null
+ }
+
+ async set(key, value=0){
+ key = this.inferKey(key)
+ return this.db.$.put(key, new Retry({k: key, v: value}))
+ }
+
+ async increment(key, amt=1){
+ key = this.inferKey(key)
+ const current = await this.get(key)
+ return this.set(key, current? current + amt: amt)
+ }
+
+ async decrement(key, amt=1){
+ key = this.inferKey(key)
+ const current = await this.get(key)
+ return this.set(key, current? current - amt: -amt)
+ }
+}
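+
+// Usage sketch, assuming the mixin is mounted as `db.retry` (as the trawler's
+// check-cache does via `rcache.retry`). Keys are inferred, so raw keys and
+// full `Retry@...` ids are interchangeable:
+//   await db.retry.set('wss://relay.example.com', 0)
+//   await db.retry.increment('wss://relay.example.com')  // stores 1
+//   db.retry.get('Retry@wss://relay.example.com')        // => 1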
\ No newline at end of file
diff --git a/packages/relaydb/package.json b/packages/relaydb/package.json
index e6f0a799..1bf948fb 100644
--- a/packages/relaydb/package.json
+++ b/packages/relaydb/package.json
@@ -1,5 +1,5 @@
{
- "name": "@nostrwatch/relaydb",
+ "name": "@nostrwatch/relaycache",
"version": "0.0.1",
"type": "module",
"main": "index.js",
@@ -7,8 +7,8 @@
"dependencies": {
"dotenv": "16.3.1",
"lmdb-index": "1.1.0",
- "lmdb-indexeddb": "0.0.9",
"lmdb-oql": "0.5.5",
- "murmurhash": "2.0.1"
+ "murmurhash": "2.0.1",
+ "object-hash": "3.0.0"
}
}
diff --git a/packages/relaydb/sandbox.js b/packages/relaydb/sandbox.js
index db680a2a..0dd687ae 100644
--- a/packages/relaydb/sandbox.js
+++ b/packages/relaydb/sandbox.js
@@ -5,110 +5,47 @@
import { operators, IDS } from "lmdb-oql";
import lmdb from './index.js'
-import { Relay } from "./schemas.js"
+import { Relay, RelayCheckInfo } from "./schemas.js"
import { parseRelayNetwork } from "@nostrwatch/utils"
import { delay } from "../trawler/src/utils.js";
+import { inspect } from 'util'
+const log = obj => console.log(inspect(obj, false, null, true))
+
import Logger from "@nostrwatch/logger"
const logger = new Logger('lmdb')
-const { $notDefined, $isDefined, $isNull, $isFalsy, $isTruthy, $isUndefined } = operators
+import { ParseSelect } from '@nostrwatch/relaycache'
+
+
+const { $and, $gt, $lt, $eq, $includes, $type, $notDefined, $isDefined, $isNull, $isFalsy, $isTruthy, $isUndefined } = operators
// const db = withExtensions(open('/Users/sandwich/Develop/nostr-watch/packages/trawler/lmdb/nw.mdb', {indexOptions:{fulltext:true}}))
await delay(1000)
-const db = lmdb('/Users/sandwich/Develop/nostr-watch/packages/trawler/lmdb/nw.mdb') //{indexOptions:{fulltext:true}}
+const db = lmdb('/Users/sandwich/Develop/nostr-watch/.lmdb/nw.mdb') //{indexOptions:{fulltext:true}}
const start = new Date().getTime()
-// console.log(db.$)
-
-// console.log([...db.$.select(IDS).from(Relay).where( { Relay: { url: $isDefined } } )])
-// console.log([...db.getRangeFromIndex({ network: "clearnet" },null,null,{fulltext:true})].length)
-
-// for (txn of db.$.begin())
-// console.log(txn.stat()['entries'])
-
-// process.exit()
-
-// const getIds = async (table, where) => {
-// const ids = [...db.$.select(IDS).from(table).where(where)]
-// return ids
-// }
-
-// const updateRelays = async (ids) => {
-// for ( const id of ids ) {
-// const relay = await db.relays.get.one(id)
-// if(!relay.network)
-// await db.relays.set.one({...relay, network: parseRelayNetwork(relay) })
-// }
-// }
-
-// $type(typeName:string) - property value is of typeName type
-// $isOdd() - property value is odd
-// $isEven() - property value is even
-// $isPrime() - property value is prime
-// $isComposite() - property value is composite
-// $isPositive() - property value is positive
-// $isNegative() - property value is negative
-// $isInteger() - property value is an integer
-// $isFloat() - property value is a float
-// $isNaN() - property value is not a number
-// $isArray() - property value is an array
-// $isObject() - property value is an object
-// $isPrimitive() - property value is a primitive
-// $isUndefined() - property value is undefined
-// $isNull() - property value is null
-// $isTruthy() - property value is truthy
-// $isFalsy() - property value is falsy
-
-// const connectIsNull = [...db.$.select(IDS).from(Relay).where({ Relay: { connect: $isNull } })]
-// const connectIsFalse = [...db.$.select(IDS).from(Relay).where({ Relay: { connect: false } })]
-// const connectIsTrue = [...db.$.select(IDS).from(Relay).where({ Relay: { connect: true } })]
-// const connectIsDefined = [...db.$.select(IDS).from(Relay).where({ Relay: { url: $isDefined } })]
-// const connectIsFalsy = [...db.$.select(IDS).from(Relay).where({ Relay: { connect: $isFalsy } })]
-// const connectIsTruthy = [...db.$.select(IDS).from(Relay).where({ Relay: { connect: $isTruthy } })]
-// const connectIsUndefined = [...db.$.select().from(Relay).where({ Relay: { connect: $isUndefined } })]
-// const nokey = [...db.$.select().from(Relay).where({ Relay: { $notDefined('nokey') } })]
-// const tor = [...db.$.select().from(Relay).where({ Relay: { network: 'tor' } })]
-// console.log(nokey.length)
-
-
-// const urls = db.relay.get.all()
-// const urls = db.relay.get.all()
-
-// process.exit()
-// console.log(urls)
-// const without = []
-// urls.forEach(id => {
-// const relay = db.relay.get.one(id)
-// if(!relay?.network)
-// without.push(id)
-// })
-
-
-// console.log(connectIsDefined.length, connectIsTrue.length, connectIsNull.length, connectIsFalse.length, connectIsFalsy.length, connectIsTruthy.length, connectIsUndefined.length)
-
-// console.log(tor.map( relay => { return { url: relay.Relay.url, connect: relay.Relay.connect } } ))
-// console.log(connectIsUndefined.map( relay => { return { url: relay.Relay.url, connect: relay.Relay.connect } } ))
-
+let $it = db.$.select( ).from( RelayCheckInfo ).where({ RelayCheckInfo: { data: $type('object') } } )
-// console.log(db.note.get.one('Note@f90a551dfed65b3bbbf88b4cbe90d29d4510f79049a0b3e594991ebd45320ad6'))
-// console.log(db.note.get.allIds())
-// console.log(db.relay.get.allIds())
-// console.log('real', db.note.exists('Note@1e3646f98b4daff6554b42abfe251600116da86a91a3e9a933d10289bde72f4c'))
-// console.log('fake', db.note.exists('Note@1e3646f98b4daff6554b42abfe251600116da86a91a3e9a933d10289bde72f4cfdsfdsfds'))
+for await (const item of $it) {
+ // Process each item
+ log(item);
+}
-// console.log(ids.length, '/', await db.relay.count.all())
-// updateRelays(ids)
+console.log(db.relay.count.online(), 'online')
console.log('RELAYS')
console.log(db.relay.count.network('clearnet'), 'clearnet relays')
console.log(db.relay.count.network('tor'), 'tor relays')
console.log(db.relay.count.network('i2p'), 'i2p relays')
console.log(db.relay.count.network('cjdns'), 'cjdns relays')
-console.log('NOTES')
-console.log(db.note.count.all(), 'notes')
-console.log('STAT')
+console.log(db.relay.count.online(), 'online relays')
+console.log(db.relay.count.public(), 'public relays')
+// console.log(db.relay.count.paid(), 'paid relays')
+// console.log('NOTES')
+// console.log(db.note.count.all(), 'notes')
+// console.log('STAT')
const duration = new Date().getTime() - start
console.log(`${duration}ms`)
\ No newline at end of file
diff --git a/packages/relaydb/schemas.js b/packages/relaydb/schemas.js
index ef752bc0..b392639d 100644
--- a/packages/relaydb/schemas.js
+++ b/packages/relaydb/schemas.js
@@ -4,48 +4,80 @@ export class Relay {
}
}
-export class Check {
+export class Retry {
constructor(config={}) {
Object.assign(this,config);
}
}
-export class Info {
+export class CacheTime {
constructor(config={}) {
Object.assign(this,config);
}
}
-export class CacheTime {
+
+export class Service {
constructor(config={}) {
Object.assign(this,config);
}
}
-export class Stat {
+export class Note {
constructor(config={}) {
Object.assign(this,config);
}
}
-export class Service {
+export class RelayCheckWebsocket {
constructor(config={}) {
Object.assign(this,config);
}
}
-export class Note {
+export class RelayCheckInfo {
constructor(config={}) {
Object.assign(this,config);
}
}
-export default ($db) => {
+export class RelayCheckDns {
+ constructor(config={}) {
+ Object.assign(this,config);
+ }
+}
+
+export class RelayCheckGeo {
+ constructor(config={}) {
+ Object.assign(this,config);
+ }
+}
+
+export class RelayCheckSsl {
+ constructor(config={}) {
+ Object.assign(this,config);
+ }
+}
+
+export const defineSchemas = ($db) => {
+ //relay record
$db.defineSchema(Relay);
- $db.defineSchema(Check);
- $db.defineSchema(Info);
- $db.defineSchema(CacheTime);
- $db.defineSchema(Stat);
+
+ //note cache
$db.defineSchema(Note);
+
+ //relay checks
+ $db.defineSchema(RelayCheckWebsocket);
+ $db.defineSchema(RelayCheckInfo);
+ $db.defineSchema(RelayCheckDns);
+ $db.defineSchema(RelayCheckGeo);
+ $db.defineSchema(RelayCheckSsl);
+
+ //app meta
+ $db.defineSchema(CacheTime);
return $db
+}
+
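+// Usage sketch (wiring assumed): run defineSchemas once against the lmdb-index
+// handle before issuing lmdb-oql queries, so selects like `.from( Relay )`
+// can resolve:
+//   import { defineSchemas } from './schemas.js'
+//   defineSchemas($db)
+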
+export const schemas = {
+ Relay, CacheTime, Note, RelayCheckWebsocket, RelayCheckInfo, RelayCheckDns, RelayCheckGeo, RelayCheckSsl
}
\ No newline at end of file
diff --git a/packages/relaydb/utils.js b/packages/relaydb/utils.js
index f84c0893..bd12b30b 100644
--- a/packages/relaydb/utils.js
+++ b/packages/relaydb/utils.js
@@ -3,4 +3,41 @@ import murmurhash from 'murmurhash'
export const relayId = (relay, schema="Relay") => `${schema}@${murmurhash.v3(relay)}`
export const serviceId = (service) => `Service@${service}`
export const cacheTimeId = (key) => `CacheTime@${key}`
-export const now = () => new Date().getTime()
\ No newline at end of file
+export const now = () => new Date().getTime()
+
+export const ParseSelect = (OBJ, OBJSLUG) => {
+ const fn = (key) => {
+ if(!key)
+ key = Object.keys(OBJ)
+ if(key instanceof Object && !(key instanceof Array))
+ return key
+ if(key == 'id')
+ key = '#'
+ if(typeof key === 'string')
+ key = [key]
+ const select = { [OBJSLUG]: {} }
+ key.push('#')
+ for (const k of key) {
+ select[OBJSLUG][k] = (value,{root}) => { root[k] = value; }
+ }
+ return select
+ }
+ return fn
+}
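+
+// Usage sketch: bind once per schema, then pass strings, arrays, or a prebuilt
+// select object; 'id' is aliased to lmdb-oql's '#' record id. Names here are
+// illustrative:
+//   const parseSelect = ParseSelect(new Relay({}), 'Relay')
+//   parseSelect('url')               // => { Relay: { url: fn, '#': fn } }
+//   parseSelect(['url', 'network'])  // multiple fields copied onto the root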
+
+export const helperHandler = (fns, validator=null) => {
+ const _ = (..._args) => {
+ const fnkey = _args[0]
+ const args = Array.from(_args).slice(1)
+ if(validator && !validator(...args)) return
+ return fns[fnkey](...args)
+ }
+
+ const $fns = {}
+ Object.keys(fns).forEach(fnkey => {
+ if(fns[fnkey] instanceof Function)
+ $fns[fnkey] = (...args) => _(fnkey, ...args)
+ })
+ return $fns
+}
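+
+// Example (illustrative): wrap a map of functions so every call passes through
+// an optional validator first, as relay_get does with its url validator:
+//   const get = helperHandler({ one: (id) => db.$.get(id) }, (id) => typeof id === 'string')
+//   get.one('Relay@123')  // validated, then dispatched to fns.one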
\ No newline at end of file
diff --git a/packages/schemata/schema/note.yaml b/packages/schemata/schema/note.yaml
new file mode 100644
index 00000000..c33d788f
--- /dev/null
+++ b/packages/schemata/schema/note.yaml
@@ -0,0 +1,35 @@
+$schema: http://json-schema.org/draft-07/schema#
+name: 'nostr.note'
+type: object
+properties:
+ id:
+ type: string
+ pattern: '^[0-9a-f]{64}$' # 32 bytes lowercase hex-encoded sha256
+ kind:
+ type: integer
+ enum: [1] # Assuming "kind" can only be 1, you can adjust the allowed values accordingly
+ pubkey:
+ type: string
+ pattern: '^[0-9a-f]{64}$' # 32 bytes lowercase hex-encoded public key
+ created_at:
+ type: integer
+ content:
+ type: string
+ tags:
+ type: array
+ items:
+ type: array
+ items:
+ type: string
+ sig:
+ type: string
+ pattern: '^[0-9a-f]{128}$' # 64 bytes lowercase hex
+required:
+ - id
+ - kind
+ - pubkey
+ - created_at
+ - content
+ - tags
+ - sig
+additionalProperties: false
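+
+# Example of a conforming kind-1 note (values illustrative, not real):
+#   id: "3f2a...{64 hex}"  kind: 1  pubkey: "9b1c...{64 hex}"
+#   created_at: 1700000000  content: "hello nostr"
+#   tags: [["p", "{64-hex pubkey}"]]  sig: "a1b2...{128 hex}"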
\ No newline at end of file
diff --git a/packages/seed/index.js b/packages/seed/index.js
deleted file mode 100644
index 59fbc813..00000000
--- a/packages/seed/index.js
+++ /dev/null
@@ -1,15 +0,0 @@
-import fs from 'fs/promises'
-import yaml from 'js-yaml'
-
-const STATIC_SEED_FILE = new URL('seed.yaml', import.meta.url);
-
-export const getSeedStatic = async () => {
- try {
- const fileContents = await fs.readFile(STATIC_SEED_FILE, 'utf8');
- const data = yaml.load(fileContents);
- return data?.relays || [];
- } catch (e) {
- console.error(e);
- return null;
- }
-}
\ No newline at end of file
diff --git a/packages/seed/package.json b/packages/seed/package.json
index 2a866d59..4258f724 100644
--- a/packages/seed/package.json
+++ b/packages/seed/package.json
@@ -2,9 +2,9 @@
"name": "@nostrwatch/seed",
"version": "0.0.1",
"type": "module",
- "main": "index.js",
+ "main": "src/index.js",
"license": "MIT",
"dependencies": {
"js-yaml": "4.1.0"
}
-}
+}
\ No newline at end of file
diff --git a/packages/seed/src/config.js b/packages/seed/src/config.js
new file mode 100644
index 00000000..acc66cfd
--- /dev/null
+++ b/packages/seed/src/config.js
@@ -0,0 +1,7 @@
+import { loadConfig } from '@nostrwatch/utils'
+
+let config
+if(!config)
+ config = await loadConfig('seed')
+
+export default config
\ No newline at end of file
diff --git a/packages/seed/src/index.js b/packages/seed/src/index.js
new file mode 100644
index 00000000..1eb5df09
--- /dev/null
+++ b/packages/seed/src/index.js
@@ -0,0 +1,150 @@
+import fs from 'fs/promises'
+import yaml from 'js-yaml'
+
+import { extractConfig } from "@nostrwatch/utils"
+import Logger from "@nostrwatch/logger"
+import { fetch } from "cross-fetch"
+import config from "./config.js"
+
+const STATIC_SEED_FILE = new URL('seed.yaml', import.meta.url);
+
+const logger = new Logger('@nostrwatch/seed')
+
+export const bootstrap = async (caller) => {
+ const opts = await extractConfig(caller, 'seed')
+
+  if(Object.keys(opts).length === 0)
+    return logger.warn(`Skipping seed because there is no seed config`)
+
+ if(!opts?.sources)
+ return logger.warn(`No seed sources specified in 'config.${caller}.seed.sources' nor in 'config.seed.sources', cannot seed`)
+
+ let configseed = [],
+ staticseed = [],
+ nwcache = [],
+ api = [],
+ events = []
+
+ if(opts.sources.includes('config'))
+ configseed = config?.seed
+
+ if(opts.sources.includes('static'))
+ staticseed = await relaysFromStaticSeed(opts)
+
+ if(opts.sources.includes('nwcache'))
+ nwcache = await relaysOnlineFromCache(opts)
+
+ if(opts.sources.includes('api'))
+ api = await relaysOnlineFromApi(opts)
+
+ if(opts.sources.includes('events'))
+ events = await relaysFromEvents(opts)
+
+ const uniques = new Set([...configseed, ...staticseed, ...nwcache, ...api, ...events])
+
+ logger.info(`Bootstrapped ${uniques.size} relays`)
+
+ return [...uniques]
+}
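+
+// Example (hypothetical caller): a daemon merges whichever seed sources its
+// config enables, e.g. config.trawler.seed.sources = ['api', 'nwcache']:
+//   const relays = await bootstrap('trawler')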
+
+export const relaysFromEvents = async (opts) => {
+ if(!opts?.options?.events?.pubkeys)
+    throw new Error(`No pubkeys specified at 'config.<caller>.seed.options.events.pubkeys'`)
+  if(!opts?.options?.events?.relays)
+    throw new Error(`No relays specified at 'config.<caller>.seed.options.events.relays'`)
+  if(!(opts.options.events.pubkeys instanceof Array))
+    throw new Error(`'config.<caller>.seed.options.events.pubkeys' is not an array`)
+  if(!(opts.options.events.relays instanceof Array))
+    throw new Error(`'config.<caller>.seed.options.events.relays' is not an array`)
+
+ const { NostrFetcher } = await import("nostr-fetch")
+ const { simplePoolAdapter } = await import("@nostr-fetch/adapter-nostr-tools")
+ const { SimplePool } = await import("nostr-tools")
+
+ const pool = new SimplePool();
+
+ const fetcher = NostrFetcher.withCustomPool(simplePoolAdapter(pool));
+
+ const kinds = [ 30066 ]
+  const authors = opts.options.events.pubkeys
+ const fetchFromRelays = opts.options.events.relays
+
+ const events = await fetcher.fetchAllEvents(
+ fetchFromRelays,
+ { kinds, authors },
+ { since: 0 }
+ )
+ const relays = []
+ for await(const ev of events) {
+    const relay = ev.tags.find( tag => tag[0] === 'd' && !tag[1]?.includes('#') )?.[1]
+ if(!relay) continue
+ relays.push(relay)
+ }
+ return [...new Set(relays)]
+}
+
+export const relaysOnlineFromCache = async (opts) => {
+ const { default: nwcache } = await import("@nostrwatch/relaycache")
+ const $nwcache = nwcache(process.env.NWCACHE_PATH)
+ return $nwcache.relay.get.online('url').map( relay => relay.url )
+}
+
+export const relaysFromStaticSeed = async (opts) => {
+ try {
+ const fileContents = await fs.readFile(STATIC_SEED_FILE, 'utf8');
+ const data = yaml.load(fileContents);
+ return data?.relays || [];
+ } catch (e) {
+ console.error(e);
+ return []
+ }
+}
+
+export const relaysOnlineFromApi = async (opts) => {
+ if(!opts.remotes.rest_api) throw new Error("relaysOnlineFromApi(): No nostr-watch rest_api specified in opts (host.com/v1 or host.com/v2)")
+ const controller = new AbortController();
+ const rest_api = opts.remotes.rest_api
+ let timeout = setTimeout( () => controller.abort(), 10000)
+ let found = false
+  logger.debug('fetching online relays from api...')
+ return new Promise( resolve => {
+ fetch(`${rest_api}/online`, { signal: controller.signal })
+ .then((response) => {
+ if (!response.ok) {
+          clearTimeout(timeout)
+          return resolve([])
+ }
+ response.json()
+ .then( response => {
+ found = true
+ logger.debug('api results retrieved.')
+
+ let relays
+
+ //v1
+ if(response instanceof Array) {
+ relays = response
+ }
+ //v2
+ else if (response instanceof Object) {
+ relays = response.relays //presumed
+ }
+
+ resolve(relays)
+
+ clearTimeout(timeout)
+ })
+ .catch( () => {
+ resolve([])
+ clearTimeout(timeout)
+ })
+ })
+ .catch( () => {
+ resolve([])
+ clearTimeout(timeout)
+ })
+ })
+}
diff --git a/packages/seed/src/nwcache.js b/packages/seed/src/nwcache.js
new file mode 100644
index 00000000..bfa1ee22
--- /dev/null
+++ b/packages/seed/src/nwcache.js
@@ -0,0 +1,16 @@
+import rcache from '@nostrwatch/relaycache'
+import config from './config.js'
+
+let $rcache
+
+if(!config?.cache_path)
+ throw new Error("No LMDB path specified in config")
+
+if(!$rcache) {
+ $rcache = rcache(config.cache_path)
+}
+
+export default $rcache
\ No newline at end of file
diff --git a/packages/transform.new/package.json b/packages/transform.new/package.json
new file mode 100644
index 00000000..e006c74d
--- /dev/null
+++ b/packages/transform.new/package.json
@@ -0,0 +1,11 @@
+{
+ "name": "@nostrwatch/transform2",
+ "version": "1.0.0",
+ "type": "module",
+ "main": "index.js",
+ "license": "MIT",
+ "dependencies": {
+ "class-transformer": "0.5.1",
+ "object-mapper": "6.2.0"
+ }
+}
diff --git a/packages/transform.new/src/Transformable.ts b/packages/transform.new/src/Transformable.ts
new file mode 100644
index 00000000..e180744b
--- /dev/null
+++ b/packages/transform.new/src/Transformable.ts
@@ -0,0 +1,28 @@
+import { plainToClass as p2c, classToPlain as c2p } from 'class-transformer';
+
+export abstract class Transformable {
+  static plainToClass<T>(cls: new (...args: any[]) => T, plain: object): T {
+ return p2c(cls, plain, { excludeExtraneousValues: true });
+ }
+
+ classToPlain(): object {
+ return c2p(this);
+ }
+
+  // getExtraFields lives here because it is shared across the concrete check types
+ protected getExtraFields(object: any, parentKey: string = ''): string[] {
+ let extraFields: string[] = [];
+
+ for (const key of Object.keys(object)) {
+ const fullKey = parentKey ? `${parentKey}.${key}` : key;
+
+ if (!this.hasOwnProperty(key)) {
+ extraFields.push(fullKey);
+ } else if (object[key] !== null && typeof object[key] === 'object' && !Array.isArray(object[key])) {
+ extraFields = extraFields.concat(this.getExtraFields(object[key], fullKey));
+ }
+ }
+
+ return extraFields;
+ }
+}
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/common/BaseRelayCheckInfo.test.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/common/BaseRelayCheckInfo.test.ts
new file mode 100644
index 00000000..174736b4
--- /dev/null
+++ b/packages/transform.new/src/datatypes/RelayCheckInfo/common/BaseRelayCheckInfo.test.ts
@@ -0,0 +1,20 @@
+import { describe, it, expect } from 'vitest';
+import { BaseRelayCheckInfo } from './BaseRelayCheckInfo';
+
+describe('BaseRelayCheckInfo', () => {
+ it('should correctly initialize default properties', () => {
+ const baseInfo = new BaseRelayCheckInfo();
+
+    expect(baseInfo.url).toBe('');
+ expect(baseInfo.checked_at).toBe(-1);
+ expect(baseInfo.checked_by).toBe('');
+ expect(baseInfo.adapters).toBeNull();
+ expect(baseInfo.dropped_fields).toEqual([]);
+ });
+ it('should correctly identify extra fields', () => {
+ const baseInfo = new BaseRelayCheckInfo();
+ const extraFields = baseInfo.getExtraFields({ unknownField: 'value' });
+
+ expect(extraFields).toEqual(['unknownField']);
+ });
+});
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/common/BaseRelayCheckInfo.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/common/BaseRelayCheckInfo.ts
new file mode 100644
index 00000000..cbd0ff39
--- /dev/null
+++ b/packages/transform.new/src/datatypes/RelayCheckInfo/common/BaseRelayCheckInfo.ts
@@ -0,0 +1,10 @@
+import { Expose, Type } from 'class-transformer';
+import { Transformable } from '../../../Transformable';
+
+export abstract class BaseRelayCheckInfo extends Transformable {
+ @Expose() url: string = '';
+ @Expose() checked_at: number = -1;
+ @Expose() checked_by: string = '';
+ @Expose() @Type(() => String) adapters: string[] | null = null;
+ @Expose() dropped_fields: string[] = [];
+}
\ No newline at end of file
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/common/NocapRelayCheckInfoDataSample.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/common/NocapRelayCheckInfoDataSample.ts
new file mode 100644
index 00000000..b078fafc
--- /dev/null
+++ b/packages/transform.new/src/datatypes/RelayCheckInfo/common/NocapRelayCheckInfoDataSample.ts
@@ -0,0 +1,43 @@
+export default {
+ url: 'wss://example.com',
+ network: 'clearnet',
+ adapters: ['InfoAdapterDefault'],
+ checked_at: 1702054745791,
+ checked_by: 'testUser',
+ info: {
+ status: 'success',
+ data: {
+ name: 'Example Relay',
+ description: 'A test relay for example purposes.',
+ pubkey: 'abcdef1234567890',
+ contact: 'contact@example.com',
+ supported_nips: [1, 2, 3, 4],
+ retention: [
+ { kinds: [1, 2], time: 72 }
+ ],
+ language_tags: ['en', 'es'],
+ tags: ['tag1', 'tag2'],
+ posting_policy: 'open',
+ relay_countries: ['US', 'CA'],
+ version: '1.0.0',
+ limitation: {
+ max_message_length: 1024,
+ max_subscriptions: 50,
+ max_filters: 25,
+ max_limit: 100,
+ max_subid_length: 32,
+ min_prefix: 2,
+ max_event_tags: 5,
+ max_content_length: 2048,
+ min_pow_difficulty: 0,
+ auth_required: false,
+ payment_required: true
+ },
+ payments_url: 'https://example.com/payments',
+ fees: {
+ admission: [{ amount: 1000, unit: 'msats', period: 30 }]
+ }
+ },
+ duration: 123
+ }
+ };
\ No newline at end of file
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/nocap/NocapRelayCheckInfo.test.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/nocap/NocapRelayCheckInfo.test.ts
new file mode 100644
index 00000000..457caefc
--- /dev/null
+++ b/packages/transform.new/src/datatypes/RelayCheckInfo/nocap/NocapRelayCheckInfo.test.ts
@@ -0,0 +1,71 @@
+import { describe, it, expect } from 'vitest';
+import { NocapRelayCheckInfo } from './NocapRelayCheckInfo';
+import { RdbRelayCheckInfo } from '../rdb/RdbRelayCheckInfo';
+import data from '../common/NocapRelayCheckInfoDataSample.js'
+
+describe('NocapRelayCheckInfo', () => {
+ it('should correctly initialize with default values', () => {
+ const nocapInfo = new NocapRelayCheckInfo();
+
+ expect(nocapInfo.url).toBe('');
+ expect(nocapInfo.network).toBe('');
+ expect(nocapInfo.adapters).toEqual([]);
+ expect(nocapInfo.checked_at).toBe(-1);
+ expect(nocapInfo.checked_by).toBe('');
+ expect(nocapInfo.info).toBeNull();
+ });
+
+ it('should correctly initialize with provided values', () => {
+ const nocapInfo = new NocapRelayCheckInfo(data);
+
+ expect(nocapInfo.url).toBe(data.url);
+ expect(nocapInfo.network).toBe(data.network);
+ expect(nocapInfo.adapters).toEqual(data.adapters);
+ expect(nocapInfo.checked_at).toBe(data.checked_at);
+ expect(nocapInfo.checked_by).toBe(data.checked_by);
+ expect(nocapInfo.info).toEqual(data.info);
+ });
+
+ it('should correctly transform to RdbRelayCheckInfo format', () => {
+
+ const nocapInfo = new NocapRelayCheckInfo(data);
+ const rdbInfo = nocapInfo.toRdb();
+
+ // Verify the instance type
+ expect(rdbInfo).toBeInstanceOf(RdbRelayCheckInfo);
+
+ // Check basic properties
+    expect(rdbInfo.checked_at).toBe(data.checked_at);
+    expect(rdbInfo.checked_by).toBe(data.checked_by);
+    expect(rdbInfo.adapters).toEqual(data.adapters);
+
+    // Verify nested data transformation
+    expect(rdbInfo.data.name).toBe(data.info.data.name);
+    expect(rdbInfo.data.description).toBe(data.info.data.description);
+    expect(rdbInfo.data.pubkey).toBe(data.info.data.pubkey);
+    expect(rdbInfo.data.contact).toBe(data.info.data.contact);
+    expect(rdbInfo.data.software).toBe(data.info.data.software);
+    expect(rdbInfo.data.version).toBe(data.info.data.version);
+    expect(rdbInfo.data.payments_url).toBe(data.info.data.payments_url);
+
+    // Verify limitation mapping
+    expect(rdbInfo.data.limitation.max_message_length).toBe(data.info.data.limitation.max_message_length);
+    expect(rdbInfo.data.limitation.auth_required).toBe(data.info.data.limitation.auth_required);
+    expect(rdbInfo.data.limitation.payment_required).toBe(data.info.data.limitation.payment_required);
+
+    // Verify fees mapping
+    expect(rdbInfo.data.fees.admission).toEqual(data.info.data.fees.admission);
+
+    // Verify supported NIPs mapping
+    expect(rdbInfo.data.supported_nips).toEqual(data.info.data.supported_nips);
+    expect(rdbInfo.data.supported_nip_extensions).toEqual(data.info.data.supported_nip_extensions);
+
+ // Check for dropped fields
+ expect(rdbInfo.dropped_fields).toContain('url');
+ expect(rdbInfo.dropped_fields).toContain('network');
+ expect(rdbInfo.dropped_fields).toContain('info.status');
+ // ... any other dropped fields
+ });
+
+});
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/nocap/NocapRelayCheckInfo.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/nocap/NocapRelayCheckInfo.ts
new file mode 100644
index 00000000..4a2fcdc8
--- /dev/null
+++ b/packages/transform.new/src/datatypes/RelayCheckInfo/nocap/NocapRelayCheckInfo.ts
@@ -0,0 +1,59 @@
+import { BaseRelayCheckInfo } from '../common/BaseRelayCheckInfo';
+import { Expose, Type } from 'class-transformer';
+import { RelayCheckInfoTransformers } from '../transformers/RelayCheckInfoTransformers';
+
+
+class InfoDataLimitation {
+ @Expose() auth_required: boolean = false;
+ @Expose() created_at_lower_limit: number = -1;
+ @Expose() created_at_upper_limit: number = -1;
+ @Expose() max_event_tags: number = -1;
+ @Expose() max_limit: number = -1;
+ @Expose() max_message_length: number = -1;
+ @Expose() max_subid_length: number = -1;
+ @Expose() max_subscriptions: number = -1;
+ @Expose() min_pow_difficulty: number = 0;
+ @Expose() payment_required: boolean = false;
+ @Expose() restricted_writes: boolean = false;
+}
+
+class InfoDataFeesAdmission {
+ @Expose() amount: number = -1;
+ @Expose() unit: string = '';
+}
+
+class InfoData {
+ @Expose() contact: string = '';
+ @Expose() description: string = '';
+ @Expose() icon: string = '';
+ @Expose() name: string = '';
+ @Expose() payments_url: string = '';
+ @Expose() pubkey: string = '';
+ @Expose() software: string = '';
+ @Expose() version: string = '';
+
+ @Expose() @Type(() => InfoDataFeesAdmission) fees: InfoDataFeesAdmission[] | null = null;
+ @Expose() @Type(() => InfoDataLimitation) limitation: InfoDataLimitation | null = null;
+ @Expose() @Type(() => Number) supported_nips: number[] | null = null;
+}
+
+class Info {
+ @Expose() status: string = '';
+ @Expose() duration: number = -1;
+ @Expose() @Type(() => InfoData) data: InfoData | null = null;
+}
+
+export class NocapRelayCheckInfo extends BaseRelayCheckInfo {
+ @Expose() @Type(() => Info) info: Info | null = null;
+
+ constructor(plainObject: any) {
+ super();
+ Object.assign(this, NocapRelayCheckInfo.plainToClass(NocapRelayCheckInfo, plainObject));
+ this.dropped_fields = this.getExtraFields(plainObject);
+ }
+
+ toRdb(): object {
+ return RelayCheckInfoTransformers.nocapToRdb(this);
+ }
+
+}
\ No newline at end of file
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/rdb/RdbRelayCheckInfo.test.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/rdb/RdbRelayCheckInfo.test.ts
new file mode 100644
index 00000000..e69de29b
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/rdb/RdbRelayCheckInfo.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/rdb/RdbRelayCheckInfo.ts
new file mode 100644
index 00000000..5a07fb41
--- /dev/null
+++ b/packages/transform.new/src/datatypes/RelayCheckInfo/rdb/RdbRelayCheckInfo.ts
@@ -0,0 +1,61 @@
+import { Expose, Type } from 'class-transformer';
+import { BaseRelayCheckInfo } from '../common/BaseRelayCheckInfo';
+import { RelayCheckInfoTransformers } from '../transformers/RelayCheckInfoTransformers';
+
+class RdbInfoDataLimitation {
+ @Expose() max_message_length: number = -1;
+ @Expose() max_subscriptions: number = -1;
+ @Expose() max_filters: number = -1;
+ @Expose() max_limit: number = -1;
+ @Expose() max_subid_length: number = -1;
+ @Expose() min_prefix: number = -1;
+ @Expose() max_event_tags: number = -1;
+ @Expose() max_content_length: number = -1;
+ @Expose() min_pow_difficulty: number = -1;
+ @Expose() auth_required: boolean = false;
+ @Expose() payment_required: boolean = false;
+}
+
+class RdbInfoDataFeesAdmission {
+ @Expose() amount: number = -1;
+ @Expose() unit: string = '';
+}
+
+class RdbInfoData {
+ @Expose() name: string = '';
+ @Expose() description: string = '';
+ @Expose() pubkey: string = '';
+ @Expose() contact: string = '';
+ @Expose() software: string = '';
+ @Expose() version: string = '';
+ @Expose() payments_url: string = '';
+ @Expose() @Type(() => Number) supported_nips: number[] | null = null;
+ @Expose() @Type(() => String) supported_nip_extensions: string[] | null = null;
+ @Expose() @Type(() => RdbInfoDataFeesAdmission) fees: { admission: RdbInfoDataFeesAdmission[] } | null = null;
+ @Expose() @Type(() => RdbInfoDataLimitation) limitation: RdbInfoDataLimitation | null = null;
+}
+
+export class RdbRelayCheckInfo extends BaseRelayCheckInfo {
+ @Expose() @Type(() => RdbInfoData) data: RdbInfoData | null = null;
+ private _recordId: string = '';
+
+ constructor(plainObject: any) {
+ super();
+ Object.assign(this, RdbRelayCheckInfo.plainToClass(RdbRelayCheckInfo, plainObject));
+ this._recordId = plainObject['#'] || '';
+ this.dropped_fields = this.getExtraFields(plainObject);
+ }
+
+ // Getter and setter for the record ID
+ get recordId(): string {
+ return this._recordId;
+ }
+
+ set recordId(value: string) {
+ this._recordId = value;
+ }
+
+ toNocap(): object {
+ return RelayCheckInfoTransformers.rdbToNocap(this);
+ }
+}
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/transformers/RelayCheckInfoTransformers.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/transformers/RelayCheckInfoTransformers.ts
new file mode 100644
index 00000000..b3a30516
--- /dev/null
+++ b/packages/transform.new/src/datatypes/RelayCheckInfo/transformers/RelayCheckInfoTransformers.ts
@@ -0,0 +1,84 @@
+import objectMapper from 'object-mapper';
+import { NocapRelayCheckInfo } from '../nocap/NocapRelayCheckInfo';
+import { RdbRelayCheckInfo } from '../rdb/RdbRelayCheckInfo';
+
+export class RelayCheckInfoTransformers {
+ static nocapToRdb(nocap: NocapRelayCheckInfo): object {
+ const nocapToRdbMapping = {
+ // Top-level fields mapping
+ "url": "dropped_fields[]",
+ "network": "dropped_fields[]",
+ "adapters": "adapters",
+ "checked_at": "checked_at",
+ "checked_by": "checked_by",
+ "info.status": "dropped_fields[]",
+ "info.duration": "dropped_fields[]",
+
+ // Mapping nested 'info.data' fields
+ "info.data.name": "data.name",
+ "info.data.description": "data.description",
+ "info.data.pubkey": "data.pubkey",
+ "info.data.contact": "data.contact",
+ "info.data.software": "data.software",
+ "info.data.version": "data.version",
+ "info.data.payments_url": "data.payments_url",
+ "info.data.supported_nips": "data.supported_nips",
+ "info.data.supported_nip_extensions": "data.supported_nip_extensions",
+
+ // Mapping 'info.data.limitation' object
+ "info.data.limitation.max_message_length": "data.limitation.max_message_length",
+ "info.data.limitation.max_subscriptions": "data.limitation.max_subscriptions",
+ "info.data.limitation.max_filters": "data.limitation.max_filters",
+ "info.data.limitation.max_limit": "data.limitation.max_limit",
+ "info.data.limitation.max_subid_length": "data.limitation.max_subid_length",
+ "info.data.limitation.min_prefix": "data.limitation.min_prefix",
+ "info.data.limitation.max_event_tags": "data.limitation.max_event_tags",
+ "info.data.limitation.max_content_length": "data.limitation.max_content_length",
+ "info.data.limitation.min_pow_difficulty": "data.limitation.min_pow_difficulty",
+ "info.data.limitation.auth_required": "data.limitation.auth_required",
+ "info.data.limitation.payment_required": "data.limitation.payment_required",
+
+ // Mapping 'info.data.fees' object
+ "info.data.fees.admission": "data.fees.admission"
+ };
+
+ return objectMapper(nocap, nocapToRdbMapping);
+ }
+
+ static rdbToNocap(rdb: RdbRelayCheckInfo): object {
+ const rdbToNocapMapping = {
+ "relay_id": "dropped_fields[]",
+ "checked_at": "checked_at",
+ "checked_by": "checked_by",
+ "adapters": "adapters",
+ "data.name": "info.data.name",
+ "data.description": "info.data.description",
+ "data.pubkey": "info.data.pubkey",
+ "data.contact": "info.data.contact",
+ "data.software": "info.data.software",
+ "data.version": "info.data.version",
+ "data.payments_url": "info.data.payments_url",
+ "data.supported_nips": "info.data.supported_nips",
+ "data.supported_nip_extensions": "info.data.supported_nip_extensions",
+
+ // Mapping 'data.limitation' object
+ "data.limitation.max_message_length": "info.data.limitation.max_message_length",
+ "data.limitation.max_subscriptions": "info.data.limitation.max_subscriptions",
+ "data.limitation.max_filters": "info.data.limitation.max_filters",
+ "data.limitation.max_limit": "info.data.limitation.max_limit",
+ "data.limitation.max_subid_length": "info.data.limitation.max_subid_length",
+ "data.limitation.min_prefix": "info.data.limitation.min_prefix",
+ "data.limitation.max_event_tags": "info.data.limitation.max_event_tags",
+ "data.limitation.max_content_length": "info.data.limitation.max_content_length",
+ "data.limitation.min_pow_difficulty": "info.data.limitation.min_pow_difficulty",
+ "data.limitation.auth_required": "info.data.limitation.auth_required",
+ "data.limitation.payment_required": "info.data.limitation.payment_required",
+
+ // Mapping 'data.fees' object
+ "data.fees.admission": "info.data.fees.admission"
+ };
+
+ return objectMapper(rdb, rdbToNocapMapping);
+ }
+
+}
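+
+// Usage sketch: both directions are plain object-mapper specs, so a round trip
+// looks like this (instances per the classes above):
+//   const rdbShape = RelayCheckInfoTransformers.nocapToRdb(nocapInstance)
+//   const nocapShape = RelayCheckInfoTransformers.rdbToNocap(rdbInstance)
+// Anything mapped to "dropped_fields[]" is recorded rather than silently lost.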
diff --git a/packages/transform.new/src/datatypes/RelayCheckInfo/transformers/TransformationHelpers.ts b/packages/transform.new/src/datatypes/RelayCheckInfo/transformers/TransformationHelpers.ts
new file mode 100644
index 00000000..8a015ff6
--- /dev/null
+++ b/packages/transform.new/src/datatypes/RelayCheckInfo/transformers/TransformationHelpers.ts
@@ -0,0 +1,12 @@
+import { NocapRelayCheckInfo } from '../nocap/NocapRelayCheckInfo';
+import { RdbRelayCheckInfo } from '../rdb/RdbRelayCheckInfo';
+
+export function nocapToRdb(nocap: NocapRelayCheckInfo): RdbRelayCheckInfo {
+ const rdbPlainObject = nocap.toRdb();
+ return NocapRelayCheckInfo.plainToClass(RdbRelayCheckInfo, rdbPlainObject);
+}
+
+export function rdbToNocap(rdb: RdbRelayCheckInfo): NocapRelayCheckInfo {
+ const nocapPlainObject = rdb.toNocap();
+ return RdbRelayCheckInfo.plainToClass(NocapRelayCheckInfo, nocapPlainObject);
+}
\ No newline at end of file
diff --git a/packages/transform.new/src/datatypes/RelayRecord/RelayRecord_rdb.ts b/packages/transform.new/src/datatypes/RelayRecord/RelayRecord_rdb.ts
new file mode 100644
index 00000000..e69de29b
diff --git a/packages/transform/index.js b/packages/transform/index.js
new file mode 100644
index 00000000..fd9f1714
--- /dev/null
+++ b/packages/transform/index.js
@@ -0,0 +1,15 @@
+import { RelayCheckDns as dns } from './src/dns.js'
+import { RelayCheckGeo as geo } from './src/geo.js'
+import { RelayCheckSsl as ssl } from './src/ssl.js'
+import { RelayCheckInfo as info } from './src/info.js'
+import { RelayCheckWebsocket as websocket } from './src/websocket.js'
+import { RelayRecord } from './src/relay.js'
+
+export default {
+ dns,
+ geo,
+ info,
+ ssl,
+ websocket,
+ RelayRecord
+}
\ No newline at end of file
diff --git a/packages/transform/package.json b/packages/transform/package.json
new file mode 100644
index 00000000..bd7de1a5
--- /dev/null
+++ b/packages/transform/package.json
@@ -0,0 +1,7 @@
+{
+ "name": "@nostrwatch/transform",
+ "version": "1.0.0",
+ "type": "module",
+ "main": "index.js",
+ "license": "MIT"
+}
diff --git a/packages/transform/src/dns.js b/packages/transform/src/dns.js
new file mode 100644
index 00000000..5bade1e2
--- /dev/null
+++ b/packages/transform/src/dns.js
@@ -0,0 +1,40 @@
+import { RelayCheck } from './headers.js'
+
+export class RelayCheckDns extends RelayCheck {
+ /**
+ * @type {object} The data structure specific to RelayCheckResultDns
+ */
+ data = {
+ status: '',
+ TC: false,
+ RD: true,
+ RA: true,
+ AD: false,
+ CD: false,
+ Question: [{ name: '', type: -1 }],
+ Answer: [{}]
+ };
+
+ constructor(data) {
+ super(data);
+ if (data?.data) {
+ this.data = { ...this.data, ...data.data };
+ }
+ }
+
+ toNocap() {
+ const nocapResult = this.setHeadersToNocap({});
+ nocapResult.data = this.data;
+
+ return nocapResult;
+ }
+
+ fromNocap(nocapResult) {
+ this.setHeadersFromNocap(nocapResult);
+ if (nocapResult.data) {
+ this.data = { ...this.data, ...nocapResult.data };
+ }
+ this.hashData()
+    return this.toJson()
+ }
+}
\ No newline at end of file
diff --git a/packages/transform/src/geo.js b/packages/transform/src/geo.js
new file mode 100644
index 00000000..c29c1d1d
--- /dev/null
+++ b/packages/transform/src/geo.js
@@ -0,0 +1,57 @@
+import { RelayCheck } from './headers.js'
+
+export class RelayCheckGeo extends RelayCheck {
+ /**
+ * @type {object} The data structure specific to RelayCheckResultGeo
+ */
+ data = {
+ country: '',
+ countryCode: '',
+ region: '',
+ regionName: '',
+ city: '',
+ zip: '',
+ lat: -1.1,
+ lon: -1.1,
+ timezone: '',
+ isp: '',
+ org: '',
+ as: ''
+ };
+
+ constructor(data) {
+ super(data);
+ if (data?.data) {
+ // Only copy relevant fields, excluding 'status' and 'query'
+ for (const key of Object.keys(this.data)) {
+ if (data.data[key] !== undefined) {
+ this.data[key] = data.data[key];
+ }
+ }
+ }
+ }
+
+ toNocap() {
+ const nocapResult = this.setHeadersToNocap({});
+
+ // Include the relevant fields only, excluding 'status' and 'query'
+ nocapResult.data = { ...this.data };
+
+ return nocapResult;
+ }
+
+ fromNocap(nocapResult) {
+ this.setHeadersFromNocap(nocapResult);
+
+ if (nocapResult.data) {
+ // Update the class data, excluding 'status' and 'query'
+ for (const key of Object.keys(this.data)) {
+ if (nocapResult.data[key] !== undefined) {
+ this.data[key] = nocapResult.data[key];
+ }
+ }
+ }
+ this.hashData()
+    return this.toJson()
+ }
+}
\ No newline at end of file
diff --git a/packages/transform/src/headers.js b/packages/transform/src/headers.js
new file mode 100644
index 00000000..defb937a
--- /dev/null
+++ b/packages/transform/src/headers.js
@@ -0,0 +1,91 @@
+
+import hash from 'object-hash'
+import { relayId } from '@nostrwatch/utils'
+
+export class RelayCheck {
+ /**
+ * @type {string} Unique identifier for the relay
+ */
+ relay_id = '';
+
+ /**
+ * @type {number} Timestamp when the check was performed
+ */
+ checked_at = -1;
+
+ /**
+ * @type {string} Identifier of the entity that performed the check
+ */
+ checked_by = '';
+
+ /**
+ * @type {Array} List of adapters used
+ */
+ adapters = [];
+
+ /**
+ * @type {Array} List of fields that were dropped in the response
+ */
+ dropped_fields = [];
+
+ /**
+ * @type {number} Total duration of the check
+ */
+ duration = -1;
+
+ constructor() {}
+
+ fromRdb(headers, result) {
+ this.relay_id = headers?.relay_id? headers.relay_id: '';
+ this.checked_at = headers?.checked_at? headers.checked_at: -1;
+ this.checked_by = headers?.checked_by? headers.checked_by: '';
+ this.adapters = headers?.adapters? headers.adapters: [];
+ this.dropped_fields = headers?.dropped_fields? headers.dropped_fields: [];
+ this.duration = headers?.duration? headers.duration: -1;
+ this.data = result?.data? { ...this.data, ...result.data }: {};
+ }
+
+ // fromNocap(){
+ // console.warn(`fromNocap() not implemented in ${this.constructor.name} transformer`)
+ // }
+
+ // Method to set headers from NocapResult to the class
+ setHeadersFromNocap(nocapResult) {
+ this.relay_id = relayId(nocapResult.url);
+ this.checked_at = nocapResult.checked_at;
+ this.checked_by = nocapResult.checked_by;
+ this.adapters = nocapResult.adapters;
+ this.dropped_fields = nocapResult.dropped_fields;
+ this.duration = nocapResult.duration;
+ }
+
+ // Method to set headers from the class to NocapResult
+ setHeadersToNocap(nocapResult={}) {
+ nocapResult.relay_id = this.relay_id;
+ nocapResult.checked_at = this.checked_at;
+ nocapResult.checked_by = this.checked_by;
+ nocapResult.adapters = this.adapters;
+ nocapResult.dropped_fields = this.dropped_fields;
+ nocapResult.duration = this.duration;
+ return nocapResult;
+ }
+
+ detectDroppedFields(nocapResult, definedFields) {
+ const allFields = new Set(Object.keys(nocapResult));
+ definedFields.forEach(field => allFields.delete(field));
+ this.dropped_fields = [...allFields];
+ }
+
+ hashData() {
+ this.hash = hash(this.data)
+ }
+
+ toJson(){
+ const result = {}
+ Object.keys(this).forEach(key => {
+ if(typeof this[key] !== 'function')
+ result[key] = this[key]
+ })
+ return result
+ }
+}
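+
+// Subclass contract (see dns/geo/info/ssl/websocket in this package): each
+// check type implements toNocap()/fromNocap() on top of this shared header
+// plumbing. Illustrative round trip:
+//   const check = new RelayCheckDns(record)
+//   const json = check.fromNocap(nocapResult)  // hashed, plain-object form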
\ No newline at end of file
diff --git a/packages/transform/src/info.js b/packages/transform/src/info.js
new file mode 100644
index 00000000..1d545099
--- /dev/null
+++ b/packages/transform/src/info.js
@@ -0,0 +1,67 @@
+import { RelayCheck } from './headers.js'
+
+export class RelayCheckInfo extends RelayCheck {
+ /**
+ * @type {object} The data structure specific to RelayCheckInfo
+ */
+ data = {
+ description: null,
+ name: null,
+ pubkey: null,
+ software: null,
+ supported_nips: null,
+ retention: null,
+ language_tags: null,
+ tags: null,
+ posting_policy: null,
+ relay_countries: null,
+ version: null,
+ limitation: {
+ max_message_length: null,
+ max_subscriptions: null,
+ max_filters: null,
+ max_limit: null,
+ max_subid_length: null,
+ min_prefix: null,
+ max_content_length: null,
+ max_event_tags: null,
+ min_pow_difficulty: null,
+ auth_required: null,
+ payment_required: null
+ },
+ payments_url: "",
+ fees: {
+ subscription: []
+ }
+ };
+
+  constructor(data) {
+    super(data);
+    if (data?.data) {
+      this.data = { ...this.data, ...data.data };
+    }
+  }
+
+ toNocap() {
+ const nocapResult = this.setHeadersToNocap({});
+    nocapResult.info = { data: this.data };
+
+ return nocapResult;
+ }
+
+ fromNocap(nocapResult) {
+ this.setHeadersFromNocap(nocapResult);
+
+ this.detectDroppedFields(nocapResult, Object.keys(this.data));
+
+ if (nocapResult?.info?.data) {
+ this.data = { ...this.data, ...nocapResult.info.data };
+ }
+
+ this.hashData()
+ return this.toJson()
+ }
+}
\ No newline at end of file
diff --git a/packages/transform/src/relay.js b/packages/transform/src/relay.js
new file mode 100644
index 00000000..c9e02531
--- /dev/null
+++ b/packages/transform/src/relay.js
@@ -0,0 +1,58 @@
+export class RelayRecord {
+  /**
+   * @type {string} Canonical websocket URL of the relay
+   */
+  url = "";
+
+  /**
+   * @type {string} Network the relay is reachable on (clearnet, tor, i2p, cjdns)
+   */
+  network = "";
+
+  /**
+   * @type {string} Reference to the relay's latest websocket check
+   */
+  websocket = "";
+
+  /**
+   * @type {string} Reference to the relay's latest NIP-11 info check
+   */
+  info = "";
+
+  /**
+   * @type {string} Reference to the relay's latest geo check
+   */
+  geo = "";
+
+  /**
+   * @type {string} Reference to the relay's latest dns check
+   */
+  dns = "";
+
+  /**
+   * @type {string} Reference to the relay's latest ssl check
+   */
+  ssl = "";
+
+ /**
+ * @type {EpochTimeStamp}
+ */
+ first_seen = -1;
+
+ /**
+ * @type {EpochTimeStamp}
+ */
+ last_seen = -1;
+
+ constructor(data) {
+ this.url = data.url || "";
+ this.network = data.network || "";
+ this.websocket = data.websocket || "";
+ this.info = data.info || "";
+ this.geo = data.geo || "";
+ this.dns = data.dns || "";
+ this.ssl = data.ssl || "";
+ this.first_seen = data.first_seen || -1;
+ this.last_seen = data.last_seen || -1;
+ }
+}
\ No newline at end of file
diff --git a/packages/transform/src/ssl.js b/packages/transform/src/ssl.js
new file mode 100644
index 00000000..0be647c4
--- /dev/null
+++ b/packages/transform/src/ssl.js
@@ -0,0 +1,59 @@
+import { RelayCheck } from './headers.js'
+
+export class RelayCheckSsl extends RelayCheck {
+ /**
+ * @type {object} The data structure specific to RelayCheckResultSsl
+ */
+ data = {
+ days_remaining: 57,
+ valid: true,
+ subject: { CN: '' },
+ issuer: {
+ C: '',
+ O: '',
+ CN: ''
+ },
+ subjectaltname: '',
+ infoAccess: {
+ ' ': ['']
+ },
+ ca: false,
+ modulus: '',
+ bits: 2048,
+ exponent: '',
+ pubkey: {},
+ valid_from: '',
+ valid_to: '',
+ fingerprint: '',
+ fingerprint256: '',
+ fingerprint512: '',
+ ext_key_usage: [''],
+ serialNumber: '',
+ raw: Buffer.alloc(0), // assuming Node.js Buffer
+ pemEncoded: ''
+ };
+
+ constructor(headers, result) {
+ super(headers, result);
+
+ }
+
+ toNocap() {
+ const nocapResult = this.setHeadersToNocap();
+ nocapResult.data = this.data;
+ return nocapResult;
+ }
+
+ fromNocap(headers, result) {
+    this.setHeadersFromNocap(headers);
+    if (!result?.data || Object.keys(result.data).length === 0)
+      this.data = {}
+    else
+      this.data = { ...this.data, ...result.data }
+    this.hashData()
+    return this.toJson()
+ }
+}
\ No newline at end of file
diff --git a/packages/transform/src/websocket.js b/packages/transform/src/websocket.js
new file mode 100644
index 00000000..507b4a20
--- /dev/null
+++ b/packages/transform/src/websocket.js
@@ -0,0 +1,58 @@
+import { RelayCheck } from './headers.js'
+
+export class RelayCheckWebsocket extends RelayCheck {
+
+ constructor(data) {
+ super(data);
+ /**
+ * @type {boolean|null} Data indicating whether connect operation was successful
+ */
+ this.connect = null;
+
+ /**
+ * @type {boolean|null} Data indicating whether read operation was successful
+ */
+ this.read = null;
+
+ /**
+ * @type {boolean|null} Data indicating whether write operation was successful
+ */
+ this.write = null;
+
+ /**
+ * @type {number} Duration of the connect operation
+ */
+ this.connectDuration = -1;
+
+ /**
+ * @type {number} Duration of the read operation
+ */
+ this.readDuration = -1;
+
+ /**
+ * @type {number} Duration of the write operation
+ */
+ this.writeDuration = -1;
+ }
+
+ toNocap() {
+ const nocapResult = this.setHeadersToNocap({});
+ nocapResult.connect = { data: this.connect, duration: this.connectDuration };
+ nocapResult.read = { data: this.read, duration: this.readDuration };
+ nocapResult.write = { data: this.write, duration: this.writeDuration };
+ return nocapResult;
+ }
+
+ fromNocap(nocapResult) {
+ this.setHeadersFromNocap(nocapResult);
+
+ this.connect = nocapResult.connect ? nocapResult.connect.data : null;
+ this.read = nocapResult.read ? nocapResult.read.data : null;
+ this.write = nocapResult.write ? nocapResult.write.data : null;
+ this.connectDuration = nocapResult.connect ? nocapResult.connect.duration : -1;
+ this.readDuration = nocapResult.read ? nocapResult.read.duration : -1;
+ this.writeDuration = nocapResult.write ? nocapResult.write.duration : -1;
+
+ return this
+ }
+}
\ No newline at end of file
diff --git a/packages/trawler/.env.sample b/packages/trawler/.env.sample
index af27555e..d3a50be6 100644
--- a/packages/trawler/.env.sample
+++ b/packages/trawler/.env.sample
@@ -1,6 +1,10 @@
-CONFIG_PATH="./crawler.config.json"
+# Description: Sample .env file for trawler
-#REDIS
+#PUBLISHER
+DEAMON_PUBKEY=""
+DEAMON_PRIVKEY=""
+
+#QUEUE
REDIS_HOST="localhost"
REDIS_PORT=6379
REDIS_DB=0
diff --git a/packages/trawler/config.yaml b/packages/trawler/config.yaml
new file mode 100644
index 00000000..23adf9a6
--- /dev/null
+++ b/packages/trawler/config.yaml
@@ -0,0 +1,52 @@
+# sources:
+#   - static
+# seed:
+#   - wss://relay.snort.social
+#   - wss://relay.damus.io
+publisher:
+ to_relays:
+ - 'wss://history.nostr.watch'
+trawler:
+ trawl_concurrent_relays: 3
+ relay_list_providers:
+ - wss://purplepag.es
+ check:
+ enabled: true
+ timeout: 1000
+ interval: 10*60*1000
+ max: 500
+ expiry: [
+ { max: 3, delay: 1000 * 60 * 60 * 12},
+ { max: 6, delay: 1000 * 60 * 60 * 24 },
+ { max: 13, delay: 1000 * 60 * 60 * 24 * 7 },
+ { max: 17, delay: 1000 * 60 * 60 * 24 * 28 },
+ { max: 29, delay: 1000 * 60 * 60 * 24 * 90 }
+ ]
+ check_cache: true
+ publish:
+ to_relays:
+ - 'wss://history.nostr.watch'
+ interval: 20*60*1000
+ expiry: 60*60*1000
+ kinds:
+ - 30066
+ # - 10066
+ seed:
+ remotes:
+ rest_api: https://api.nostr.watch/v1
+ sources:
+ - api
+ - nwcache
+ sync:
+ relays:
+ out:
+ queue: true
+ events:
+ 30066:
+ properties:
+ - url
+ in:
+ - api
+ - 'https://api.nostr.watch/v1'
+ - events:
+ - 'self'
\ No newline at end of file
diff --git a/packages/trawler/package.json b/packages/trawler/package.json
index a97094be..30b022cd 100644
--- a/packages/trawler/package.json
+++ b/packages/trawler/package.json
@@ -8,11 +8,6 @@
"node": ">=16.0.0"
},
"dependencies": {
- "@bull-board/api": "5.9.2",
- "@bull-board/fastify": "5.9.2",
- "@nostr-dev-kit/ndk": "2.0.5",
- "@nostr-dev-kit/ndk-cache-redis": "2.0.5",
- "@nostr-fetch/adapter-ndk": "0.13.1",
"@nostr-fetch/adapter-nostr-tools": "0.13.1",
"@types/ioredis": "5.0.0",
"bullmq": "4.13.3",
@@ -22,13 +17,16 @@
"lmdb": "2.8.5",
"logging": "3.3.0",
"murmurhash": "2.0.1",
+ "node-schedule": "2.1.1",
"nostr-fetch": "0.13.1",
"nostr-tools": "1.17.0",
"timestring": "7.0.0",
+ "url-local": "2.0.0",
"websocket-polyfill": "0.0.3",
"ws": "8.14.2"
},
"scripts": {
+ "launch": "node src/index.js",
"test": "echo \"Error: no test specified\" && exit 1"
},
"license": "MIT"
diff --git a/packages/trawler/scripts/gen-keypair-save-to-env.js b/packages/trawler/scripts/gen-keypair-save-to-env.js
new file mode 100644
index 00000000..9c1acb0b
--- /dev/null
+++ b/packages/trawler/scripts/gen-keypair-save-to-env.js
@@ -0,0 +1,10 @@
+/*this should only ever be used for testing!!!!*/
+import { setEnvValue } from '@nostrwatch/utils'
+
+import { generatePrivateKey, getPublicKey } from 'nostr-tools'
+
+const PRIVATE_KEY = generatePrivateKey()
+const PUBLIC_KEY = getPublicKey(PRIVATE_KEY)
+
+setEnvValue('DAEMON_PUBKEY', PUBLIC_KEY)
+setEnvValue('DAEMON_PRIVKEY', PRIVATE_KEY)
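+
+// Run from the trawler package (illustrative): node scripts/gen-keypair-save-to-env.js
+// This persists the generated DAEMON_PUBKEY/DAEMON_PRIVKEY pair into the local .env.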
\ No newline at end of file
diff --git a/packages/trawler/src/bootstrap.js b/packages/trawler/src/bootstrap.js
index 56e95ce1..57b2fdfe 100644
--- a/packages/trawler/src/bootstrap.js
+++ b/packages/trawler/src/bootstrap.js
@@ -1,5 +1,5 @@
import { getSeedStatic } from "@nostrwatch/seed"
-import lmdb from "./relaydb.js"
+import rcache from "./relaydb.js"
import config from "./config.js"
import Logger from "@nostrwatch/logger"
import { fetch } from "cross-fetch"
@@ -9,22 +9,24 @@ const logger = new Logger('bootstrap')
export const bootstrap = async () => {
let configseed = [],
staticseed = [],
- lmdb = [],
+ cache = [],
api = []
- if(config.seed_sources.includes('config') && config?.seed instanceof Array)
+ if(config.trawler.seed_sources.includes('config') && config?.seed instanceof Array)
configseed = config?.seed
- if(config.seed_sources.includes('static'))
+ if(config.trawler.seed_sources.includes('static'))
staticseed = await relaysFromStaticSeed()
- if(config.seed_sources.includes('lmdb'))
- lmdb = await relaysOnlineFromLmdb()
+ if(config.trawler.seed_sources.includes('cache'))
+ cache = await relaysOnlineFromCache()
- if(config.seed_sources.includes('api'))
+ if(config.trawler.seed_sources.includes('api'))
api = await relaysOnlineFromApi()
- const uniques = new Set([...configseed, ...staticseed, ...lmdb, ...api])
+ const uniques = new Set([...configseed, ...staticseed, ...cache, ...api])
+
+ logger.info(`Bootstrapped ${uniques.size} relays`)
return [...uniques]
}
@@ -39,7 +41,7 @@ export const relaysOnlineFromApi = async () => {
let found = false
logger.debug('api results retrieved.')
return new Promise( resolve => {
- fetch(`${config.remotes.rest_api}/online`, {signal: controller.signal })
+ fetch(`${config.trawler.remotes.rest_api}/online`, {signal: controller.signal })
.then((response) => {
if (!response.ok) {
resolve()
@@ -62,8 +64,6 @@ export const relaysOnlineFromApi = async () => {
relays = response.relays //presumed
}
- logger.info(`api returned ${relays.length} relays`)
-
resolve(relays)
clearTimeout(timeout)
@@ -80,6 +80,6 @@ export const relaysOnlineFromApi = async () => {
})
}
-export const relaysOnlineFromLmdb = async () => {
- return lmdb.relay.get.online()
+export const relaysOnlineFromCache = async () => {
+ return rcache.relay.get.online()
}
\ No newline at end of file
diff --git a/packages/trawler/src/check-cache.js b/packages/trawler/src/check-cache.js
new file mode 100644
index 00000000..30326b6f
--- /dev/null
+++ b/packages/trawler/src/check-cache.js
@@ -0,0 +1,117 @@
+import { Nocap } from '@nostrwatch/nocap'
+import { lastCheckedId } from '@nostrwatch/utils'
+import Logger from '@nostrwatch/logger'
+
+import rcache from './relaydb.js'
+import config from './config.js'
+
+import { retryId } from './utils.js'
+
+const logger = new Logger('check-cache')
+
+export default async () => {
+ const relays = rcache.relay.get.all(['url', 'online'])
+ const uncheckedRelays = getUncheckedRelays(relays)
+ const expiredRelays = await getExpiredRelays(relays)
+
+ const relaysToCheck = [...new Set([ ...uncheckedRelays, ...expiredRelays ])]
+ let onlineRelays = relays.filter( relay => relay.online )
+ const totalRelays = relays.length
+
+ // logger.info(onlineRelays)
+
+ logger.info(`total relays: ${totalRelays}`)
+ logger.info(`online relays: ${onlineRelays.length}`)
+ logger.info(`expired relays: ${expiredRelays.length}`)
+ logger.info(`unchecked relays: ${uncheckedRelays.length}`)
+ logger.info(`relays to check: ${relaysToCheck.length}`)
+
+ await initRetryCount(relays)
+
+ if(relaysToCheck.length === 0) return
+
+ const doTruncate = config?.trawler?.check?.max
+
+ if(config?.trawler?.check?.max && relaysToCheck.length > config.trawler.check.max && typeof config.trawler.check.max === 'number')
+ relaysToCheck.length = parseInt(config.trawler.check.max)
+
+  logger.info(`checkCache(): Quickly filtering through ${uncheckedRelays.length} unchecked and
+    ${expiredRelays.length} expired of ${totalRelays} total relays before trawling.
+    ${doTruncate? "Max is set, so only checking "+relaysToCheck.length+" relays.": ""}
+    There are currently ${onlineRelays.length} relays online according to the cache.
+ `)
+ for await ( const relay of relaysToCheck ) {
+ const { url } = relay
+ let online = false
+ const nocap = new Nocap(url, { timeout: { connect: config?.trawler?.check?.timeout || 500 }})
+ try {
+ await nocap.check('connect').catch()
+ online = nocap.results.get('connect').data? true: false
+ }
+ catch(e) { }
+ await setLastChecked(url)
+ await setRetries(url, online)
+ rcache.relay.patch({ url, online })
+ }
+ onlineRelays = rcache.relay.get.all(['url', 'online']).filter( relay => relay.online )
+ logger.info(`checkCache(): Completed, ${onlineRelays.length} cached relays are online`)
+}
+
+const expiry = (retries) => {
+ if(typeof retries === 'undefined') return 0
+ let map
+ if(config?.trawler?.check?.expiry && config.trawler.check.expiry instanceof Array )
+ map = config.trawler.check.expiry.map( entry => { return { max: entry.max, delay: parseInt(eval(entry.delay)) } } )
+ else
+ map = [
+ { max: 3, delay: 1000 * 60 * 60 },
+ { max: 6, delay: 1000 * 60 * 60 * 24 },
+ { max: 13, delay: 1000 * 60 * 60 * 24 * 7 },
+ { max: 17, delay: 1000 * 60 * 60 * 24 * 28 },
+ { max: 29, delay: 1000 * 60 * 60 * 24 * 90 }
+ ];
+ const found = map.find(entry => retries <= entry.max);
+ return found ? found.delay : map[map.length - 1].delay;
+};
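+
+// Worked example with the default map above: a relay that has failed 5
+// consecutive checks (retries = 5) matches { max: 6 } and is rechecked after
+// 24h; past the last tier (retries > 29) the 90-day delay applies indefinitely.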
+
+const getUncheckedRelays = (relays=[]) => {
+ let unchecked = relays.filter( relay => relay.online == null )
+ return unchecked?.length? unchecked: []
+}
+
+const setLastChecked = async (url) => {
+ await rcache.cachetime.set( lastCheckedId('online',url), Date.now() )
+}
+
+const initRetryCount = async (relays) => {
+  for (const relay of relays) {
+    const url = relay.url
+    // logger.info(retryId(url))
+    const retries = rcache.retry.get( retryId(url) )
+    if(typeof retries === 'undefined' || retries === null)
+      await rcache.retry.set(retryId(url), 0)
+  }
+}
+
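+// A relay is "expired" when it has no last-checked timestamp at all, or its
+// last check is older than the backoff window derived from its retry count.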
+const getExpiredRelays = async (relays=[]) => {
+ const relayStatuses = await Promise.all(relays.map(async relay => {
+ const url = relay.url;
+ const lastChecked = await rcache.cachetime.get.one( lastCheckedId('online',url) );
+ if (!lastChecked) return { relay, isExpired: true };
+ const retries = await rcache.retry.get(retryId(url));
+ const isExpired = lastChecked < Date.now() - expiry(retries);
+ return { relay, isExpired };
+ }));
+ return relayStatuses.filter(r => r.isExpired).map(r => r.relay);
+}
+
+const setRetries = async ( url, online ) => {
+  if(online) {
+    logger.info(`${url} is online`)
+    await rcache.retry.set(retryId(url), 0)
+  } else {
+    // logger.info(url, 'is offline')
+    await rcache.retry.increment(retryId(url))
+  }
+}
\ No newline at end of file
diff --git a/packages/trawler/src/config.js b/packages/trawler/src/config.js
index cf96d58a..ffba037b 100644
--- a/packages/trawler/src/config.js
+++ b/packages/trawler/src/config.js
@@ -2,6 +2,6 @@ import { loadConfig } from '@nostrwatch/utils'
let config
if(!config)
- config = await loadConfig('crawler')
+ config = await loadConfig('trawler')
export default config
\ No newline at end of file
diff --git a/packages/trawler/src/crawler.js b/packages/trawler/src/crawler.js
deleted file mode 100644
index aad88268..00000000
--- a/packages/trawler/src/crawler.js
+++ /dev/null
@@ -1,110 +0,0 @@
-import "websocket-polyfill";
-import { Blob } from 'buffer';
-
-import { NostrFetcher } from 'nostr-fetch';
-import { SimplePool } from 'nostr-tools';
-import { simplePoolAdapter } from '@nostr-fetch/adapter-nostr-tools'
-
-import rdb from "./relaydb.js"
-import { ResultInterface } from "@nostrwatch/nocap";
-import Logger from "@nostrwatch/logger";
-
-import { parseRelayList } from "./parsers.js";
-import { lastCrawledId, checkOnline } from "./utils.js";
-import { parseRelayNetwork } from "../../utils/index.js"
-
-const logger = new Logger('crawler')
-
-export const crawl = async function($job){
- const relays = $job.data.relays
- const pool = new SimplePool();
- const fetcher = NostrFetcher.withCustomPool(simplePoolAdapter(pool));
- const promises = []
-
- let relaysPersisted = new Set(),
- listCount = 0
-
- relays.forEach( async (relay) => {
- const rlog = new Logger(relay)
-
- const keySize = new Blob([relay]).size
- if(keySize > 1978)
- return logger.error(`relay ${relay} is too large (${relaySize} bytes), skipping`)
-
- promises.push( new Promise( async (resolve) => {
- let lastEvent = 0
- const cacheSince = await rdb.cachetime.get( lastCrawledId(relay) )
- let since = cacheSince?.v || 0
- $job.updateProgress(`${relay} resuming from ${since}`)
-
- try {
- const it = await fetcher.allEventsIterator(
- [ relay ],
- { kinds: [ 2, 10002 ] },
- { since },
- { sort: true }
- )
-
- for await (const ev of it) {
- const timestamp = parseInt(ev.created_at)
- lastEvent = (timestamp>lastEvent? (timestamp>since? timestamp: since): lastEvent)
- if( await rdb.note.exists(ev) ) {
- await rdb.cachetime.set( lastCrawledId(relay), lastEvent )
- continue
- }
-
- let relayList = parseRelayList(ev)
-
- if(!(relayList instanceof Array))
- continue
-
- //prepare relays for rdb
- relayList = relayList.map( relay => {
- const result = {
- url: relay,
- network: parseRelayNetwork(relay),
- status: {
- connect: false,
- read: false,
- write: false
- },
- info: "",
- geo: "",
- dns: "",
- ssl: "",
- checked_at: -1,
- first_seen: -1,
- last_seen: -1
- }
- return result
- })
-
- const listPersisted = await rdb.relay.batch.insertIfNotExists(relayList)
- listPersisted.forEach(relay => relaysPersisted.add(relay))
-
- //store the note
- await rdb.note.set.one(ev)
-
- //increment counter
- listCount++
-
- if(relaysPersisted?.size)
- $job.updateProgress(`${relay}: ${listCount} new lists found, ${relaysPersisted.size} new relays found`)
- }
- }
- catch(err) {
- logger.err(`error crawling ${relay}: ${err}`)
- resolve()
- }
-
- if(lastEvent > 0)
- await rdb.cachetime.set( lastCrawledId(relay), lastEvent )
-
- resolve()
- }))
- })
-
- await Promise.all(promises)
-
- return [...relaysPersisted]
-}
\ No newline at end of file
diff --git a/packages/trawler/src/daemon.js b/packages/trawler/src/daemon.js
index 9e7e10d1..becc6b76 100644
--- a/packages/trawler/src/daemon.js
+++ b/packages/trawler/src/daemon.js
@@ -1,30 +1,79 @@
+import schedule from 'node-schedule'
+import rdb from './relaydb.js'
+import config from "./config.js"
+import checkCache from './check-cache.js'
+import publish from './publish.js'
import Logger from '@nostrwatch/logger'
-
import { configureQueues } from './queue.js'
-import { whenAllQueuesEmpty, whenAnyQueueIsActive } from './utils.js'
+import { bootstrap } from '@nostrwatch/seed'
+// import { whenAllQueuesEmpty, whenAnyQueueIsActive, } from './utils.js'
+import { chunkArray, msToCronTime } from "@nostrwatch/utils"
+
+const {trawlQueue} = await configureQueues()
const logger = new Logger('daemon')
+let busy = false
+
+
+const populateTrawler = async (relays) => {
+ await trawlQueue.pause()
+ const relaysPerChunk = config?.trawl_concurrent_relays || 50;
+ const batches = chunkArray(relays, relaysPerChunk)
+ batches.forEach( (batch, index) => {
+ logger.info(`adding batch ${index} to trawlQueue`)
+ trawlQueue.add(`trawlBatch${index}`, { relays: batch })
+ })
+ await trawlQueue.resume()
+}
+
+const maybeCheckRelays = async () => {
+ const seeded = rdb.relay.count.all() > 0
+ const useCache = config?.trawler?.check?.enabled || false
+ if(!seeded || !useCache || busy === true) return
+ logger.info('maybeCheckRelays(): checking relays, pausing TrawlerQueue')
+ busy = true
+ await trawlQueue.pause()
+ await checkCache()
+ await trawlQueue.resume()
+ busy = false
+ logger.info('maybeCheckRelays(): checked relays, resuming TrawlerQueue')
+}
+
+
+const maybePublishRelays = async () => {
+ const publishingEnabled = config?.trawler?.sync?.relays?.out?.events
+ if(!publishingEnabled || busy === true ) return
+ logger.info('maybePublishRelays(): publishing relays, pausing TrawlerQueue')
+ busy = true
+ await trawlQueue.pause()
+ await publish.all()
+ await trawlQueue.resume()
+ busy = false
+ logger.info('maybePublishRelays(): published relays, resuming TrawlerQueue')
+}
+
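+// Intervals come from config (values are eval'd, so they may be expressions
+// like "4*60*60*1000") with defaults of 4h for publishing and 12h for cache
+// checks; msToCronTime() converts each interval into a cron expression.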
+const schedules = () => {
+ const publishEveryMs = config?.trawler?.publish?.interval? parseInt(eval(config.trawler.publish.interval)): 4*60*60*1000
+ schedule.scheduleJob( msToCronTime(publishEveryMs), maybePublishRelays )
+
+ const checkEveryMs = config?.trawler?.check?.interval? parseInt(eval(config.trawler.check.interval)): 12*60*60*1000
+ schedule.scheduleJob( msToCronTime(checkEveryMs), maybeCheckRelays )
+}
+
export default async () => {
return new Promise( async (resolve) => {
- const {batchQueue, crawlQueue, connection:$connection} = await configureQueues()
- const queues = {batchQueue, crawlQueue}
- batchQueue.add('batchRelays', {});
-
- whenAllQueuesEmpty([batchQueue, crawlQueue], () => {
- batchQueue.add('batchRelays', {});
- })
- whenAnyQueueIsActive([batchQueue, crawlQueue], () => {})
-
- const watcher = null
- // const watcher = relayListWatcher({
- // queues: queues,
- // openSignal: whenAllQueuesEmpty,
- // closeSignal: whenAnyQueueIsActive
- // })
-
- resolve({ queues, watcher })
+ schedules()
+ await maybeCheckRelays()
+ await maybePublishRelays()
+ const relays = await bootstrap('trawler')
+ await populateTrawler( relays )
+ resolve({ queues: { trawlQueue }, watcher: null })
})
}
+ // whenAllQueuesEmpty([trawlQueue], () => {
+ // populateTrawler()
+ // })
+ // whenAnyQueueIsActive([trawlQueue], () => {})
\ No newline at end of file
diff --git a/packages/trawler/src/publish.js b/packages/trawler/src/publish.js
new file mode 100644
index 00000000..b418a198
--- /dev/null
+++ b/packages/trawler/src/publish.js
@@ -0,0 +1,62 @@
+import Publish from '@nostrwatch/publisher'
+import rcache from "./relaydb.js"
+import config from "./config.js"
+import { lastPublishedId } from "./utils.js"
+
+const p30066 = new Publish.Kind30066()
+
+const filterRelayProperties = (relay) => {
+ const relay_ = {}
+ const relayProps = config?.trawler?.sync?.relays?.out?.events?.properties
+ if(!(relayProps instanceof Array)) return relay
+ Object.entries( relay ).forEach( entry => {
+ if( relayProps.includes(entry[0]) )
+ relay_[entry[0]] = entry[1]
+ })
+ return relay_
+}
+
+const filterRelaysProperties = (relays) => {
+ return relays.map( filterRelayProperties )
+}
+
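+// Gate re-publishing per relay: only relays whose last publish is older than
+// config.trawler.publish.expiry (eval'd, default four hours) go out again.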
+const relayIsExpired = (relay) => {
+  const lastPublished = rcache.cachetime.get.one( lastPublishedId(relay.url) );
+  const expiry = eval(config?.trawler?.publish?.expiry) || 4 * 60 * 60 * 1000;
+  if (!lastPublished) return true;
+  if (lastPublished < Date.now() - expiry) return true;
+  return false;
+}
+
+const updatePublishTimes = async (relays=[]) => {
+ for await ( const relay of relays ) {
+ await rcache.cachetime.set( lastPublishedId(relay.url), Date.now() )
+ }
+}
+
+export const publishOne = async (relay) => {
+  if(!relay) throw new Error('publishOne(): relay must be defined')
+  relay = filterRelayProperties(relay)
+  await p30066.one(relay)
+}
+
+export const publishMany = async (relays = []) => {
+ relays = filterRelaysProperties(relays)
+ console.log('before filter', relays.length)
+ const filteredRelays = relays.filter(relayIsExpired);
+ console.log('after filter', filteredRelays.length)
+ if (!filteredRelays.length) return;
+ await p30066.many(filteredRelays);
+ await updatePublishTimes(filteredRelays);
+}
+
+export const publishAll = async () => {
+ const relays = rcache.relay.get.all()
+ await publishMany(relays)
+}
+
+export default {
+ many: publishMany,
+ one: publishOne,
+ all: publishAll
+}
\ No newline at end of file
diff --git a/packages/trawler/src/queue.js b/packages/trawler/src/queue.js
index 5e9e574e..cd1f4912 100644
--- a/packages/trawler/src/queue.js
+++ b/packages/trawler/src/queue.js
@@ -1,99 +1,61 @@
-import { Queue, Worker } from 'bullmq';
+import { Worker } from 'bullmq';
import Redis from 'ioredis';
-import { RedisConnectionDetails } from '@nostrwatch/utils'
-import { bootstrap } from './bootstrap.js'
-import { chunkArray } from './utils.js'
-
-import { crawl } from './crawler.js';
+import { trawl } from './trawler.js';
import Logger from '@nostrwatch/logger'
-import config from './config.js'
-
-const relaysPerChunk = config?.crawl_concurrent_relays || 50;
+import { TrawlQueue } from '@nostrwatch/controlflow'
export const configureQueues = async function(){
const connection = new Redis()
/**********
- * Batcher
+ * Trawler
*/
- const batchLogger = new Logger('batch queue')
+ const trawlLogger = new Logger('trawler queue')
//queue
- const batchQueue = new Queue('batchQueue', { removeOnComplete: true, removeOnFail: true, timeout: 1000*60*10, connection: RedisConnectionDetails()})
- //job
- const batchJob = async (job) => {
- const bootstrapRelays = await bootstrap()
- const batches = chunkArray(bootstrapRelays, relaysPerChunk)
- batches.forEach( (batch, index) => {
- batchLogger.info(`adding batch ${index} to crawlQueue`)
- crawlQueue.add(`crawlBatch${index}`, { relays: batch })
- })
- }
-
- const batchJobCompleted = async (job, returnvalue) => {
- batchLogger.info(`batchJob ${job.id} completed`)
- }
+ const trawler = TrawlQueue({ removeOnComplete: { age: 30*60*1000 }, removeOnFail: { age: 30*60*1000 }, timeout: 1000*60*10 })
- const batchJobFailed = async (job, err) => {
- batchLogger.err(`batchJob ${job.id} failed: ${err}`)
+ const trawlJobProgress = async ($job, progress) => {
+ if(!(progress instanceof Object)) return trawlLogger.warn(`Progress data is not an object, it's a ${typeof progress}`)
+ const { type, source } = progress
+ if(type === 'found'){
+ const { source, listCount, result, relaysPersisted, total } = progress
+ trawlLogger.info(`${source}: ${listCount} lists found, +${result?.length} relays persisted, ${relaysPersisted.size} total found in this chunk. ${total} total relays`)
+ }
+ if(type === 'resuming') {
+ const { since } = progress
+ trawlLogger.info(`${source} resuming from ${since}`)
+ }
}
- const batchWorker = new Worker('batchQueue', batchJob, { concurrency: 1, connection: RedisConnectionDetails(), blockingConnection: true })
- batchWorker.on('completed', batchJobCompleted);
- batchWorker.on('failed', batchJobFailed);
-
- /**********
- * Crawler
- */
-
- const crawlLogger = new Logger('crawler queue')
+ const trawlQueueDrained = () => {}
+
+  await trawler.$Queue.drain()
- //queue
-
- const crawlQueueDrained = () => {}
-
- const crawlQueue = new Queue('crawlQueue', { removeOnComplete: true, removeOnFail: true, timeout: 1000*60*10, connection: RedisConnectionDetails() })
- crawlQueue.on('drained', crawlQueueDrained)
-
- //job
- const crawlJob = async ($job) => {
- return crawl($job)
- }
+ trawler.$Queue.on('drained', trawlQueueDrained)
+ // trawler.$QueueEvents.on('progress', trawlJobProgress)
- const crawlJobCompleted = async ($job, foundRelays) => {
- crawlLogger.info(`crawlJob#${$job.id} found ${foundRelays.length} relays}`)
+ const trawlJobCompleted = async ($job, foundRelays) => {
+ trawlLogger.info(`trawlJob#${$job.id} found ${foundRelays.length} relays}`)
}
- const crawlJobFailed = async ($job, err) => {
- crawlLogger.info(`crawlJob ${$job.id} failed: ${err}`)
+ const trawlJobFailed = async ($job, err) => {
+ trawlLogger.warn(`trawlJob ${$job.id} failed: ${err}`)
}
- const crawlJobProgress = async ($job, progress) => {
- crawlLogger.info(progress)
- }
-
- const crawlWorker = new Worker('crawlQueue', crawlJob, { concurrency: 1, connection: RedisConnectionDetails(), maxStalledCount: 1 })
- crawlWorker.on('completed', crawlJobCompleted)
- crawlWorker.on('failed', crawlJobFailed)
- crawlWorker.on('progress', crawlJobProgress)
-
- // await crawlQueue.drain()
-
- // if(config?.debug?.on_launch_drain_all) {
- // await batchQueue.drain()
- // await crawlQueue.drain()
- // }
+  const trawlWorker = new Worker(trawler.$Queue.name, trawl, { concurrency: 1, maxStalledCount: 1, connection })
+ trawlWorker.on('completed', trawlJobCompleted)
+ trawlWorker.on('failed', trawlJobFailed)
+ trawlWorker.on('progress', trawlJobProgress)
return {
- batchQueue,
- crawlQueue,
- batchWorker,
- crawlWorker,
+ trawlQueue: trawler.$Queue,
+ trawlWorker,
connection
}
}
diff --git a/packages/trawler/src/relaydb.js b/packages/trawler/src/relaydb.js
index bdf612a8..a8beb9a4 100644
--- a/packages/trawler/src/relaydb.js
+++ b/packages/trawler/src/relaydb.js
@@ -1,13 +1,16 @@
-import rdb from '@nostrwatch/relaydb'
-import config from './config.js'
+import rcache from '@nostrwatch/relaycache'
+import config from "./config.js"
-let $rdb
+let $rcache
-if(!config?.lmdb_path)
- throw new Error("No LMDB path specified in config")
+console.log(process.env.PWD, process.cwd())
+console.log('config', config)
-if(!$rdb) {
- $rdb = rdb(config.lmdb_path)
+if(!process.env.NWCACHE_PATH)
+ throw new Error("NWCACHE_PATH, the path to the nostr watch LMDB cache, was not specified in the environment.")
+
+if(!$rcache) {
+ $rcache = rcache(process.env.NWCACHE_PATH)
}
-export default $rdb
\ No newline at end of file
+export default $rcache
\ No newline at end of file
diff --git a/packages/trawler/src/replay.js b/packages/trawler/src/replay.js
new file mode 100644
index 00000000..286a4329
--- /dev/null
+++ b/packages/trawler/src/replay.js
@@ -0,0 +1,11 @@
+import rcache from "./relaydb.js"
+import { relaysFromRelayList } from "./trawler.js"
+
+export const replay = async () => {
+ const notes = await rcache.note.get.allIds()
+ for (const noteid of notes) {
+ const note = await rcache.note.get(noteid)
+    const persistedIds = await relaysFromRelayList(note)
+    console.log('ids', persistedIds)
+ }
+}
\ No newline at end of file
diff --git a/packages/trawler/src/sanitizers.js b/packages/trawler/src/sanitizers.js
index feff4a64..f83d1cf9 100644
--- a/packages/trawler/src/sanitizers.js
+++ b/packages/trawler/src/sanitizers.js
@@ -4,6 +4,8 @@
* @returns Filtered and deduped list of relays
*/
+import isLocal from "url-local"
+
import lmdb from './relaydb.js'
import Logger from '@nostrwatch/logger'
@@ -13,7 +15,7 @@ export const normalizeRelays = (relays) => {
return relays
.map( sanitizeRelayUrl )
.filter( qualifyRelayUrl )
- .reduce ( normalizeRelayUrls, [] )
+ .reduce ( normalizeRelayUrlAcc, [] )
}
export const sanitizeRelayList = (relays) => {
@@ -37,6 +39,9 @@ export const relayAlreadyKnown = async (relay) => {
}
export const qualifyRelayUrl = (relay) => {
+ if(isLocal(relay))
+ return false
+
if( /^(wss:\/\/)(.*)(:\/\/)(.*)$/.test(relay) ) //multiple protocols
return false
@@ -58,7 +63,7 @@ export const qualifyRelayUrl = (relay) => {
return true;
}
-const normalizeRelayUrls = (acc, relay) => {
+const normalizeRelayUrlAcc = (acc, relay) => {
const normalized = normalizeRelayUrl(relay);
if (normalized) {
acc.push(normalized);
@@ -66,12 +71,19 @@ const normalizeRelayUrls = (acc, relay) => {
return acc;
}
+const normalizeRelayUrls = (relays) => {
+ return relays.map( relay => normalizeRelayUrl(relay))
+}
+
const normalizeRelayUrl = (relay) => {
try {
- return new URL(relay).toString()
+ const url = new URL(relay)
+ url.hash = ''
+ return url.toString()
}
catch(e) {
- return
+ logger.warn(`Failed to normalize relay ${relay}`)
+ return ""
}
}
@@ -133,20 +145,16 @@ export const relaysFilterDuplicates = (relays) => {
});
}
-
export const relaysFilterPortDuplicates = (relays) => {
const relaysMap = new Map(relays.map(relay => [new URL(relay).hostname, relay]));
return Array.from(relaysMap.values());
}
-
export const relaysFilterRobotsTxtDisallowed = (relays) => {
const disallowed = cache.get('disallowed') || [];
return relays.filter(relay => !disallowed.includes(relay));
}
-
export const relaysFilterBlocked = (relays) => {
return relays.filter(relay => !BLOCK_HOSTNAMES.some(hostname => relay.includes(hostname)));
-}
-
+}
\ No newline at end of file
diff --git a/packages/trawler/src/sync.js b/packages/trawler/src/sync.js
new file mode 100644
index 00000000..4f78b1ee
--- /dev/null
+++ b/packages/trawler/src/sync.js
@@ -0,0 +1,52 @@
+import { SyncQueue } from "@nostrwatch/controlflow"
+import hash from 'object-hash'
+import config from "./config.js"
+
+const { $Queue:$SyncQueue } = SyncQueue()
+
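+// Thin wrappers around the shared SyncQueue. The *Out variants push create
+// jobs for this daemon's discoveries, the *In variants request reads back
+// into the local cache. Singular functions expect data.payload to be a single
+// relay object, plural functions an array; each path is gated by its own
+// config.trawler.sync.relays.{out,in} flag.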
+export const syncRelayOut = async (data) => {
+ if(config?.trawler?.sync?.relays?.out?.queue){
+ if(data.payload instanceof Array) throw new Error("syncRelayOut(): data.payload must be an object, not an array, otherwise use syncRelaysOut() if trying to sync multiple relays")
+ await $SyncQueue.add('relay-create', data, { priority: 1 /*, jobId: `SyncOut@${process.env.DAEMON_PUBKEY}:${data.url}` */ })
+ }
+}
+
+export const syncRelaysOut = async (data) => {
+ if(config?.trawler?.sync?.relays?.out?.queue){
+ if(!(data.payload instanceof Array)) throw new Error("syncRelaysOut(): data.payload must be an array, not an object, otherwise use syncRelayOut() if trying to sync a single relay")
+ await $SyncQueue.add('relays-create', data, { priority: 1 /*, jobId: `SyncOut@${process.env.DAEMON_PUBKEY}:${hash(data.payload)}` */ })
+ }
+}
+
+export const syncRelayIn = async (data) => {
+ if(config?.trawler?.sync?.relays?.in?.queue){
+ if(data.payload instanceof Array) throw new Error("syncRelayIn(): data.payload must be an object, not an array, otherwise use syncRelaysIn() if trying to sync multiple relays")
+ await $SyncQueue.add('relay-get', data, { priority: 1, jobId: `SyncIn@${process.env.DAEMON_PUBKEY}:${data.url}` })
+ //watch for completed on jobid, populate cache
+ }
+ if(config?.trawler?.sync?.relays?.in?.events){
+ //subscribe to events matching kind/pubkey/tag[d] filter, populate cache
+ }
+}
+
+export const syncRelaysIn = async (data) => {
+ if(config?.trawler?.sync.relays?.in?.queue){
+ if(!(data.payload instanceof Array)) throw new Error("syncRelaysIn(): data.payload must be an array, not an object, otherwise use syncRelayIn() if trying to sync a single relay")
+ await $SyncQueue.add('relays-get', data, { priority: 1, jobId: `SyncIn@${process.env.DAEMON_PUBKEY}:${hash(data.payload)}` })
+ //watch for completed on jobid, populate cache
+ }
+ if(config?.trawler?.sync?.relays?.in?.events){
+ //subscribe to events matching kind/pubkey filter, populate cache
+ }
+}
+
+export default {
+ relay: {
+ out: syncRelayOut,
+ in: syncRelayIn
+ },
+ relays: {
+ out: syncRelaysOut,
+ in: syncRelaysIn
+ }
+}
\ No newline at end of file
diff --git a/packages/trawler/src/trawler.js b/packages/trawler/src/trawler.js
new file mode 100644
index 00000000..76d3f24f
--- /dev/null
+++ b/packages/trawler/src/trawler.js
@@ -0,0 +1,160 @@
+import "websocket-polyfill";
+import { Blob } from 'buffer';
+
+import { NostrFetcher } from 'nostr-fetch';
+import { SimplePool } from 'nostr-tools';
+import { simplePoolAdapter } from '@nostr-fetch/adapter-nostr-tools'
+
+import config from "./config.js"
+import rcache from "./relaydb.js"
+import Logger from "@nostrwatch/logger";
+import sync from "./sync.js"
+
+import { parseRelayList } from "./parsers.js";
+import { lastTrawledId } from "./utils.js";
+import { parseRelayNetwork, relayId } from "@nostrwatch/utils"
+import { SyncQueue, TrawlQueue } from "@nostrwatch/controlflow"
+
+const logger = new Logger('trawler')
+
+const { $Queue:$SyncQueue, $QueueEvents:$SyncEvents } = SyncQueue()
+const { $Queue:$TrawlQueue, $QueueEvents:$TrawlEvents } = TrawlQueue()
+
+let relaysPersisted,
+ listCount
+
+let promises,
+ deferPersist,
+ $currentJob
+
+const addRelaysToCache = async (relayList) => {
+  for (const relayObj of relayList) {
+    await rcache.relay.insertIfNotExists(relayObj)
+  }
+}
+
+const noteInCache = async (ev, relay, lastEvent) => {
+ const exists = await rcache.note.exists(ev)
+ if( exists )
+ await rcache.cachetime.set( lastTrawledId(relay), lastEvent )
+ return exists
+}
+
+const setLastEvent = (ev, since, lastEvent) => {
+ const timestamp = parseInt(ev.created_at)
+ return timestamp>lastEvent? (timestamp>since? timestamp: since): lastEvent
+}
+
+const determineSince = async (relay) => {
+ const cacheSince = await rcache.cachetime.get.one( lastTrawledId(relay) )
+ return cacheSince || 0
+}
+
+export const relaysFromRelayList = async ( ev ) => {
+ let relayList = parseRelayList(ev)
+
+ if(!(relayList instanceof Array))
+ return false
+
+ relayList = relayList.map( relay => {
+ return {
+ // id: relayId(relay),
+ url: relay,
+ network: parseRelayNetwork(relay),
+ online: null
+ }
+ })
+ return relayList
+}
+
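+// Shape of a sync job for a batch of discovered relays. `roundtrip` carries
+// enough context (requesting daemon, source relay, trawl job id, event id)
+// for watchQueue() below to route the completion back to the right trawl job.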
+const trawlJobData = (relayList, roundtrip) => {
+ return {
+ type: 'relay',
+ action: 'create',
+ condition: 'ifNotExists',
+ batch: true,
+ payload: relayList,
+ roundtrip
+ }
+}
+
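+// Trawl one batch of relays: for each relay, resume from the last trawled
+// timestamp, iterate kind 2 and kind 10002 events, parse relay lists out of
+// unseen events, cache the discovered relays, and enqueue them for sync.
+// Persisting the note itself is deferred until its sync job completes.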
+export const trawl = async function($job){
+ promises = new Array()
+ deferPersist = new Object()
+ relaysPersisted = new Set()
+ listCount = 0
+ $currentJob = $job
+
+ const relays = $job.data.relays
+ const pool = new SimplePool();
+ const fetcher = NostrFetcher.withCustomPool(simplePoolAdapter(pool));
+
+ relays.forEach( async (relay) => {
+
+ promises.push(new Promise( async (resolve) => {
+
+ let lastEvent = 0
+ let since = await determineSince(relay)
+ $job.updateProgress({ type: 'resuming', source: relay, since })
+ try {
+
+ const it = await fetcher.allEventsIterator(
+ [ relay ],
+ { kinds: [ 2, 10002 ] },
+ { since },
+ { sort: true }
+ )
+ for await (const ev of it) {
+ lastEvent = setLastEvent(ev, since, lastEvent)
+ if( await noteInCache(ev, relay, lastEvent) ) continue
+        const relayList = await relaysFromRelayList(ev)
+        if(relayList === false) continue
+        addRelaysToCache(relayList)
+ deferPersist[ev.id] = async () => await rcache.note.set.one(ev)
+
+ const data = trawlJobData(relayList, {
+ requestedBy: process.env.DAEMON_PUBKEY,
+ source: relay,
+ trawlJobId: $job.id,
+ eventId: ev.id
+ })
+
+ await sync.relays.out(data)
+ // await $SyncQueue.add('relay-create', jobData, { priority: 1 })
+ }
+ }
+ catch(err) {
+ logger.error(`${relay}: ${err}`)
+ }
+ if(lastEvent > 0)
+ await rcache.cachetime.set( lastTrawledId(relay), lastEvent )
+ resolve()
+ }))
+ })
+  await Promise.allSettled(promises)
+ return [...relaysPersisted]
+}
+
+const watchQueue = () => {
+ $SyncEvents.on( 'completed', async ({returnvalue}) => {
+ const { result, roundtrip } = returnvalue
+ const { requestedBy, source, trawlJobId, eventId } = roundtrip
+ if(requestedBy != process.env.DAEMON_PUBKEY) return
+ const $trawlJob = await $TrawlQueue.getJob(trawlJobId)
+ if(result === false || result.length == 0) return
+ result.forEach(relay => relaysPersisted.add(relay))
+ listCount++
+ if(result?.length && result.length > 0) {
+ if(deferPersist?.[eventId])
+ await deferPersist[eventId]()
+      if(relaysPersisted?.size && typeof $trawlJob?.updateProgress === 'function')
+        await $trawlJob.updateProgress({ type: 'found', source, listCount, result, relaysPersisted, total: rcache.relay.count.all() })
+ }
+ if(deferPersist?.[eventId])
+ delete deferPersist[eventId]
+ })
+}
+
+if(config?.trawler?.sync?.relays?.out?.queue)
+ watchQueue()
+
diff --git a/packages/trawler/src/types.js b/packages/trawler/src/types.js
deleted file mode 100644
index c279464c..00000000
--- a/packages/trawler/src/types.js
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * @typedef {Object} Relay
- * @property {number} age
- */
-
-/** @type {Relay} */
-const Relay = {
- url: '',
- online: false,
- read: false,
- write: false,
- info: {},
- found: new Date().getTime(),
- last_online: null,
- first_online: new Date().getTime(),
- network: 'clearnet',
-};
-
-
-/**
- * @typedef {Object} RelayInfo
- * @property {number} age
- */
-
-/** @type {RelayInfo} */
-const RelayInfo = {}
\ No newline at end of file
diff --git a/packages/trawler/src/utils.js b/packages/trawler/src/utils.js
index 0274322c..ac3e6043 100644
--- a/packages/trawler/src/utils.js
+++ b/packages/trawler/src/utils.js
@@ -1,26 +1,15 @@
import _timestring from "timestring";
import WebSocket from 'ws';
-export const lastCrawledId = (relay) => `LastCrawled:${relay}`
+// import { env } from '@nostrwatch/utils'
+export const lastTrawledId = (relay) => `LastTrawled:${relay}`
+export const retryId = (relay) => `Trawler:${relay}`
+export const lastPublishedId = (relay) => `LastPublished:${relay}`
export const excludeKnownRelays = (known, discovered) => {
return discovered.filter( relay => !known.includes(relay) )
}
-export const chunkArray = function(arr, chunkSize) {
- if (chunkSize <= 0) {
- throw new Error("Chunk size must be greater than 0.");
- }
-
- const result = [];
- for (let i = 0; i < arr.length; i += chunkSize) {
- const chunk = arr.slice(i, i + chunkSize);
- result.push(chunk);
- }
-
- return result;
-}
-
// Function to check if a single queue is empty
export const isQueueEmpty = async function(queue) {
const counts = await queue.getJobCounts("active");
@@ -34,60 +23,48 @@ export const areAllQueuesEmpty = async function(queues) {
return checks.every(check => check);
}
-// Function to wait until a single queue is empty
-export const whenAllQueuesEmpty = function(queues, callback) {
+export const whenAllQueuesEmpty = function(queues, callback=()=>{}) {
const checkQueues = async () => {
const allEmpty = await areAllQueuesEmpty(queues);
- // console.log('whenAllQueuesEmpty: checking queues', Object.keys(queues), allEmpty)
if (allEmpty) {
- callback(); // Trigger the callback when all queues are empty
+ callback();
}
- setTimeout(checkQueues, 100); // Recheck after a specified interval
+ setTimeout(checkQueues, 100);
};
checkQueues();
};
-
-// Function to check if a single queue has active jobs
export const isQueueActive = async function(queue) {
const counts = await queue.getJobCounts("active");
- console.log('active', counts.active)
+ // console.log('active', counts.active)
return counts.active > 0;
};
-// Function to check if any queue is active
export const isAnyQueueActive = async function(queues) {
const checks = await Promise.all(Object.keys(queues).map((key) => isQueueActive(queues[key])));
return checks.some(check => check);
};
-// Unified function to wait until a queue or any queue in an array is active
-export const whenAnyQueueIsActive = function(input) {
- return new Promise(resolve => {
- const check = async () => {
- let anyActive;
- if (Array.isArray(input))
- anyActive = await isAnyQueueActive(input)
- else
- anyActive = await isQueueActive(input);
- if (anyActive)
- resolve();
- else
- setTimeout(check, 1000); // Check every second, for example
- };
- check();
- });
+export const whenAnyQueueIsActive = function(input, callback=()=>{}) {
+ const check = async () => {
+ let anyActive;
+ if (Array.isArray(input))
+ anyActive = await isAnyQueueActive(input)
+ else
+ anyActive = await isQueueActive(input);
+ if (anyActive)
+ callback();
+ else
+ setTimeout(check, 1000);
+ };
+ check();
};
-
export const countItemsInObjectOfArrays = function(objectOfArrays) {
const counts = {};
-
- // Iterate over each key in the object and count the elements in its array
for (const key in objectOfArrays) {
counts[key] = objectOfArrays[key].length;
}
-
return counts;
}
diff --git a/packages/trawler/src/watcher.js b/packages/trawler/src/watcher.js
index 6872b193..e21b8f3a 100644
--- a/packages/trawler/src/watcher.js
+++ b/packages/trawler/src/watcher.js
@@ -12,7 +12,7 @@ const logger = new Logger('watcher')
export const relayListWatcher = async function(options) {
const { openSignal, closeSignal, queues } = options
- let since = lmdb.cachetime.get('watcherLastUpdate') || 0;
+ let since = lmdb.cachetime.get.one('watcherLastUpdate') || 0;
const relayListProviders = config?.relay_list_providers || [];
if(!relayListProviders.length)
@@ -28,7 +28,7 @@ export const relayListWatcher = async function(options) {
let subscription
let connected = false
- openSignal([queues.batchQueue, queues.crawlQueue], async () => {
+ openSignal([queues.batchQueue, queues.trawlQueue], async () => {
if(!connected) {
connected = true
console.log('open!')
@@ -37,7 +37,7 @@ export const relayListWatcher = async function(options) {
}
})
- closeSignal([queues.batchQueue, queues.crawlQueue], async () => {
+ closeSignal([queues.batchQueue, queues.trawlQueue], async () => {
if(connected) {
connected = false
console.log('close!')
diff --git a/packages/utils/config.js b/packages/utils/config.js
new file mode 100644
index 00000000..acd2ceb3
--- /dev/null
+++ b/packages/utils/config.js
@@ -0,0 +1,48 @@
+import fs from 'fs/promises';
+import fsSync from 'fs';
+import path from 'path'
+import yaml from 'js-yaml';
+
+// let config
+
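+// Resolve a provider's options, with package-scoped config taking precedence:
+// config.<caller>.<provider> is used first, then top-level config.<provider>.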
+export const extractConfig = async (caller, provider, warn=true) => {
+  let opts = {}
+  const config = await loadConfig()
+  if(config?.[caller]?.[provider])
+    opts = config[caller][provider]
+  else if(config?.[provider])
+    opts = config[provider]
+  if(warn && Object.keys(opts).length === 0)
+    console.warn(`No ${provider} config specified in 'config.${caller}.${provider}' nor in 'config.${provider}'`)
+  return opts
+}
+
+export const loadConfigSync = function(){
+  try {
+    const configPath = process.env?.CONFIG_PATH? process.env.CONFIG_PATH: './config.yaml'
+    const fileContents = fsSync.readFileSync(configPath, 'utf8');
+    return yaml.load(fileContents);
+  } catch (e) {
+    console.error(e);
+    return {};
+  }
+}
+
+export const loadConfig = async function (){
+  try {
+    const configPath = process.env?.CONFIG_PATH? process.env.CONFIG_PATH: './config.yaml'
+    console.log('config path:', configPath)
+    const fileContents = await fs.readFile(configPath, 'utf8');
+    let data = yaml.load(fileContents);
+    return data;
+  } catch (e) {
+    console.error(e);
+    return {};
+  }
+}
+
+// config = await loadConfig()
\ No newline at end of file
diff --git a/packages/utils/env-tools.js b/packages/utils/env-tools.js
new file mode 100644
index 00000000..57954343
--- /dev/null
+++ b/packages/utils/env-tools.js
@@ -0,0 +1,47 @@
+
+
+import fs from 'fs'
+import os from 'os'
+
+//METHODS BELOW ARE FOR MODIFYING ENV FILES FOR TESTING PURPOSES, SHOULD USE "DOTENV" FOR MOST SITUATIONS!!!!
+const envFilePath = '.env'
+
+// read .env file & convert to array
+export const readEnvVars = () => fs.readFileSync(envFilePath, "utf-8").split(os.EOL);
+
+/**
+ * Finds the key in .env files and returns the corresponding value
+ *
+ * @param {string} key Key to find
+ * @returns {string|null} Value of the key
+ */
+export const getEnvValue = (key) => {
+ // find the line that contains the key (exact match)
+ const matchedLine = readEnvVars().find((line) => line.split("=")[0] === key);
+  // split the line (delimiter is '=') and return the item at index 1 (the value)
+ return matchedLine !== undefined ? matchedLine.split("=")[1] : null;
+};
+
+/**
+ * Updates value for existing key or creates a new key=value line
+ *
+ * This function is a modified version of https://stackoverflow.com/a/65001580/3153583
+ *
+ * @param {string} key Key to update/insert
+ * @param {string} value Value to update/insert
+ */
+export const setEnvValue = (key, value) => {
+ const envVars = readEnvVars();
+ const targetLine = envVars.find((line) => line.split("=")[0] === key);
+ if (targetLine !== undefined) {
+ // update existing line
+ const targetLineIndex = envVars.indexOf(targetLine);
+ // replace the key/value with the new value
+ envVars.splice(targetLineIndex, 1, `${key}="${value}"`);
+ } else {
+ // create new key value
+ envVars.push(`${key}="${value}"`);
+ }
+ // write everything back to the file system
+ fs.writeFileSync(envFilePath, envVars.join(os.EOL));
+};
\ No newline at end of file
diff --git a/packages/utils/env.js b/packages/utils/env.js
new file mode 100644
index 00000000..e03b4bf9
--- /dev/null
+++ b/packages/utils/env.js
@@ -0,0 +1 @@
+export const env = process.env
\ No newline at end of file
diff --git a/packages/utils/geo.js b/packages/utils/geo.js
new file mode 100644
index 00000000..d45efe62
--- /dev/null
+++ b/packages/utils/geo.js
@@ -0,0 +1,43 @@
+import ngeohash from 'ngeohash';
+import iso3166 from 'iso-3166';
+
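+// Flattens a geo-ip style lookup result into [value, label] pairs for tagging:
+// a geohash derived from lat/lon, city, ISO-3166-2 region code, ISO-3166-1
+// alpha-2 country code, continent, and a constant 'Earth' planet entry.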
+export const transformData = (input) => {
+ const transformedData = [];
+
+ // Geohash
+ if (input.lat && input.lon) {
+ const geohash = ngeohash.encode(input.lat, input.lon);
+ transformedData.push([geohash, 'geohash']);
+ }
+
+ // City
+ if (input.city) {
+ transformedData.push([input.city, 'city']);
+ }
+
+ // ISO-3166-2 (region code)
+ if (input.country && input.region) {
+ const iso3166Data = iso3166.country(input.country);
+ if (iso3166Data && iso3166Data.regions) {
+ const regionCode = iso3166Data.regions.find(r => r.name === input.regionName)?.code;
+ if (regionCode) {
+ transformedData.push([regionCode, 'ISO-3166-2']);
+ }
+ }
+ }
+
+ // ISO-3166-1 Alpha-2 (country code)
+ if (input.countryCode) {
+ transformedData.push([input.countryCode, 'ISO-3166-1:Alpha-2']);
+ }
+
+ // Continent
+ if (input.continent) {
+ transformedData.push([input.continent, 'continent']);
+ }
+
+ // Planet - Assuming Earth as there's no specific data for planet
+ transformedData.push(['Earth', 'planet']);
+
+ return transformedData;
+}
diff --git a/packages/utils/index.js b/packages/utils/index.js
index 1fbcb055..4bf3a796 100644
--- a/packages/utils/index.js
+++ b/packages/utils/index.js
@@ -1,9 +1,16 @@
-import fs from 'fs/promises';
-import yaml from 'js-yaml';
import murmurhash from 'murmurhash'
import 'dotenv/config'
import network from './network.js'
+export { getEnvValue, setEnvValue } from './env-tools.js'
+export { loadConfigSync, loadConfig, extractConfig } from './config.js'
+
+// export { env } from './env.js'
+
+let { DAEMON_PUBKEY } = process.env;
+DAEMON_PUBKEY = DAEMON_PUBKEY? DAEMON_PUBKEY : 'WARNING_DAEMON_PUBKEY_UNSET';
+export { DAEMON_PUBKEY }
+
export const parseRelayNetwork = network.parseRelayNetwork
export const relaysSerializedByNetwork = network.relaysSerializedByNetwork
@@ -12,10 +19,13 @@ export const relayId = (relay, schema="Relay") => `${schema}@${hashRelay(relay)}
export const serviceId = (service) => `Service@${service}`
export const cacheTimeId = (key) => `CacheTime@${key}`
export const noteId = (key) => `Note@${key}`
+export const lastCheckedId = (key, relay) => `${DAEMON_PUBKEY}:LastChecked:${key}:${relay}`
export const now = () => new Date().getTime()
export const nowstr = () => Math.round(now()/1000)
+export const delay = async (ms) => new Promise(resolve => setTimeout(resolve, ms))
+
export const devnull = () => {}
export const RedisConnectionDetails = function(){
@@ -24,39 +34,42 @@ export const RedisConnectionDetails = function(){
if(key.startsWith('REDIS_'))
redis[key.replace('REDIS_', '').toLowerCase()] = process.env[key]
})
- console.log(redis)
return redis
}
-export const loadConfigSync = function(key){
- try {
- const configPath = process.env.DOCKER == 'true' ? `/etc/@nostrwatch/${key}/config.yaml` : process.env.CONFIG_PATH
- if(!configPath)
- throw new Error(`No config path set for ${key} config file`)
- const fileContents = fs.readFileSync(configPath, 'utf8');
- const data = yaml.load(fileContents);
- if(data?.[key])
- data = data.package
- return data;
- } catch (e) {
- console.error(e);
- return {};
+export const chunkArray = function(arr, chunkSize) {
+ if (chunkSize <= 0) {
+ throw new Error("Chunk size must be greater than 0.");
+ }
+
+ const result = [];
+ for (let i = 0; i < arr.length; i += chunkSize) {
+ const chunk = arr.slice(i, i + chunkSize);
+ result.push(chunk);
}
+
+ return result;
}
-export const loadConfig = async function (key){
- try {
- if(!process.env?.CONFIG_PATH)
- return {}
- const fileContents = await fs.readFile(process.env.CONFIG_PATH, 'utf8');
- let data = yaml.load(fileContents);
- if (data?.[key]) {
- data = data[key]; // Assuming you want to access 'key' in data
- }
- return data;
- } catch (e) {
- console.error(e);
- return {};
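+// Approximates an interval in ms as a cron expression: sub-hour intervals
+// become `*/N * * * *` (every N minutes), longer ones `0 */H * * *` (every
+// H hours), rounding up. e.g. 5*60*1000 -> "*/5 * * * *",
+// 4*60*60*1000 -> "0 */4 * * *".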
+export const msToCronTime = (milliseconds) => {
+  // Reject anything that's not a non-negative integer
+  if (!Number.isInteger(milliseconds) || milliseconds < 0) {
+    throw new Error('msToCronTime(): milliseconds must be a non-negative integer');
+  }
+
+  // Convert milliseconds to minutes (floor at 1 so the cron expression stays valid)
+  let minutes = Math.max(1, Math.ceil(milliseconds / 60000));
+
+ // Convert to hours if it's 60 minutes or more
+ if (minutes >= 60) {
+ let hours = Math.ceil(minutes / 60);
+ return `0 */${hours} * * *`;
+ } else {
+ // For less than 60 minutes, use the minute field in cron
+ return `*/${minutes} * * * *`;
}
}
+export const capitalize = (str) => {
+ return str.charAt(0).toUpperCase()+str.slice(1);
+}
diff --git a/scripts/.gitkeep b/scripts/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/scripts/squash-all-co-author-notations.sh b/scripts/squash-all-co-author-notations.sh
new file mode 100644
index 00000000..de4ca620
--- /dev/null
+++ b/scripts/squash-all-co-author-notations.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+
+#https://gist.github.com/dskvr/a8b105aba87eba20a6a3d0228dcfb485
+# Exit immediately if a command exits with a non-zero status.
+set -e
+
+# Variables
+BRANCH_TO_SQUASH="main"
+NEW_BRANCH="legacy-squash"
+DRY_RUN=true # Set to false to perform actual operations
+
+# Checkout the branch
+git checkout $BRANCH_TO_SQUASH
+
+# Fetch all commits and extract authors
+AUTHORS=$(git log --format='%aN <%aE>' | sort | uniq)
+
+# Create the co-authors message
+CO_AUTHORS=""
+while IFS= read -r AUTHOR; do
+ CO_AUTHORS+="Co-authored-by: $AUTHOR"$'\n'
+done < <(echo "$AUTHORS")
+
+# Only check if a new branch exists and create it if not in dry run mode
+if [ "$DRY_RUN" != true ]; then
+ # Check if the new branch already exists
+ if git rev-parse --verify "$NEW_BRANCH" > /dev/null 2>&1; then
+ echo "Branch '$NEW_BRANCH' already exists. Exiting."
+ exit 1
+ fi
+
+ # Create a new branch for the squash
+ git checkout -b "$NEW_BRANCH"
+
+ # Reset to the first commit of the branch
+ FIRST_COMMIT=$(git rev-list --max-parents=0 HEAD)
+
+ git reset "$FIRST_COMMIT"
+
+ # Stage all changes
+ git add -A
+fi
+
+# Commit with all co-authors
+COMMIT_MESSAGE=$'Squashed commit \n\n'"${CO_AUTHORS}"
+
+# Dry run check
+if [ "$DRY_RUN" = true ]; then
+ echo "Dry run is enabled. Commit message would be:"
+ echo -e "$COMMIT_MESSAGE"
+else
+ git commit -m "$COMMIT_MESSAGE"
+ # Show the final commit message for verification
+ git log -1
+fi
\ No newline at end of file
diff --git a/tools/.gitkeep b/tools/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/tools/relaydb-cli/.editorconfig b/tools/relaydb-cli/.editorconfig
new file mode 100644
index 00000000..1c6314a3
--- /dev/null
+++ b/tools/relaydb-cli/.editorconfig
@@ -0,0 +1,12 @@
+root = true
+
+[*]
+indent_style = tab
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[*.yml]
+indent_style = space
+indent_size = 2
diff --git a/tools/relaydb-cli/.gitattributes b/tools/relaydb-cli/.gitattributes
new file mode 100644
index 00000000..6313b56c
--- /dev/null
+++ b/tools/relaydb-cli/.gitattributes
@@ -0,0 +1 @@
+* text=auto eol=lf
diff --git a/tools/relaydb-cli/.gitignore b/tools/relaydb-cli/.gitignore
new file mode 100644
index 00000000..76add878
--- /dev/null
+++ b/tools/relaydb-cli/.gitignore
@@ -0,0 +1,2 @@
+node_modules
+dist
\ No newline at end of file
diff --git a/tools/relaydb-cli/.prettierignore b/tools/relaydb-cli/.prettierignore
new file mode 100644
index 00000000..1521c8b7
--- /dev/null
+++ b/tools/relaydb-cli/.prettierignore
@@ -0,0 +1 @@
+dist
diff --git a/tools/relaydb-cli/lmdb/data.mdb b/tools/relaydb-cli/lmdb/data.mdb
new file mode 100644
index 00000000..d17b06fb
Binary files /dev/null and b/tools/relaydb-cli/lmdb/data.mdb differ
diff --git a/tools/relaydb-cli/lmdb/lock.mdb b/tools/relaydb-cli/lmdb/lock.mdb
new file mode 100644
index 00000000..78ae6735
Binary files /dev/null and b/tools/relaydb-cli/lmdb/lock.mdb differ
diff --git a/tools/relaydb-cli/package.json b/tools/relaydb-cli/package.json
new file mode 100644
index 00000000..27e9c167
--- /dev/null
+++ b/tools/relaydb-cli/package.json
@@ -0,0 +1,61 @@
+{
+ "name": "@nostrwatch/relaydb-cli",
+ "version": "0.0.0",
+ "license": "MIT",
+ "bin": "dist/cli.js",
+ "type": "module",
+ "engines": {
+ "node": ">=16"
+ },
+ "scripts": {
+ "build": "babel --out-dir=dist source",
+ "dev": "babel --out-dir=dist --watch source",
+ "test": "prettier --check . && xo && ava"
+ },
+ "files": [
+ "dist"
+ ],
+ "dependencies": {
+ "@tqman/ink-table": "0.0.0-development",
+ "figlet": "1.7.0",
+ "ink": "^4.1.0",
+ "ink-chart": "0.1.1",
+ "meow": "^11.0.0",
+ "react": "^18.2.0"
+ },
+ "devDependencies": {
+ "@babel/cli": "^7.21.0",
+ "@babel/preset-react": "^7.18.6",
+ "@vdemedes/prettier-config": "^2.0.1",
+ "ava": "^5.2.0",
+ "chalk": "^5.2.0",
+ "eslint-config-xo-react": "^0.27.0",
+ "eslint-plugin-react": "^7.32.2",
+ "eslint-plugin-react-hooks": "^4.6.0",
+ "import-jsx": "^5.0.0",
+ "ink-testing-library": "^3.0.0",
+ "prettier": "^2.8.7",
+ "xo": "^0.53.1"
+ },
+ "ava": {
+ "environmentVariables": {
+ "NODE_NO_WARNINGS": "1"
+ },
+ "nodeArguments": [
+ "--loader=import-jsx"
+ ]
+ },
+ "xo": {
+ "extends": "xo-react",
+ "prettier": true,
+ "rules": {
+ "react/prop-types": "off"
+ }
+ },
+ "prettier": "@vdemedes/prettier-config",
+ "babel": {
+ "presets": [
+ "@babel/preset-react"
+ ]
+ }
+}
diff --git a/tools/relaydb-cli/readme.md b/tools/relaydb-cli/readme.md
new file mode 100644
index 00000000..106b7ad2
--- /dev/null
+++ b/tools/relaydb-cli/readme.md
@@ -0,0 +1,25 @@
+# relaydb-cli
+
+> This readme was scaffolded by [create-ink-app](https://github.com/vadimdemedes/create-ink-app)
+
+## Install
+
+```bash
+$ npm install --global @nostrwatch/relaydb-cli
+```
+
+## CLI
+
+```
+$ relaydb --help
+
+  Usage
+    $ relaydb --dbpath
+
+  Options
+    --dbpath  Path to lmdb directory
+```
diff --git a/tools/relaydb-cli/source/app.js b/tools/relaydb-cli/source/app.js
new file mode 100644
index 00000000..cbab3f15
--- /dev/null
+++ b/tools/relaydb-cli/source/app.js
@@ -0,0 +1,155 @@
+import React, { useState, useEffect } from 'react';
+import { render, Box, Text, useInput } from 'ink';
+// const Table = await import('ink-table').then((module) => module.default);
+import { Table } from '@tqman/ink-table';
+import figlet from 'figlet';
+
+console.clear();
+
+function getRandomElement(arr) {
+ if (!Array.isArray(arr) || arr.length === 0) {
+ return null; // Return null if the input is not an array or is empty
+ }
+ const randomIndex = Math.floor(Math.random() * arr.length);
+ return arr[randomIndex];
+}
+
+
+const Header = () => {
+
+ const fonts = figlet.fontsSync()
+
+ const ascii = figlet.textSync("relaydb", {
+ font: 'ANSI Regular',
+ horizontalLayout: "default",
+ verticalLayout: "default",
+ width: 80,
+ whitespaceBreak: true,
+ })
+
+  return (
+    <Box flexDirection="column">
+      <Text>{ascii}</Text>
+      <Text>Some Static Information</Text>
+    </Box>
+  );
+};
+
+class Relays {
+ constructor(db) {
+ this.db = db
+ this.menu_item = "Relays";
+ this.content = `Loading Stats`;
+ this.tableData = [];
+ this.interval = 2000
+ }
+
+ updateData(){
+ const { Relay } = this.db.schemas
+ // console.log(this.db)
+ this.tableData = []
+ this.tableData.push({status: 'All', count: this.db.relay.count.all(), size: ''})
+ this.tableData.push({ status: 'Online', count: this.db.relay.count.online(), size: ''})
+ // this.tableData.push({status: 'Paid', count: this.db.relay.count.paid(), size: ''})
+ // this.tableData.push({status: 'Public', count: this.db.relay.count.public(), size: ''})
+ this.tableData.push({status: 'Dead', count: this.db.relay.count.dead(), size: ''})
+ this.tableData.push({status: 'Clearnet', count: this.db.relay.count.network('clearnet'), size: ''})
+ this.tableData.push({status: 'Tor', count: this.db.relay.count.network('tor'), size: ''})
+ this.tableData.push({status: 'I2P', count: this.db.relay.count.network('i2p'), size: ''})
+    this.tableData.push({status: 'CJDNS', count: this.db.relay.count.network('cjdns'), size: ''})
+ }
+
+ // Method to return dynamic content
+ getContent() {
+ return this.tableData
+ }
+}
+
+class Notes {
+ constructor(db) {
+ this.db = db
+ this.menu_item = "Notes:RelayLists";
+ this.content = `Loading Stats`;
+ this.tableData = [];
+ this.interval = 30000
+ }
+
+ updateData(){
+ this.tableData = []
+ this.tableData.push({Stat: 'All', Count: this.db.note.count.all(), Size: ''})
+ }
+
+ // Method to return dynamic content
+ getContent() {
+ return this.tableData
+ }
+}
+
+
+
+
+
+const App = ({ db }) => {
+ const [items, setItems] = useState([]);
+ const [selectedIndex, setSelectedIndex] = useState(0);
+ const [tableData, setTableData] = useState([]);
+
+ useInput((input, key) => {
+ if (key.upArrow) {
+ setSelectedIndex(prevIndex => (prevIndex > 0 ? prevIndex - 1 : 0));
+ } else if (key.downArrow) {
+ setSelectedIndex(prevIndex => (prevIndex < items.length - 1 ? prevIndex + 1 : items.length - 1));
+ }
+ console.log("New Selected Index:", selectedIndex);
+ });
+
+ useEffect(() => {
+ const loadedItems = [
+ new Relays(db),
+ new Notes(db)
+ ];
+ setItems(loadedItems);
+
+ // Function to update content based on the selected item
+ const updateContent = () => {
+ if (loadedItems.length > 0 && loadedItems[selectedIndex]) {
+ loadedItems[selectedIndex].updateData();
+ setTableData(loadedItems[selectedIndex].getContent());
+ }
+ };
+
+ // Set up the interval with the current item's interval time
+ const currentIntervalTime = loadedItems.length > 0 ? loadedItems[selectedIndex].interval : 1000; // default interval time
+ const intervalId = setInterval(updateContent, currentIntervalTime);
+
+ // Clean up function to clear the interval when component unmounts or dependencies change
+ return () => clearInterval(intervalId);
+ }, [selectedIndex, db]);
+
+
+
+  return (
+    <Box flexDirection="column">
+      <Header />
+      <Box>
+        {/* Left Column */}
+        <Box flexDirection="column" marginRight={2}>
+          {items.map((item, index) => (
+            <Text key={item.menu_item} inverse={index === selectedIndex}>
+              {item.menu_item}
+            </Text>
+          ))}
+        </Box>
+        {/* Right Column */}
+        <Box>
+          <Table data={tableData} />
+        </Box>
+      </Box>
+    </Box>
+  );
+};
+
+// render();
+
+export default App
\ No newline at end of file
diff --git a/tools/relaydb-cli/source/cli.js b/tools/relaydb-cli/source/cli.js
new file mode 100644
index 00000000..8d6b2425
--- /dev/null
+++ b/tools/relaydb-cli/source/cli.js
@@ -0,0 +1,24 @@
+#!/usr/bin/env node
+import React from 'react';
+import {render} from 'ink';
+import meow from 'meow';
+import App from './app.js';
+import lmdb from '../../relaydb/index.js'
+
+
+const cli = meow(
+ `
+ Usage
+ $ relaydb --dbpath
+
+ Options
+ --dbpath Path to lmdb directory
+ `,
+ {
+ importMeta: import.meta,
+ },
+);
+
+const db = lmdb(cli.flags.dbpath)
+
+render(<App db={db} />);
diff --git a/tools/relaydb-cli/test.js b/tools/relaydb-cli/test.js
new file mode 100644
index 00000000..43d87434
--- /dev/null
+++ b/tools/relaydb-cli/test.js
@@ -0,0 +1,17 @@
+import React from 'react';
+import chalk from 'chalk';
+import test from 'ava';
+import {render} from 'ink-testing-library';
+import App from './source/app.js';
+
+test('greet unknown user', t => {
+	const {lastFrame} = render(<App name={undefined} />);
+
+ t.is(lastFrame(), `Hello, ${chalk.green('Stranger')}`);
+});
+
+test('greet user with a name', t => {
+	const {lastFrame} = render(<App name="Jane" />);
+
+ t.is(lastFrame(), `Hello, ${chalk.green('Jane')}`);
+});
diff --git a/turbo.json b/turbo.json
new file mode 100644
index 00000000..3746b76c
--- /dev/null
+++ b/turbo.json
@@ -0,0 +1,12 @@
+{
+ "pipeline": {
+ "build": {
+ "dependsOn": ["^build"]
+ },
+ "lint": {},
+ "test": {
+ "dependsOn": ["^build"]
+ }
+ },
+ "$schema": "https://turborepo.org/schema.json"
+}