diff --git a/README.md b/README.md
index 3b11b9b..358f864 100644
--- a/README.md
+++ b/README.md
@@ -20,9 +20,6 @@
🎉 __*新增功能!*__ 基于 Echart 的 OpenBMCLAPI 仪表盘(Dashboard)。
-🎉 __*新增功能!*__ 基于 loguru 的**日志器**。
-
-
# 简介
@@ -109,10 +106,14 @@ web_port: 8800
web_publicport: 8800
```
+# 贡献
+
+如果你有能力,可以向我们的团队[发送邮件](mailto://administrator@ttb-network.top)或向团队所有者[发送邮件](mailto://silian_zheng@outlook.com),申请加入开发者行列。
+
# 鸣谢
[LiterMC/go-openbmclapi](https://github.com/LiterMC/go-openbmclapi)
[bangbang93/openbmclapi](https://github.com/bangbang93/openbmclapi)
-[SALTWOOD/CSharp-OpenBMCLAPI](https://github.com/SALTWOOD/CSharp-OpenBMCLAPI)
\ No newline at end of file
+[SALTWOOD/CSharp-OpenBMCLAPI](https://github.com/SALTWOOD/CSharp-OpenBMCLAPI)
diff --git a/container/bmclapi_dashboard/static/js/index.js b/container/bmclapi_dashboard/static/js/index.js
index 66190d5..a4df8bb 100644
--- a/container/bmclapi_dashboard/static/js/index.js
+++ b/container/bmclapi_dashboard/static/js/index.js
@@ -1,5 +1,5 @@
const UNIT_BYTES = [
- "", "K", "M", "G", "T", "E"
+ "K", "M", "G", "T", "E"
];
const calc_bits = (v) => {
v *= 8
@@ -112,10 +112,9 @@ const calc_more_bytes = (...values) => {
axios.get("master?url=/openbmclapi/metric/dashboard").then(response => {
if (response.status != 200) return
data = response.data
- console.log(data)
document.getElementById("t-clusters-nodes").innerText = data.currentNodes
document.getElementById("t-clusters-bandwidth").innerText = data.currentBandwidth.toFixed(2) + " M"
- document.getElementById("t-clusters-bytes").innerText = calc_bytes(data.bytes * 1024.0)
+ document.getElementById("t-clusters-bytes").innerText = calc_bytes(data.bytes)
document.getElementById("t-clusters-req").innerText = (data.hits / 10000).toFixed(2)
nodes = []
bytes = []
@@ -203,6 +202,100 @@ const calc_more_bytes = (...values) => {
).childWidth("33.33%", "33.33%", "33.33%").valueOf()
]
}
+ },
+ "dashboard": {
+ "connect": () => {
+ if (!("dashboard" in core_modules_locals)) {
+ core_modules_locals["dashboard"] = {
+ "refresh": () => {
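+                            // fetch the hourly/daily aggregates from the dashboard endpoint, then refresh the four hourly charts and the daily summary cards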
+ axios.get("/dashboard").then(resp => {
+ if (resp.status != 200) return
+ data = resp.data
+ req = Array.from({ length: 24 }, (_, __) => null)
+ hits = Array.from({ length: 24 }, (_, __) => null)
+ bandwidth = Array.from({ length: 24 }, (_, __) => null)
+ bytes = Array.from({ length: 24 }, (_, __) => null)
+ days = data.days[0]
+ for (day of data.days) {
+ if (days._day < day._day)
+ days = day
+ }
+ for (hourly of data.hourly) {
+ const hour = hourly._hour
+ req[hour] = (hourly.qps / 10000).toFixed(2)
+ hits[hour] = (hourly.hits / 10000).toFixed(2)
+ bandwidth[hour] = (hourly.bandwidth * 8 / 1024.0 / 1024.0).toFixed(2)
+ bytes[hour] = (hourly.bytes / 1024.0 / 1024.0 / 1024.0).toFixed(2)
+ }
+                            core_modules_locals["dashboard"]["req"]      .setOption({title: {text: "每小时请求分布(万)"}, tooltip:{formatter: e => e[0].data == null ? '' : ''},series: [{data: req}]})
+ core_modules_locals["dashboard"]["bytes"] .setOption({title: {text: "每小时流量分布(GiB)"}, tooltip:{formatter: e => e[0].data == null ? '' : ''},series: [{data: bytes}]})
+ core_modules_locals["dashboard"]["hit"] .setOption({title: {text: "每小时请求文件数(万)"}, tooltip:{formatter: e => e[0].data == null ? '' : ''},series: [{data: hits}]})
+ core_modules_locals["dashboard"]["bandwidth"] .setOption({title: {text: "每小时峰值出网带宽(Mbps)"}, tooltip:{formatter: e => e[0].data == null ? '' : ''},series: [{data: bandwidth}]})
+ document.getElementById("t-d-req").innerText = (days.qps / 10000).toFixed(2)
+ document.getElementById("t-d-bytes").innerText = calc_bytes(days.bytes)
+ document.getElementById("t-d-hit").innerText = (days.hit / 10000).toFixed(2)
+ document.getElementById("t-d-bandwidth").innerText = (days.bandwidth * 8 / 1024.0 / 1024.0).toFixed(2) + " M"
+ })
+ },
+ "bandwidth": echarts.init(document.getElementById("e-d-bandwidth")),
+ "bytes": echarts.init(document.getElementById("e-d-bytes")),
+ "req": echarts.init(document.getElementById("e-d-req")),
+ "hit": echarts.init(document.getElementById("e-d-hit")),
+ "load": echarts.init(document.getElementById("e-d-cpu")),
+ "options": {tooltip:{trigger:"axis",axisPointer:{type:"cross",label:{backgroundColor:"#0FC6C2"}}},grid:{left:"3%",right:"4%",bottom:"3%",containLabel:!0},xAxis:{type:"category",boundaryGap:!1,data:time_hours},yAxis:{type:"value",axisLabel:{formatter:"{value}"}},series:[{name:"",type:"line",stack:"",areaStyle:{},color:"#0FC6C2",symbol:"circle",symbolSize:4,data:[],smooth:!0,animationEasing:"cubicOut",animationDelay:function(t){return 10*t}}]},
+ }
+ core_modules_locals["dashboard"]["bandwidth"].setOption(core_modules_locals["dashboard"]["options"])
+ core_modules_locals["dashboard"]["bytes"] .setOption(core_modules_locals["dashboard"]["options"])
+ core_modules_locals["dashboard"]["req"] .setOption(core_modules_locals["dashboard"]["options"])
+ core_modules_locals["dashboard"]["hit"] .setOption(core_modules_locals["dashboard"]["options"])
+ }
+ core_modules_locals["dashboard"]["timer"] = setInterval(core_modules_locals["dashboard"].refresh, 30000)
+ core_modules_locals["dashboard"].refresh()
+ },
+ "page": () => [
+ ExtendFlex().append(
+ ExtendElement("div").append(
+ ExtendElement("div").css("panel").append(
+ ExtendElement("h4").text("当日出网峰值带宽").valueOf(),
+ ExtendElement("h2").append(
+ ExtendElement("span").text("0 ").id("t-d-bandwidth").valueOf(),
+ ExtendElement("span").text("bps").valueOf()
+ ).valueOf(),
+ ExtendElement("div").id("e-d-bandwidth").style("height: 216px; width: 100%").valueOf()
+ ).valueOf(),
+ ExtendElement("div").css("panel").append(
+ ExtendElement("h4").text("当日请求文件数").valueOf(),
+ ExtendElement("h2").append(
+ ExtendElement("span").text("0 ").id("t-d-hit").valueOf(),
+ ExtendElement("span").text("万").valueOf()
+ ).valueOf(),
+ ExtendElement("div").id("e-d-hit").style("height: 216px; width: 100%").valueOf()
+ ).valueOf(),
+ ),
+ ExtendElement("div").append(
+ ExtendElement("div").css("panel").append(
+ ExtendElement("h4").text("当日总流量").valueOf(),
+ ExtendElement("h2").append(
+ ExtendElement("span").text("0 ").id("t-d-bytes").valueOf(),
+ ExtendElement("span").text("iB").valueOf()
+ ).valueOf(),
+ ExtendElement("div").id("e-d-bytes").style("height: 216px; width: 100%").valueOf()
+ ).valueOf(),
+ ExtendElement("div").css("panel").append(
+ ExtendElement("h4").text("当日请求数").valueOf(),
+ ExtendElement("h2").append(
+ ExtendElement("span").text("0 ").id("t-d-req").valueOf(),
+ ExtendElement("span").text("万").valueOf()
+ ).valueOf(),
+ ExtendElement("div").id("e-d-req").style("height: 216px; width: 100%").valueOf()
+ ).valueOf(),
+ ),
+ ExtendElement("div").css("panel").append(
+ ExtendElement("h4").text("五分钟负载").valueOf(),
+ ExtendElement("div").id("e-d-cpu").style("height: 98%; width: 100%").valueOf()
+ ),
+ ).childWidth("33.33%", "33.33%", "33.33%").valueOf()
+ ]
}
}
const handler = ((root, key, type) => {
diff --git a/container/cluster.py b/container/cluster.py
index b79ea7f..350abf4 100644
--- a/container/cluster.py
+++ b/container/cluster.py
@@ -17,15 +17,15 @@
import utils
import stats
import web
-from logger import logger
+import logger
from tqdm import tqdm
PY_VERSION = "1.0.0"
VERSION = "1.9.7"
UA = f"openbmclapi-cluster/{VERSION} Python/{PY_VERSION}"
URL = 'https://openbmclapi.bangbang93.com/'
-COUNTER = stats.Counters()
-
+COUNTER = stats.counter
+LAST_COUNTER = stats.last_counter
@dataclass
class BMCLAPIFile:
path: str
@@ -127,7 +127,7 @@ async def check_file(self):
total = len(filelist)
byte = 0
miss = []
- pbar = tqdm(total=total, unit=' file(s)', unit_scale=True)
+ pbar = tqdm(file=logger.PRINTSTDOUT, total=total, unit=' file(s)', unit_scale=True)
pbar.set_description("Checking files")
for i, file in enumerate(filelist):
filepath = str(self.dir) + f"/{file.hash[:2]}/{file.hash}"
@@ -214,12 +214,12 @@ async def message(self, type, data):
logger.error("Error:" + data[0]['message'])
Timer.delay(self.enable)
elif type == "keep-alive":
- COUNTER.hit -= self.cur_counter.hit
- COUNTER.bytes -= self.cur_counter.bytes
+ LAST_COUNTER.hit += self.cur_counter.hit
+ LAST_COUNTER.bytes += self.cur_counter.bytes
self.keepalive = Timer.delay(self.keepaliveTimer, (), 5)
async def keepaliveTimer(self):
- self.cur_counter.hit = COUNTER.hit
- self.cur_counter.bytes = COUNTER.bytes
+ self.cur_counter.hit = COUNTER.hit - LAST_COUNTER.hit
+ self.cur_counter.bytes = COUNTER.bytes - LAST_COUNTER.bytes
await self.emit("keep-alive", {
"time": time.time(),
"hits": self.cur_counter.hit,
@@ -272,7 +272,7 @@ async def __call__(self) -> io.BytesIO:
if self.size == stat.st_size and self.last_file == stat.st_mtime:
self.last = time.time() + 1440
return self.buf
- self.buf.seek(0, os.SEEK_SET)
+ self.buf = io.BytesIO()
async with aiofiles.open(self.file, "rb") as r:
while (data := await r.read(min(config.IO_BUFFER, stat.st_size - self.buf.tell()))) and self.buf.tell() < stat.st_size:
self.buf.write(data)
@@ -288,6 +288,8 @@ async def init():
global storage
Timer.delay(storage.check_file)
app = web.app
+ def record_bandwidth(sent: int, recv: int):
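+        # registered from the download handler via request.client.set_log_network(): count bytes written back to clients so stats can track hourly peak bandwidth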
+ COUNTER.bandwidth += sent
@app.get("/measure/{size}")
async def _(request: web.Request, size: int, s: str, e: str):
#if not config.SKIP_SIGN:
@@ -302,12 +304,14 @@ async def _(request: web.Request, hash: str, s: str, e: str):
#if not config.SKIP_SIGN:
# check_sign(request.protocol + "://" + request.host + request.path, config.CLUSTER_SECRET, s, e)
file = Path(str(storage.dir) + "/" + hash[:2] + "/" + hash)
+ COUNTER.qps += 1
if not file.exists():
return web.Response(status_code=404)
if hash not in cache:
cache[hash] = FileCache(file)
data = await cache[hash]()
- COUNTER.bytes += len(data.getbuffer())
+ COUNTER.bytes += cache[hash].size
+ request.client.set_log_network(record_bandwidth)
COUNTER.hit += 1
return data.getbuffer()
router: web.Router = web.Router("/bmcl")
@@ -324,6 +328,12 @@ async def _(request: web.Request, url: str):
async with session.get(url) as resp:
content.write(await resp.read())
return content # type: ignore
+ @router.get("/dashboard")
+ async def _():
+ return {
+ "hourly": stats.hourly(),
+ "days": stats.days()
+ }
app.mount(router)
async def clearCache():
diff --git a/container/config.py b/container/config.py
index 72993f5..b226b7d 100644
--- a/container/config.py
+++ b/container/config.py
@@ -11,7 +11,7 @@ def __init__(self, path: str) -> None:
def load(self):
with open(self.file, "r", encoding="utf-8") as f:
- self.cfg = yaml.load(f.read(), Loader=yaml.FullLoader)
+ self.cfg = yaml.load(f.read(), Loader=yaml.FullLoader) or {}
def get(self, key, default_):
value = self.cfg.get(key, default_)
diff --git a/container/logger.py b/container/logger.py
index 6113c76..2f3c09f 100644
--- a/container/logger.py
+++ b/container/logger.py
@@ -1,4 +1,47 @@
-from loguru import logger
-from pathlib import Path
+from enum import Enum
+import inspect
+import io
+import sys
+import time
+STDOUT = sys.stdout
+class Stdout(io.StringIO):
+ def write(self, __s: str) -> int:
+ return STDOUT.write(__s)
+ def flush(self) -> None:
+ return STDOUT.flush()
+ def seek(self, __cookie: int, __whence: int = 0) -> int:
+ return STDOUT.seek(__cookie, __whence)
+sys.stdout = Stdout()
-logger.add(Path("./logs/{time}.log"), rotation="3 hours")
+class PrintStdout(io.StringIO):
+ def write(self, __s: str) -> int:
+ info(__s.lstrip("\r"), flush=True)
+ return len(__s)
+PRINTSTDOUT = PrintStdout()
+
+class Level(Enum):
+ DEBUG = 0
+ INFO = 1
+ WARNING = 2
+ ERROR = 3
+LevelColors: dict[Level, str] = {
+ Level.DEBUG: "reset",
+ Level.INFO: "green",
+ Level.WARNING: "yellow",
+ Level.ERROR: "red"
+}
+
+def logger(*values, level: Level, flush: bool = False, stack: list[inspect.FrameInfo]):
+    # assumption: main.py (the launcher) strips a "<<<key:value,...>>>" prefix from each line and uses flush/color/time to render it
+    print(f"<<<flush:{flush},color:{LevelColors[level]},time:{time.time()}>>>[{level.name}]", *values)
+
+def info(*values, flush: bool = False):
+ return logger(*values, flush=flush, level=Level.INFO, stack=inspect.stack())
+
+def error(*values, flush: bool = False):
+ return logger(*values, flush=flush, level=Level.ERROR, stack=inspect.stack())
+
+def warning(*values, flush: bool = False):
+ return logger(*values, flush=flush, level=Level.WARNING, stack=inspect.stack())
+
+def debug(*values, flush: bool = False):
+ return logger(*values, flush=flush, level=Level.DEBUG, stack=inspect.stack())
\ No newline at end of file
diff --git a/container/main.py b/container/main.py
index cdced88..c527826 100644
--- a/container/main.py
+++ b/container/main.py
@@ -1,3 +1,9 @@
+from datetime import datetime
+import os
+import time
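+# publish the local UTC offset (whole hours) through os.environ["UTC"]; stats.py reads it to shift hourly rows to local time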
+cur = time.time()
+os.environ["UTC"] = str(int((datetime.fromtimestamp(cur) - datetime.utcfromtimestamp(cur)).total_seconds() / 3600))
+
if __name__ == "__main__":
import web
web.init()
\ No newline at end of file
diff --git a/container/stats.py b/container/stats.py
index d716353..0f744f0 100644
--- a/container/stats.py
+++ b/container/stats.py
@@ -1,30 +1,159 @@
-
-from dataclasses import dataclass
-from pathlib import Path
-
-from utils import FileDataInputStream, FileDataOutputStream
-from timer import Timer # type: ignore
-
-
-@dataclass
-class Counters:
- hit: int = 0
- bytes: int = 0
-
-counter = Counters()
-cache: Path = Path("./cache")
-cache.mkdir(exist_ok=True, parents=True)
-def write():
- with open("./cache/stats_count.bin", "wb") as w:
- f = FileDataOutputStream(w)
- f.writeVarInt(counter.hit)
- f.writeVarInt(counter.bytes)
-
-def read():
- if Path("./cache/stats_count.bin").exists():
- with open("stats_count.bin", "rb") as r:
- f = FileDataInputStream(r)
- counter.hit += f.readVarInt()
- counter.bytes += f.readVarInt()
-
-Timer.repeat(write, (), 0.01, 0.1)
+
+from dataclasses import dataclass, asdict
+import os
+from pathlib import Path
+import sqlite3
+import time
+from typing import Any
+
+from utils import FileDataInputStream, FileDataOutputStream
+from timer import Timer # type: ignore
+
+
+@dataclass
+class Counters:
+ hit: int = 0
+ bytes: int = 0
+ qps: int = 0
+ bandwidth: int = 0
+
+cache: Path = Path("./cache")
+cache.mkdir(exist_ok=True, parents=True)
+
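+# hourly stats are persisted to SQLite: `Time` is an epoch-hour index (time.time() // 3600) and `bandwidth` keeps the peak value seen within that hour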
+db: sqlite3.Connection = sqlite3.Connection("./cache/stats.db")
+db.execute("create table if not exists `Stats`(Time numeric not null, hits numeric default 0, bytes numeric default 0, qps numeric default 0, bandwidth numeric default 0)")
+db.commit()
+counter = Counters()
+last_counter = Counters()
+last_time: int = 0
+def write():
+ global counter, last_counter
+ with open("./cache/stats_count.bin", "wb") as w:
+ f = FileDataOutputStream(w)
+ f.writeVarInt(counter.hit)
+ f.writeVarInt(counter.bytes)
+ f.writeVarInt(counter.qps)
+ f.writeVarInt(counter.bandwidth)
+ f.writeVarInt(last_counter.hit)
+ f.writeVarInt(last_counter.bytes)
+
+def read():
+ global counter, last_counter
+ if Path("./cache/stats_count.bin").exists():
+ with open("./cache/stats_count.bin", "rb") as r:
+ hit = 0
+ bytes = 0
+ qps = 0
+ bandwidth = 0
+ last_hit = 0
+ last_bytes = 0
+ try:
+ f = FileDataInputStream(r)
+ hit += f.readVarInt()
+ bytes += f.readVarInt()
+ qps += f.readVarInt()
+ bandwidth += f.readVarInt()
+ last_hit += f.readVarInt()
+ last_bytes += f.readVarInt()
+ counter.hit += hit
+ counter.bytes += bytes
+ counter.qps += qps
+ counter.bandwidth += bandwidth
+ last_counter.hit += last_hit
+ last_counter.bytes += last_bytes
+ except:
+ ...
+
+def execute(cmd: str, *params) -> None:
+ global db
+ db.execute(cmd, params)
+ db.commit()
+
+def executemany(*cmds: tuple[str, tuple[Any, ...]]) -> None:
+ global db
+ for cmd in cmds:
+ db.execute(*cmd)
+ db.commit()
+
+def query(cmd: str, *params) -> list[Any]:
+ global db
+ cur = db.execute(cmd, params)
+ return cur.fetchone() or []
+
+def queryAllData(cmd: str, *params) -> list[tuple]:
+ global db
+ cur = db.execute(cmd, params)
+ return cur.fetchall() or []
+
+def exists(cmd: str, *params) -> bool:
+ return len(query(cmd, *params)) != 0
+
+def columns(table):
+    # sqlite3 has no SHOW COLUMNS; PRAGMA table_info returns (cid, name, type, ...), so take index 1 for the column name
+    return [q[1] for q in queryAllData(f'PRAGMA table_info({table})')]
+
+async def addColumns(table, params, data, default=None):
+ if params not in columns(table):
+ execute(f'ALTER TABLE {table} ADD COLUMN {params} {data}')
+ if default is not None:
+ execute(f'UPDATE {table} SET {params}={default}')
+
+def write_database():
+ global last_time, counter
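+    # flush the in-memory counters into the current hour's row; once the hour rolls over, reset them for the new hour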
+ hits = counter.hit
+ bytes = counter.bytes
+ qps = counter.qps
+ bandwidth = counter.bandwidth
+ if hits == bytes == qps == bandwidth == 0:
+ return
+ t = int(time.time() // 3600)
+ if last_time != t and not exists("select `Time` from `Stats` where `Time` = ?", t):
+ execute("insert into `Stats`(`Time`) values (?)", t)
+ executemany(("update `Stats` set `hits` = ?, `bytes` = ?, `qps` = ? where `Time` = ?", (hits, bytes, qps, t)),
+ ("update `Stats` set `bandwidth` = ? where `Time` = ? and `bandwidth` < ?", (bandwidth, t, bandwidth)))
+ counter.bandwidth = 0
+ if last_time != 0 and last_time != t:
+ counter.hit = 0
+ counter.bytes = 0
+ counter.bandwidth = 0
+ counter.qps = 0
+ last_counter.hit = 0
+ last_counter.bytes = 0
+ last_time = t
+
+def hourly():
+ t = int(time.time() // 86400) * 24
+ data = []
+ for r in queryAllData("select `Time`, `hits`, `bytes`, `qps`, `bandwidth` from `Stats` where `Time` >= ?", t):
+ hour = r[0] - t + int(os.environ["UTC"])
+ data.append(
+ {"_hour": hour,
+ "hits": r[1],
+ "bytes": r[2],
+ "qps": r[3],
+ "bandwidth": r[4]
+ }
+ )
+ return data
+
+def days():
+ t = (int(time.time() // 86400) - 30) * 24
+ data = []
+ days: dict[int, Counters] = {}
+ for r in queryAllData("select `Time`, `hits`, `bytes`, `qps`, `bandwidth` from `Stats` where `Time` >= ?", t):
+ hour = (r[0] - t + int(os.environ["UTC"])) // 24
+ if hour not in days:
+ days[hour] = Counters()
+ days[hour].hit += r[1]
+ days[hour].bytes += r[2]
+ days[hour].qps += r[3]
+ days[hour].bandwidth += r[4]
+ for day in sorted(days.keys()):
+ data.append({
+ "_day": day,
+ **asdict(days[day])
+ })
+ return data
+Timer.repeat(write, (), 0.01, 0.1)
+Timer.repeat(write_database, (), 1, 1)
+read()
\ No newline at end of file
diff --git a/container/timer.py b/container/timer.py
index 4928060..a149cfa 100644
--- a/container/timer.py
+++ b/container/timer.py
@@ -2,7 +2,7 @@
import inspect
import time
import traceback
-from logger import logger
+import logger
class Task:
diff --git a/container/utils.py b/container/utils.py
index 0a15ac1..7ed3ec6 100644
--- a/container/utils.py
+++ b/container/utils.py
@@ -118,7 +118,7 @@ def set_log_network(self, handler):
def record_network(self, sent: int, recv: int):
if not self.log_network:
return
- self.log_network(self, sent, recv)
+ self.log_network(sent, recv)
def parse_obj_as_type(obj: Any, type_: Type[Any]) -> Any:
if obj is None:
diff --git a/container/web.py b/container/web.py
index 4b86270..11d7433 100644
--- a/container/web.py
+++ b/container/web.py
@@ -19,7 +19,7 @@
import config
import filetype
import urllib.parse as urlparse
-from logger import logger
+import logger
class Route:
@@ -494,6 +494,7 @@ async def handle(data, client: Client):
await resp(request, client)
await request.skip()
logger.info(request.get_request_time(), "|", request.method.ljust(6), request.get_status_code(), "|", request.get_ip().ljust(16), "|", request.url, request.get_user_agent())
+ request.client.set_log_network(None)
except TimeoutError:
...
except:
@@ -556,6 +557,7 @@ async def main():
if server:
server.close()
logger.error(traceback.format_exc())
+ await asyncio.sleep(2)
@app.get("/favicon.ico")
async def _():
diff --git a/main.py b/main.py
index c9fb98c..51c7996 100644
--- a/main.py
+++ b/main.py
@@ -1,12 +1,67 @@
+import os
from pathlib import Path
import queue
+import re
import subprocess
import threading
+import traceback
from typing import Optional
import sys
import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEvent, FileSystemEventHandler
+
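+# maps "<color>"/"<color/>" tags in the child's log lines to ANSI escape codes; self.ansi guesses whether the terminal supports them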
+class PrintColor:
+ def __init__(self) -> None:
+ self.ansi = (
+ (sys.platform != 'Pocket PC') and
+ (
+ (sys.platform != 'win32') or
+ ('ANSICON' in os.environ)
+ ) and
+ (
+ sys.stdout.isatty() or
+ (sys.platform == 'win32')
+ ) and
+ (
+ 'TERM' not in os.environ or
+ (
+ os.environ['TERM'].lower() in ('xterm', 'linux', 'screen', 'vt100', 'cygwin', 'ansicon') and
+ os.environ['TERM'].lower() not in ('dumb', 'emacs', 'emacs-24', 'xterm-mono')
+ )
+ )
+ )
+ self.colors = {
+ 'reset': '\033[0m',
+ 'red': '\033[31m',
+ 'green': '\033[32m',
+ 'yellow': '\033[33m',
+ 'blue': '\033[34m',
+ 'magenta': '\033[35m',
+ 'cyan': '\033[36m',
+ 'white': '\033[37m',
+ 'black': '\033[30m',
+ }
+ self.open_tag_pattern = r'<(\w+)>'
+ self.close_tag_pattern = r'<(\w+)/>'
+
+    def parse(self, text: str):
+        # rewrite "<color>"/"<color/>" tags into ANSI escapes; strip them when ANSI is unsupported
+        current_color = self.colors['reset']
+        open_tags = re.findall(self.open_tag_pattern, text)
+        for tag in open_tags:
+            if tag in self.colors and self.ansi:
+                text = text.replace(f'<{tag}>', self.colors[tag], 1)
+                current_color = self.colors[tag]
+            else:
+                text = text.replace(f'<{tag}>', '', 1)
+        close_tags = re.findall(self.close_tag_pattern, text)
+        for tag in close_tags:
+            if self.ansi and self.colors.get(tag, '') == current_color:
+                text = text.replace(f'<{tag}/>', self.colors['reset'], 1)
+                current_color = self.colors['reset']
+            else:
+                text = text.replace(f'<{tag}/>', '', 1)
+        return text
+
+printColor = PrintColor()
encoding = sys.getdefaultencoding()
process: Optional[subprocess.Popen] = None
stdout = None
@@ -55,10 +110,25 @@ def _err():
else:
output.put(line)
-def _parse(params):
+def _parse(params: str):
kwargs = {}
- if "flush" in params:
- kwargs["flush"] = True
+ for item in params.split(","):
+ if ':' not in item:
+ continue
+ k, v = item.split(":", 1)
+ if v == "True":
+ v = True
+ elif v == "False":
+ v = False
+        else:
+            # prefer int over float so whole numbers stay ints (float() accepts anything int() does)
+            try:
+                v = int(v)
+            except ValueError:
+                try:
+                    v = float(v)
+                except ValueError:
+                    ...
+        kwargs[k] = v
return kwargs
def _print():
global output, last_output_length, last_flush
@@ -71,22 +141,28 @@ def _print():
msg = msg.decode("gbk")
except:
msg = repr(msg)
- msg = msg.removesuffix("\n")
- date = time.localtime()
- date = f"[{date.tm_year:04d}-{date.tm_mon:02d}-{date.tm_mday:02d} {date.tm_hour:02d}:{date.tm_min:02d}:{date.tm_sec:02d}]"
- kwargs: dict = {}
- flush: bool = False
- if msg.startswith("<<<") and ">>>" in msg:
- kwargs = _parse(msg[3:msg.find(">>>")])
- msg = msg[msg.find(">>>") + 3:]
- flush = kwargs.get("flush", False)
- text = f"{date} {msg}"
- if flush:
- sys.stdout.write('\r' + ' ' * (last_output_length + 16) + '\r')
- sys.stdout.flush()
- last_output_length = len(text)
- print(text + ('\n' if not flush else ''), end='', flush=flush)
- last_flush = flush
+ try:
+ msg = msg.removesuffix("\n")
+ date = time.localtime()
+ kwargs: dict = {}
+ flush: bool = False
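+        # a "<<<key:value,...>>>" prefix carries render metadata for the line (flush, color, time) from the child process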
+ if msg.startswith("<<<") and ">>>" in msg:
+ kwargs = _parse(msg[3:msg.find(">>>")])
+ msg = msg[msg.find(">>>") + 3:]
+ flush = kwargs.get("flush", False)
+ if 'time' in kwargs:
+ date = time.localtime(kwargs["time"])
+ date = f"[{date.tm_year:04d}-{date.tm_mon:02d}-{date.tm_mday:02d} {date.tm_hour:02d}:{date.tm_min:02d}:{date.tm_sec:02d}]"
+ text = printColor.parse(f"<{kwargs.get('color', 'reset')}>{date} {msg}")
+ if flush:
+ sys.stdout.write('\r' + ' ' * (last_output_length + 16) + '\r')
+ sys.stdout.flush()
+ last_output_length = len(text)
+ print(text + ('\n' if not flush else ''), end='', flush=flush)
+ last_flush = flush
+ except:
+ traceback.print_exc()
+ ...
class MyHandler(FileSystemEventHandler):
def on_any_event(self, event: FileSystemEvent) -> None: