Skip to content

Commit

Permalink
(bug) bug fix on cap (#19)
Browse files Browse the repository at this point in the history
* (bug) bug fix on cap

* (fix) log level

* (fix) revive
  • Loading branch information
kevincobain2000 authored Aug 7, 2024
1 parent 94438a6 commit 4546c27
Showing 1 changed file with 87 additions and 21 deletions.
108 changes: 87 additions & 21 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,13 @@ type Flags struct {
dbPath string
post string

min int
every uint64
proxy string
logLevel int
msTeamsHook string
version bool
min int
every uint64
healthCheckEvery uint64
proxy string
logLevel int
msTeamsHook string
version bool
}

var f Flags
Expand All @@ -42,18 +43,6 @@ func main() {
parseProxy()
wantsVersion()
validate()
slog.Info("Flags",
"filePath", f.filePath,
"match", f.match,
"ignore", f.ignore,
"dbPath", f.dbPath,
"min", f.min,
"every", f.every,
"version", f.version,
"loglevel", f.logLevel,
"proxy", f.proxy,
"msTeamsHook", f.msTeamsHook,
)

var err error
newFilePaths, err := pkg.FilesByPattern(f.filePath)
Expand All @@ -66,11 +55,16 @@ func main() {
return
}
if len(newFilePaths) > f.filePathsCap {
slog.Error("Too many files found", "count", len(newFilePaths), "cap", f.filePathsCap)
slog.Warn("Too many files found", "count", len(newFilePaths), "cap", f.filePathsCap)
slog.Info("Capping to", "count", f.filePathsCap)
}

filePaths = newFilePaths[:f.filePathsCap]
capped := f.filePathsCap
if capped > len(newFilePaths) {
capped = len(newFilePaths)
}

filePaths = newFilePaths[:capped]

for _, filePath := range filePaths {
isText, err := pkg.IsTextFile(filePath)
Expand Down Expand Up @@ -100,6 +94,12 @@ func main() {
slog.Error("Error scheduling syncFilePaths", "error", err.Error())
return
}
if f.healthCheckEvery > 0 {
if err := gocron.Every(f.healthCheckEvery).Second().Do(sendHealthCheck); err != nil {
slog.Error("Error scheduling health check", "error", err.Error())
return
}
}
<-gocron.Start()
}
}
Expand Down Expand Up @@ -131,10 +131,75 @@ func syncFilePaths() {
}

filePathsMutex.Lock()
filePaths = newFilePaths[:f.filePathsCap]
capped := f.filePathsCap
if capped > len(newFilePaths) {
capped = len(newFilePaths)
}

filePaths = newFilePaths[:capped]

filePathsMutex.Unlock()
}

// sendHealthCheck posts a periodic status summary to the configured MS Teams
// webhook: the static watcher configuration plus one entry per watched file.
// It is a no-op when no webhook is configured. Scheduled via gocron when
// f.healthCheckEvery > 0.
func sendHealthCheck() {
	if f.msTeamsHook == "" {
		return
	}

	// Snapshot the shared file list under the same mutex syncFilePaths uses
	// for writes; ranging over the global unguarded would race with a
	// concurrently running sync job.
	filePathsMutex.Lock()
	watched := make([]string, len(filePaths))
	copy(watched, filePaths)
	filePathsMutex.Unlock()

	// 9 fixed entries + one per watched file.
	details := make([]gmt.Details, 0, 9+len(watched))
	details = append(details,
		gmt.Details{
			Label:   "Health Check",
			Message: "All OK, go-watch-logs is running actively.",
		},
		gmt.Details{
			Label:   "Next Ping",
			Message: fmt.Sprintf("%d seconds", f.healthCheckEvery),
		},
		gmt.Details{
			Label:   "Version",
			Message: version,
		},
		gmt.Details{
			Label:   "File Path Pattern",
			Message: f.filePath,
		},
		gmt.Details{
			Label:   "File Path Cap",
			Message: fmt.Sprintf("%d", f.filePathsCap),
		},
		gmt.Details{
			Label:   "Match Pattern",
			Message: f.match,
		},
		gmt.Details{
			Label:   "Ignore Pattern",
			Message: f.ignore,
		},
		gmt.Details{
			Label:   "Min Errors Threshold",
			Message: fmt.Sprintf("%d", f.min),
		},
		gmt.Details{
			Label:   "Monitoring Every",
			Message: fmt.Sprintf("%d", f.every),
		},
	)
	for idx, filePath := range watched {
		details = append(details, gmt.Details{
			Label:   fmt.Sprintf("File Path %d", idx+1),
			Message: filePath,
		})
	}

	// Best effort: a hostname lookup failure just yields an empty sender name.
	hostname, _ := os.Hostname()

	if err := gmt.Send(hostname, details, f.msTeamsHook, f.proxy); err != nil {
		slog.Error("Error sending to Teams", "error", err.Error())
		return
	}
	slog.Info("Successfully sent to MS Teams")
}

func validate() {
if f.filePath == "" {
slog.Error("file-path is required")
Expand Down Expand Up @@ -230,6 +295,7 @@ func flags() {
flag.StringVar(&f.ignore, "ignore", "", "regex for ignoring errors (empty to ignore none)")
flag.StringVar(&f.post, "post", "", "run this shell command after every scan")
flag.Uint64Var(&f.every, "every", 0, "run every n seconds (0 to run once)")
flag.Uint64Var(&f.healthCheckEvery, "health-check-every", 86400, "run health check every n seconds (0 to disable)")
flag.IntVar(&f.logLevel, "log-level", 0, "log level (0=info, 1=debug)")
flag.IntVar(&f.filePathsCap, "file-paths-cap", 100, "max number of file paths to watch")
flag.IntVar(&f.min, "min", 1, "on minimum num of matches, it should notify")
Expand Down

0 comments on commit 4546c27

Please sign in to comment.