@@ -22,6 +22,7 @@ import (
	"errors"
	"fmt"
	"io"
+	"log/slog"
	"net"
	"net/http"
	"net/url"
@@ -32,13 +33,11 @@ import (
	"github.com/Showmax/go-fqdn"
	"github.com/alecthomas/kingpin/v2"
	"github.com/cenkalti/backoff/v4"
-	"github.com/go-kit/log"
-	"github.com/go-kit/log/level"
	"github.com/prometheus-community/pushprox/util"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
-	"github.com/prometheus/common/promlog"
-	"github.com/prometheus/common/promlog/flag"
+	"github.com/prometheus/common/promslog"
+	"github.com/prometheus/common/promslog/flag"
)

var (
@@ -89,11 +88,11 @@ func newBackOffFromFlags() backoff.BackOff {

// Coordinator for scrape requests and responses
type Coordinator struct {
-	logger log.Logger
+	logger *slog.Logger
}

func (c *Coordinator) handleErr(request *http.Request, client *http.Client, err error) {
-	level.Error(c.logger).Log("err", err)
+	c.logger.Error("coordinator error", "error", err)
	scrapeErrorCounter.Inc()
	resp := &http.Response{
		StatusCode: http.StatusInternalServerError,
@@ -102,14 +101,14 @@ func (c *Coordinator) handleErr(request *http.Request, client *http.Client, err
	}
	if err = c.doPush(resp, request, client); err != nil {
		pushErrorCounter.Inc()
-		level.Warn(c.logger).Log("msg", "Failed to push failed scrape response:", "err", err)
+		c.logger.Warn("Failed to push failed scrape response:", "err", err)
		return
	}
-	level.Info(c.logger).Log("msg", "Pushed failed scrape response")
+	c.logger.Info("Pushed failed scrape response")
}

func (c *Coordinator) doScrape(request *http.Request, client *http.Client) {
-	logger := log.With(c.logger, "scrape_id", request.Header.Get("id"))
+	logger := c.logger.With("scrape_id", request.Header.Get("id"))
	timeout, err := util.GetHeaderTimeout(request.Header)
	if err != nil {
		c.handleErr(request, client, err)
@@ -137,13 +136,13 @@ func (c *Coordinator) doScrape(request *http.Request, client *http.Client) {
		c.handleErr(request, client, fmt.Errorf("failed to scrape %s: %w", request.URL.String(), err))
		return
	}
-	level.Info(logger).Log("msg", "Retrieved scrape response")
+	logger.Info("Retrieved scrape response")
	if err = c.doPush(scrapeResp, request, client); err != nil {
		pushErrorCounter.Inc()
-		level.Warn(logger).Log("msg", "Failed to push scrape response:", "err", err)
+		logger.Warn("Failed to push scrape response:", "err", err)
		return
	}
-	level.Info(logger).Log("msg", "Pushed scrape result")
+	logger.Info("Pushed scrape result")
}

// Report the result of the scrape back up to the proxy.
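Side note (not part of the diff): the go-kit/log to log/slog call-site mapping used throughout this change, as a minimal self-contained Go sketch. The error value and scrape id below are hypothetical, and any *slog.Logger (including the one promslog.New returns) supports these calls.

```go
package main

import (
	"errors"
	"log/slog"
	"os"
)

func main() {
	// Any *slog.Logger works here; promslog.New returns one as well.
	logger := slog.New(slog.NewTextHandler(os.Stderr, nil))

	// Old style: level.Info(logger).Log("msg", "Retrieved scrape response")
	// New style: the message is a positional argument, followed by key/value pairs.
	logger.Info("Retrieved scrape response")

	// Old style: level.Warn(logger).Log("msg", "Failed to push scrape response:", "err", err)
	err := errors.New("connection refused") // hypothetical error for illustration
	logger.Warn("Failed to push scrape response:", "err", err)

	// Old style: log.With(logger, "scrape_id", id)
	// New style: With returns a derived *slog.Logger that attaches the attribute to every record.
	scrapeLogger := logger.With("scrape_id", "42") // hypothetical scrape id
	scrapeLogger.Info("Pushed scrape result")
}
```

The practical difference is that slog takes the message as a positional argument and the remaining arguments as key/value pairs, so the explicit "msg" key from go-kit/log disappears.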
@@ -182,28 +181,28 @@ func (c *Coordinator) doPush(resp *http.Response, origRequest *http.Request, cli
func (c *Coordinator) doPoll(client *http.Client) error {
	base, err := url.Parse(*proxyURL)
	if err != nil {
-		level.Error(c.logger).Log("msg", "Error parsing url:", "err", err)
+		c.logger.Error("Error parsing url:", "err", err)
		return fmt.Errorf("error parsing url: %w", err)
	}
	u, err := url.Parse("poll")
	if err != nil {
-		level.Error(c.logger).Log("msg", "Error parsing url:", "err", err)
+		c.logger.Error("Error parsing url:", "err", err)
		return fmt.Errorf("error parsing url poll: %w", err)
	}
	url := base.ResolveReference(u)
	resp, err := client.Post(url.String(), "", strings.NewReader(*myFqdn))
	if err != nil {
-		level.Error(c.logger).Log("msg", "Error polling:", "err", err)
+		c.logger.Error("Error polling:", "err", err)
		return fmt.Errorf("error polling: %w", err)
	}
	defer resp.Body.Close()

	request, err := http.ReadRequest(bufio.NewReader(resp.Body))
	if err != nil {
-		level.Error(c.logger).Log("msg", "Error reading request:", "err", err)
+		c.logger.Error("Error reading request:", "err", err)
		return fmt.Errorf("error reading request: %w", err)
	}
-	level.Info(c.logger).Log("msg", "Got scrape request", "scrape_id", request.Header.Get("id"), "url", request.URL)
+	c.logger.Info("Got scrape request", "scrape_id", request.Header.Get("id"), "url", request.URL)

	request.RequestURI = ""

@@ -221,32 +220,32 @@ func (c *Coordinator) loop(bo backoff.BackOff, client *http.Client) {
		if err := backoff.RetryNotify(op, bo, func(err error, _ time.Duration) {
			pollErrorCounter.Inc()
		}); err != nil {
-			level.Error(c.logger).Log("err", err)
+			c.logger.Error("backoff returned error", "error", err)
		}
	}
}

func main() {
-	promlogConfig := promlog.Config{}
-	flag.AddFlags(kingpin.CommandLine, &promlogConfig)
+	promslogConfig := promslog.Config{}
+	flag.AddFlags(kingpin.CommandLine, &promslogConfig)
	kingpin.HelpFlag.Short('h')
	kingpin.Parse()
-	logger := promlog.New(&promlogConfig)
+	logger := promslog.New(&promslogConfig)
	coordinator := Coordinator{logger: logger}

	if *proxyURL == "" {
-		level.Error(coordinator.logger).Log("msg", "--proxy-url flag must be specified.")
+		coordinator.logger.Error("--proxy-url flag must be specified.")
		os.Exit(1)
	}
	// Make sure proxyURL ends with a single '/'
	*proxyURL = strings.TrimRight(*proxyURL, "/") + "/"
-	level.Info(coordinator.logger).Log("msg", "URL and FQDN info", "proxy_url", *proxyURL, "fqdn", *myFqdn)
+	coordinator.logger.Info("URL and FQDN info", "proxy_url", *proxyURL, "fqdn", *myFqdn)

	tlsConfig := &tls.Config{}
	if *tlsCert != "" {
		cert, err := tls.LoadX509KeyPair(*tlsCert, *tlsKey)
		if err != nil {
-			level.Error(coordinator.logger).Log("msg", "Certificate or Key is invalid", "err", err)
+			coordinator.logger.Error("Certificate or Key is invalid", "err", err)
			os.Exit(1)
		}

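Side note (not part of the diff): a minimal sketch of how the promslog pieces in main() fit together, under the assumption that promslog/flag registers the same --log.level and --log.format flags the old promlog/flag package did; the "component" attribute below is purely illustrative.

```go
package main

import (
	"github.com/alecthomas/kingpin/v2"
	"github.com/prometheus/common/promslog"
	promslogflag "github.com/prometheus/common/promslog/flag"
)

func main() {
	// Config holds the log level and format chosen on the command line.
	cfg := promslog.Config{}

	// Register the logging flags on kingpin's default command line, then parse.
	promslogflag.AddFlags(kingpin.CommandLine, &cfg)
	kingpin.Parse()

	// New returns a *slog.Logger, which is what Coordinator.logger now expects.
	logger := promslog.New(&cfg)
	logger.Info("logger configured", "component", "example") // hypothetical attribute
}
```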
@@ -257,12 +256,12 @@ func main() {
	if *caCertFile != "" {
		caCert, err := os.ReadFile(*caCertFile)
		if err != nil {
-			level.Error(coordinator.logger).Log("msg", "Not able to read cacert file", "err", err)
+			coordinator.logger.Error("Not able to read cacert file", "err", err)
			os.Exit(1)
		}
		caCertPool := x509.NewCertPool()
		if ok := caCertPool.AppendCertsFromPEM(caCert); !ok {
-			level.Error(coordinator.logger).Log("msg", "Failed to use cacert file as ca certificate")
+			coordinator.logger.Error("Failed to use cacert file as ca certificate")
			os.Exit(1)
		}

@@ -272,7 +271,7 @@ func main() {
	if *metricsAddr != "" {
		go func() {
			if err := http.ListenAndServe(*metricsAddr, promhttp.Handler()); err != nil {
-				level.Warn(coordinator.logger).Log("msg", "ListenAndServe", "err", err)
+				coordinator.logger.Warn("ListenAndServe", "err", err)
			}
		}()
	}
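Side note (not part of the diff): because Coordinator now holds a plain *slog.Logger, callers are not tied to promslog. The hypothetical quietCoordinator helper below sketches wiring in a standard-library handler that discards output, e.g. for tests.

```go
package main

import (
	"io"
	"log/slog"
)

// Coordinator mirrors the struct from the diff: it only needs a *slog.Logger.
type Coordinator struct {
	logger *slog.Logger
}

// quietCoordinator is a hypothetical helper showing that any slog handler
// can back the coordinator, including one that writes nowhere.
func quietCoordinator() Coordinator {
	return Coordinator{logger: slog.New(slog.NewTextHandler(io.Discard, nil))}
}

func main() {
	c := quietCoordinator()
	c.logger.Info("this record is discarded")
}
```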