@@ -8,39 +8,47 @@ import (
	"io/ioutil"
	"net/http"
	"os"
+	"path/filepath"
	"sort"
+	"strings"
	"time"

-	cm "github.com/cleafy/promqueen/model"
+	cm "github.com/Cleafy/promqueen/model"
+
	"github.com/mattetti/filebuffer"
	dto "github.com/prometheus/client_model/go"
	"github.com/prometheus/common/expfmt"
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/storage/local"
	"github.com/sirupsen/logrus"
	kingpin "gopkg.in/alecthomas/kingpin.v2"
+	pb "gopkg.in/cheggaaa/pb.v2"
	filetype "gopkg.in/h2non/filetype.v1"
)

var replayType = filetype.NewType("rep", "application/replay")

func replayMatcher(buf []byte) bool {
	header, err := cm.ReadFrameHeader(filebuffer.New(buf))
-	if err != nil {
-		return false
+	if err == nil {
+		return cm.CheckVersion(header)
	}
-	return cm.CheckVersion(header)
+	logrus.Errorf("Malformed frame header!")
+	return false
}

var (
-	debug            = kingpin.Flag("debug", "Enable debug mode.").Bool()
-	nopromcfg        = kingpin.Flag("nopromcfg", "Disable the generation of the prometheus cfg file (prometheus.yml)").Bool()
-	dir              = kingpin.Flag("dir", "Input directory.").Short('d').OverrideDefaultFromEnvar("INPUT_DIRECTORY").Default(".").String()
-	framereader      = make(<-chan cm.Frame)
-	Version          = "unversioned"
-	cfgMemoryStorage = local.MemorySeriesStorageOptions{
-		MemoryChunks:       1024,
-		MaxChunksToPersist: 1024,
+	debug             = kingpin.Flag("debug", "Enable debug mode. More verbose than --verbose").Default("false").Bool()
+	verbose           = kingpin.Flag("verbose", "Enable info-only mode").Short('v').Default("false").Bool()
+	nopromcfg         = kingpin.Flag("nopromcfg", "Disable the generation of the prometheus cfg file (prometheus.yml)").Bool()
+	dir               = kingpin.Flag("dir", "Input directory.").Short('d').OverrideDefaultFromEnvar("INPUT_DIRECTORY").Default(".").String()
+	memoryChunk       = kingpin.Flag("memoryChunk", "Maximum number of chunks in memory").Default("100000000").Int()
+	maxChunkToPersist = kingpin.Flag("maxChunkToPersist", "Maximum number of chunks waiting, in memory, to be written to disk").Default("100000000").Int()
+	framereader       = make(<-chan cm.Frame)
+	Version           = "unversioned"
+	cfgMemoryStorage  = local.MemorySeriesStorageOptions{
+		MemoryChunks:       0,
+		MaxChunksToPersist: 0,
		//PersistenceStoragePath:
		//PersistenceRetentionPeriod:
		//CheckpointInterval: time.Minute*30,
@@ -59,44 +67,62 @@ func osfile2fname(fss []os.FileInfo, dir string) []string {
	return out
}

-func generateFramereader() {
+func generateFramereader() int {
+	defer func() {
+		if e := recover(); e != nil {
+			logrus.Errorf("Frame reader generation failed!, MESSAGE: %v", e)
+		}
+	}()
+
+	logrus.Infoln("Preliminary file read started...")
+	var count int = 0
	// 1. Check for every file that is GZip or csave format and create the filemap
	files, err := ioutil.ReadDir(*dir)
	if err != nil {
-		logrus.Fatalf("generateFilemap: %v", err)
+		panic(err)
	}
	readers := make([]io.Reader, 0)

	fnames := osfile2fname(files, *dir)
-	sort.Sort(cm.ByNumber(fnames))
+	sort.Sort(sort.Reverse(cm.ByNumber(fnames)))

	logrus.Debugf("fnames: %v", fnames)

-	for _, filepath := range fnames {
-		logrus.Debugf("filepath: %v", filepath)
-		ftype, err := filetype.MatchFile(filepath)
+	for _, path := range fnames {
+		logrus.Debugf("filepath: %v", path)
+		ftype, err := filetype.MatchFile(path)
		if err != nil {
			logrus.Debugf("err %v", err)
		}
		if ftype.MIME.Value == "application/replay" {
-			f, _ := os.Open(filepath)
+			f, _ := os.Open(path)
+
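+			// read the file once just to count its frames (used to size the progress bar in main), then rewind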
+			count += len(cm.ReadAll(f).Data)
+			f.Seek(0, 0)
+
			readers = append(readers, f)
		}
-
		if ftype.MIME.Value == "application/gzip" {
-			f, _ := os.Open(filepath)
-			logrus.Debugf("reading header: %v", filepath)
-			gz, _ := gzip.NewReader(f)
-			header, err := cm.ReadFrameHeader(gz)
-			if err == nil && cm.CheckVersion(header) {
-				f.Seek(0, io.SeekStart)
-				gz, _ = gzip.NewReader(f)
-				readers = append(readers, gz)
-			}
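+			// decompress gzipped captures into ./tmp so the resulting plain file can be read twice: once to count frames, once for the replay itself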
+			filename := filepath.Base(path)
+			ungzip(path, "./tmp/"+trimSuffix(filename, ".gz"))
+
+			f, _ := os.Open("./tmp/" + trimSuffix(filename, ".gz"))
+
+			count += len(cm.ReadAll(f).Data)
+			f.Seek(0, 0)
+
+			readers = append(readers, f)
		}
	}
-
	framereader = cm.NewMultiReader(readers)
+	return count
+}
+
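+// trimSuffix strips suffix from s when present; it mirrors strings.TrimSuffix from the standard library.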
+func trimSuffix(s, suffix string) string {
+	if strings.HasSuffix(s, suffix) {
+		s = s[:len(s)-len(suffix)]
+	}
+	return s
}

func updateURLTimestamp(timestamp int64, name string, url string, body io.Reader) io.Reader {
@@ -137,7 +163,7 @@ func updateURLTimestamp(timestamp int64, name string, url string, body io.Reader

		enc.Encode(&metrics)

-		count += 1
+		count++
	}

	logrus.Printf("%d metrics unmarshalled for %s", count, url)
@@ -147,7 +173,40 @@ func updateURLTimestamp(timestamp int64, name string, url string, body io.Reader
	return pr
}

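+// ungzip decompresses source into target (joined with the file name from the gzip header); any error panics and is recovered and logged by the deferred handler.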
+func ungzip(source, target string) {
+	defer func() {
+		if e := recover(); e != nil {
+			logrus.Errorf("Errors during decompression of %v", source)
+		}
+	}()
+
+	reader, err := os.Open(source)
+	if err != nil {
+		panic(err)
+	}
+	defer reader.Close()
+
+	archive, err := gzip.NewReader(reader)
+	if err != nil {
+		panic(err)
+	}
+	defer archive.Close()
+
+	target = filepath.Join(target, archive.Name)
+	writer, err := os.Create(target)
+	if err != nil {
+		panic(err)
+	}
+	defer writer.Close()
+
+	_, err = io.Copy(writer, archive)
+	if err != nil {
+		panic(err)
+	}
+}
+
func main() {
+
	kingpin.Version(Version)

	kingpin.Flag("storage.path", "Directory path to create and fill the data store under.").Default("data").StringVar(&cfgMemoryStorage.PersistenceStoragePath)
@@ -163,7 +222,20 @@ func main() {
		flag.Set("log.level", "debug")
	}

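+	// without --verbose, only errors are logged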
+	if !*verbose {
+		logrus.SetLevel(logrus.ErrorLevel)
+		flag.Set("log.level", "error")
+	}
+
+	// create temp directory to store ungzipped files
+	os.Mkdir("./tmp", 0700)
+	defer os.RemoveAll("./tmp")
+
	logrus.Infoln("Prefilling into", cfgMemoryStorage.PersistenceStoragePath)
+
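+	// apply the CLI-provided limits on top of the zeroed defaults in cfgMemoryStorage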
+	cfgMemoryStorage.MaxChunksToPersist = *maxChunkToPersist
+	cfgMemoryStorage.MemoryChunks = *memoryChunk
+
	localStorage := local.NewMemorySeriesStorage(&cfgMemoryStorage)

	sampleAppender := localStorage
@@ -181,19 +253,24 @@ func main() {

	filetype.AddMatcher(replayType, replayMatcher)

-	generateFramereader()
+	count := generateFramereader()
+
	logrus.Debug("frameReader %+v", framereader)

	sout := bufio.NewWriter(os.Stdout)
	defer sout.Flush()

	r := &http.Request{}

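+	// the progress bar is sized with the frame count collected during the preliminary read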
+	bar := pb.ProgressBarTemplate(`{{ red "Frames processed:" }} {{bar . | green}} {{rtime . "ETA %s" | blue }} {{percent . }}`).Start(count)
+	defer bar.Finish()
+
	for frame := range framereader {
+		bar.Increment()
		response, err := http.ReadResponse(bufio.NewReader(filebuffer.New(frame.Data)), r)
		if err != nil {
-			logrus.Error(err)
-			return
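+			// log and skip a malformed frame instead of aborting the whole replay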
+ logrus .Errorf ( "Errors occured while reading frame %d, MESSAGE: %v" , frame . NameString , err )
273
+ continue
197
274
}
198
275
bytesReader := updateURLTimestamp (frame .Header .Timestamp , frame .NameString (), frame .URIString (), response .Body )
199
276
@@ -214,7 +291,7 @@ func main() {
		logrus.Infoln("Ingested", len(decSamples), "metrics")

		for sampleAppender.NeedsThrottling() {
-			logrus.Debugln("Waiting 100ms for appender to be ready for more data")
+			logrus.Debugln("THROTTLING: Waiting 100ms for appender to be ready for more data")
			time.Sleep(time.Millisecond * 100)
		}

@@ -224,25 +301,26 @@ func main() {
		)

		for _, s := range model.Samples(decSamples) {
+
			if err := sampleAppender.Append(s); err != nil {
				switch err {
				case local.ErrOutOfOrderSample:
					numOutOfOrder++
					logrus.WithFields(logrus.Fields{
						"sample": s,
						"error":  err,
-					}).Info("Sample discarded")
+					}).Error("Sample discarded")
				case local.ErrDuplicateSampleForTimestamp:
					numDuplicates++
					logrus.WithFields(logrus.Fields{
						"sample": s,
						"error":  err,
-					}).Info("Sample discarded")
+					}).Error("Sample discarded")
				default:
					logrus.WithFields(logrus.Fields{
						"sample": s,
						"error":  err,
-					}).Info("Sample discarded")
+					}).Error("Sample discarded")
				}
			}
		}