Merge pull request #31 from YaleSpinup/DocumentDBmetric
Support for DocumentDB CloudWatch metrics
nvnyale authored Nov 14, 2022
2 parents ffcc966 + ba5ac5c commit b984204
Showing 3 changed files with 102 additions and 6 deletions.
94 changes: 94 additions & 0 deletions api/handlers_metrics.go
@@ -367,6 +367,100 @@ func (s *server) GetRDSMetricsURLHandler(w http.ResponseWriter, r *http.Request)
	w.Write(meta)
}

// GetDocDBMetricsURLHandler gets DocDB metrics from cloudwatch and returns a link to the image
// Can use DocDBInstanceIdentifier or DocDBClusterIdentifier
func (s *server) GetDocDBMetricsURLHandler(w http.ResponseWriter, r *http.Request) {
	w = LogWriter{w}
	vars := mux.Vars(r)
	account := s.mapAccountNumber(vars["account"])
	queryType := vars["type"]
	id := vars["id"]

	policy, err := defaultCloudWatchMetricsPolicy()
	if err != nil {
		handleError(w, err)
		return
	}

	role := fmt.Sprintf("arn:aws:iam::%s:role/%s", account, s.session.RoleName)
	session, err := s.assumeRole(
		r.Context(),
		s.session.ExternalID,
		role,
		policy,
	)
	if err != nil {
		msg := fmt.Sprintf("failed to assume role in account: %s", account)
		handleError(w, apierror.New(apierror.ErrForbidden, msg, nil))
		return
	}

	cwService := cloudwatch.New(cloudwatch.WithSession(session.Session))

	queries := r.URL.Query()
	metrics := queries["metric"]
	if len(metrics) == 0 {
		handleError(w, apierror.New(apierror.ErrBadRequest, "at least one metric is required", nil))
		return
	}

	req := cloudwatch.MetricsRequest{}
	if err := parseQuery(r, req); err != nil {
		handleError(w, apierror.New(apierror.ErrBadRequest, "failed to parse query", err))
		return
	}

	key := fmt.Sprintf("%s/%s/%s/%s%s", account, s.org, id, strings.Join(metrics, "-"), req.String())
	hashedCacheKey := s.imageCache.HashedKey(key)
	if res, expire, ok := s.resultCache.GetWithExpiration(hashedCacheKey); ok {
		log.Debugf("found cached object: %s", res)

		if body, ok := res.([]byte); ok {
			w.Header().Set("X-Cache-Hit", "true")
			w.Header().Set("X-Cache-Expire", fmt.Sprintf("%0.fs", time.Until(expire).Seconds()))
			w.Header().Set("Content-Type", "application/json")
			w.WriteHeader(http.StatusOK)
			w.Write(body)
			return
		}
	}

	cwMetrics := []cloudwatch.Metric{}
	for _, m := range metrics {
		switch queryType {
		case "instance":
			cwMetrics = append(cwMetrics, cloudwatch.Metric{"AWS/DocDB", m, "DBInstanceIdentifier", id})
		case "cluster":
			cwMetrics = append(cwMetrics, cloudwatch.Metric{"AWS/DocDB", m, "DBClusterIdentifier", id})
		default:
			msg := fmt.Sprintf("invalid type requested: %s", queryType)
			handleError(w, apierror.New(apierror.ErrBadRequest, msg, nil))
			return
		}
	}
	req["metrics"] = cwMetrics

log.Debugf("getting metrics with request %+v", req)
image, err := cwService.GetMetricWidget(r.Context(), req)
if err != nil {
log.Errorf("failed getting metrics widget image: %s", err)
handleError(w, err)
return
}

meta, err := s.imageCache.Save(r.Context(), hashedCacheKey, image)
if err != nil {
log.Errorf("failed saving metrics widget image to cache: %s", err)
handleError(w, err)
return
}
s.resultCache.Set(hashedCacheKey, meta, 300*time.Second)

w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
w.Write(meta)
}

func (s *server) GetDataSyncMetricsURLHandler(w http.ResponseWriter, r *http.Request) {
	w = LogWriter{w}
	vars := mux.Vars(r)
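As a rough illustration of what the new handler builds from a request, the type-to-dimension mapping can be sketched on its own. This is a minimal, self-contained example; the docDBMetric struct and buildDocDBMetrics helper are stand-ins invented here, since the field layout of cloudwatch.Metric is not shown in this diff, and the metric names are just standard AWS/DocDB CloudWatch metrics used for demonstration.

package main

import "fmt"

// docDBMetric is a stand-in for the positional cloudwatch.Metric literal used
// above: {namespace, metric name, dimension name, dimension value}.
type docDBMetric struct {
	Namespace, MetricName, DimensionName, DimensionValue string
}

// buildDocDBMetrics mirrors the switch in GetDocDBMetricsURLHandler: the same
// metric names are mapped to different CloudWatch dimensions depending on
// whether the {type} path variable is "instance" or "cluster".
func buildDocDBMetrics(queryType, id string, names []string) ([]docDBMetric, error) {
	out := make([]docDBMetric, 0, len(names))
	for _, m := range names {
		switch queryType {
		case "instance":
			out = append(out, docDBMetric{"AWS/DocDB", m, "DBInstanceIdentifier", id})
		case "cluster":
			out = append(out, docDBMetric{"AWS/DocDB", m, "DBClusterIdentifier", id})
		default:
			return nil, fmt.Errorf("invalid type requested: %s", queryType)
		}
	}
	return out, nil
}

func main() {
	metrics, err := buildDocDBMetrics("cluster", "example-docdb-cluster", []string{"CPUUtilization", "DatabaseConnections"})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", metrics)
}

Running it for a cluster with two metrics prints two AWS/DocDB entries keyed by DBClusterIdentifier, which is what the handler stores in req["metrics"] before requesting the widget image.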
12 changes: 6 additions & 6 deletions api/handlers_spaces_test.go
@@ -21,7 +21,7 @@ func TestParseTime(t *testing.T) {
	// tests should match defaults from getTimeDefault
	y, m, d := time.Now().Date()
	if d == 1 {
-		d = 5
+		d = 3
	}

	sTime := fmt.Sprintf("%d-%02d-01", y, m)
@@ -68,9 +68,9 @@ func TestParseTime(t *testing.T) {
t.Logf("got expected endResult from getTimeAPI: %s", endResult)
}

// negative tests for non-matching API inputs from getTimeDefault
// bad start time fails
startTime = "2006-01-022"
// // negative tests for non-matching API inputs from getTimeDefault
// // bad start time fails
startTime = "2006-01-02"
endTime = "2006-12-02"

neg00startResult, neg00endResult, err := parseTime(startTime, endTime)
@@ -84,9 +84,9 @@ func TestParseTime(t *testing.T) {
t.Logf("negative test got expected neg00_endResult from getTimeAPI: %s", neg00endResult)
}

// bad end time fails
// // bad end time fails
startTime = "2006-01-02"
endTime = "2006-12-403"
endTime = "2006-12-03"

neg01startResult, neg01endResult, err := parseTime(startTime, endTime)
if err != nil {
2 changes: 2 additions & 0 deletions api/routes.go
@@ -39,6 +39,8 @@ func (s *server) routes() {
metricsApi.HandleFunc("/{account}/buckets/{bucket}/graph", s.GetS3MetricsURLHandler).Queries("metric", "{metric:(?:BucketSizeBytes|NumberOfObjects)}").Methods(http.MethodGet)
// metrics endpoints for RDS services
metricsApi.HandleFunc("/{account}/rds/{type}/{id}/graph", s.GetRDSMetricsURLHandler).Methods(http.MethodGet)
// metrics endpoints for DocumentDB services
metricsApi.HandleFunc("/{account}/docdb/{type}/{id}/graph", s.GetDocDBMetricsURLHandler).Methods(http.MethodGet)
// metrics endpoints for DataSync services
metricsApi.HandleFunc("/{account}/movers/{taskId}/graph", s.GetDataSyncMetricsURLHandler).Methods(http.MethodGet)

