React UI: Implement /targets page (prometheus#6276)
* Add LastScrapeDuration to targets endpoint

Signed-off-by: Dustin Hooten <[email protected]>

* Add Scrape job name to targets endpoint

Signed-off-by: Dustin Hooten <[email protected]>

* Implement the /targets page in react

Signed-off-by: Dustin Hooten <[email protected]>

* Add state query param to targets endpoint

Signed-off-by: Dustin Hooten <[email protected]>

* Use state filter in api call

Signed-off-by: Dustin Hooten <[email protected]>

* api feedback

Signed-off-by: Dustin Hooten <[email protected]>

* pr feedback frontend

Signed-off-by: Dustin Hooten <[email protected]>

* Implement and use localstorage hook

Signed-off-by: Dustin Hooten <[email protected]>

* PR feedback

Signed-off-by: Dustin Hooten <[email protected]>
hooten authored and juliusv committed Nov 11, 2019
1 parent 4543153 commit ca60bf2
Showing 31 changed files with 1,499 additions and 77 deletions.
39 changes: 38 additions & 1 deletion docs/querying/api.md
@@ -390,7 +390,7 @@ Prometheus target discovery:
GET /api/v1/targets
```

Both the active and dropped targets are part of the response.
Both the active and dropped targets are part of the response by default.
`labels` represents the label set after relabelling has occurred.
`discoveredLabels` represent the unmodified labels retrieved during service discovery before relabelling has occurred.

@@ -411,9 +411,11 @@ $ curl http://localhost:9090/api/v1/targets
"instance": "127.0.0.1:9090",
"job": "prometheus"
},
"scrapePool": "prometheus",
"scrapeUrl": "http://127.0.0.1:9090/metrics",
"lastError": "",
"lastScrape": "2017-01-17T15:07:44.723715405+01:00",
"lastScrapeDuration": 0.050688943,
"health": "up"
}
],
@@ -431,6 +433,41 @@ $ curl http://localhost:9090/api/v1/targets
}
```

The `state` query parameter allows the caller to filter by active or dropped targets
(e.g., `state=active`, `state=dropped`, `state=any`).
Note that an empty array is still returned for targets that are filtered out.
Any other value of `state` selects neither group, so both lists are returned empty.

```json
$ curl 'http://localhost:9090/api/v1/targets?state=active'
{
"status": "success",
"data": {
"activeTargets": [
{
"discoveredLabels": {
"__address__": "127.0.0.1:9090",
"__metrics_path__": "/metrics",
"__scheme__": "http",
"job": "prometheus"
},
"labels": {
"instance": "127.0.0.1:9090",
"job": "prometheus"
},
"scrapePool": "prometheus",
"scrapeUrl": "http://127.0.0.1:9090/metrics",
"lastError": "",
"lastScrape": "2017-01-17T15:07:44.723715405+01:00",
"lastScrapeDuration": 50688943,
"health": "up"
}
],
"droppedTargets": []
}
}
```
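
For illustration only, here is a minimal Go client sketch (not part of this commit) that calls the filtered endpoint and reads the two new fields, `scrapePool` and `lastScrapeDuration`. It assumes a Prometheus server listening on `localhost:9090` and decodes only the fields used below.

```go
// Minimal client sketch; the endpoint URL and field names follow the example
// response above, and the struct covers only the fields this program prints.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

type target struct {
	Labels             map[string]string `json:"labels"`
	ScrapePool         string            `json:"scrapePool"`
	ScrapeURL          string            `json:"scrapeUrl"`
	LastScrapeDuration float64           `json:"lastScrapeDuration"` // seconds
	Health             string            `json:"health"`
}

type targetsResponse struct {
	Status string `json:"status"`
	Data   struct {
		ActiveTargets []target `json:"activeTargets"`
	} `json:"data"`
}

func main() {
	resp, err := http.Get("http://localhost:9090/api/v1/targets?state=active")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var tr targetsResponse
	if err := json.NewDecoder(resp.Body).Decode(&tr); err != nil {
		log.Fatal(err)
	}
	for _, t := range tr.Data.ActiveTargets {
		fmt.Printf("%s (pool %s): health=%s, last scrape took %.3fs\n",
			t.Labels["instance"], t.ScrapePool, t.Health, t.LastScrapeDuration)
	}
}
```

Because `lastScrapeDuration` is reported in seconds as a floating-point number, it can be formatted directly with sub-second precision as above.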


## Rules

4 changes: 2 additions & 2 deletions scrape/scrape.go
@@ -499,7 +499,7 @@ func appender(app storage.Appender, limit int) storage.Appender {
// A scraper retrieves samples and accepts a status report at the end.
type scraper interface {
scrape(ctx context.Context, w io.Writer) (string, error)
report(start time.Time, dur time.Duration, err error)
Report(start time.Time, dur time.Duration, err error)
offset(interval time.Duration, jitterSeed uint64) time.Duration
}

@@ -1212,7 +1212,7 @@ const (
)

func (sl *scrapeLoop) report(start time.Time, duration time.Duration, scraped, appended, seriesAdded int, err error) error {
sl.scraper.report(start, duration, err)
sl.scraper.Report(start, duration, err)

ts := timestamp.FromTime(start)

2 changes: 1 addition & 1 deletion scrape/scrape_test.go
@@ -1450,7 +1450,7 @@ func (ts *testScraper) offset(interval time.Duration, jitterSeed uint64) time.Duration {
return ts.offsetDur
}

func (ts *testScraper) report(start time.Time, duration time.Duration, err error) {
func (ts *testScraper) Report(start time.Time, duration time.Duration, err error) {
ts.lastStart = start
ts.lastDuration = duration
ts.lastError = err
3 changes: 2 additions & 1 deletion scrape/target.go
@@ -200,7 +200,8 @@ func (t *Target) URL() *url.URL {
}
}

func (t *Target) report(start time.Time, dur time.Duration, err error) {
// Report sets target data about the last scrape.
func (t *Target) Report(start time.Time, dur time.Duration, err error) {
t.mtx.Lock()
defer t.mtx.Unlock()

80 changes: 53 additions & 27 deletions web/api/v1/api.go
@@ -25,6 +25,7 @@ import (
"regexp"
"sort"
"strconv"
"strings"
"time"
"unsafe"

@@ -562,11 +563,13 @@ type Target struct {
// Any labels that are added to this target and its metrics.
Labels map[string]string `json:"labels"`

ScrapeURL string `json:"scrapeUrl"`
ScrapePool string `json:"scrapePool"`
ScrapeURL string `json:"scrapeUrl"`

LastError string `json:"lastError"`
LastScrape time.Time `json:"lastScrape"`
Health scrape.TargetHealth `json:"health"`
LastError string `json:"lastError"`
LastScrape time.Time `json:"lastScrape"`
LastScrapeDuration float64 `json:"lastScrapeDuration"`
Health scrape.TargetHealth `json:"health"`
}

// DroppedTarget has the information for one target that was dropped during relabelling.
@@ -582,46 +585,69 @@ type TargetDiscovery struct {
}

func (api *API) targets(r *http.Request) apiFuncResult {
flatten := func(targets map[string][]*scrape.Target) []*scrape.Target {
sortKeys := func(targets map[string][]*scrape.Target) ([]string, int) {
var n int
keys := make([]string, 0, len(targets))
for k := range targets {
keys = append(keys, k)
n += len(targets[k])
}
sort.Strings(keys)
return keys, n
}

flatten := func(targets map[string][]*scrape.Target) []*scrape.Target {
keys, n := sortKeys(targets)
res := make([]*scrape.Target, 0, n)
for _, k := range keys {
res = append(res, targets[k]...)
}
return res
}

tActive := flatten(api.targetRetriever.TargetsActive())
tDropped := flatten(api.targetRetriever.TargetsDropped())
res := &TargetDiscovery{ActiveTargets: make([]*Target, 0, len(tActive)), DroppedTargets: make([]*DroppedTarget, 0, len(tDropped))}
state := strings.ToLower(r.URL.Query().Get("state"))
showActive := state == "" || state == "any" || state == "active"
showDropped := state == "" || state == "any" || state == "dropped"
res := &TargetDiscovery{}

for _, target := range tActive {
lastErrStr := ""
lastErr := target.LastError()
if lastErr != nil {
lastErrStr = lastErr.Error()
}
if showActive {
targetsActive := api.targetRetriever.TargetsActive()
activeKeys, numTargets := sortKeys(targetsActive)
res.ActiveTargets = make([]*Target, 0, numTargets)

res.ActiveTargets = append(res.ActiveTargets, &Target{
DiscoveredLabels: target.DiscoveredLabels().Map(),
Labels: target.Labels().Map(),
ScrapeURL: target.URL().String(),
LastError: lastErrStr,
LastScrape: target.LastScrape(),
Health: target.Health(),
})
}
for _, key := range activeKeys {
for _, target := range targetsActive[key] {
lastErrStr := ""
lastErr := target.LastError()
if lastErr != nil {
lastErrStr = lastErr.Error()
}

for _, t := range tDropped {
res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
DiscoveredLabels: t.DiscoveredLabels().Map(),
})
res.ActiveTargets = append(res.ActiveTargets, &Target{
DiscoveredLabels: target.DiscoveredLabels().Map(),
Labels: target.Labels().Map(),
ScrapePool: key,
ScrapeURL: target.URL().String(),
LastError: lastErrStr,
LastScrape: target.LastScrape(),
LastScrapeDuration: target.LastScrapeDuration().Seconds(),
Health: target.Health(),
})
}
}
} else {
res.ActiveTargets = []*Target{}
}
if showDropped {
tDropped := flatten(api.targetRetriever.TargetsDropped())
res.DroppedTargets = make([]*DroppedTarget, 0, len(tDropped))
for _, t := range tDropped {
res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{
DiscoveredLabels: t.DiscoveredLabels().Map(),
})
}
} else {
res.DroppedTargets = []*DroppedTarget{}
}
return apiFuncResult{res, nil, nil, nil}
}
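
To make the new filtering logic easier to follow, the sketch below (not from the commit; `stateFilter` is a hypothetical helper name) isolates how the `state` query parameter maps to the `showActive` and `showDropped` flags.

```go
// Standalone illustration of the state handling in the handler above.
// stateFilter is a hypothetical helper, not a function from the commit.
package main

import (
	"fmt"
	"strings"
)

// stateFilter mirrors the showActive/showDropped derivation: an empty or
// "any" state selects both groups, "active" or "dropped" selects one.
func stateFilter(state string) (showActive, showDropped bool) {
	s := strings.ToLower(state)
	showActive = s == "" || s == "any" || s == "active"
	showDropped = s == "" || s == "any" || s == "dropped"
	return showActive, showDropped
}

func main() {
	for _, s := range []string{"", "active", "dropped", "any", "bogus"} {
		a, d := stateFilter(s)
		fmt.Printf("state=%q -> activeTargets shown: %v, droppedTargets shown: %v\n", s, a, d)
	}
}
```

An unrecognized value selects neither group, which is why the handler returns empty arrays for both `activeTargets` and `droppedTargets` in that case.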
