rego: Stop counting metrics for "no-op" steps
We had a number of helper methods that would start a timer, defer
stopping it, and then do something to set up a Rego object.

Many of these would check a condition and skip the step, but the timer
would still get a little bit of time attributed to it anyway, just for
the `if` check and for handling the deferred stop function.

This refactors as many of these as I could find to do the `if` check
and short-circuit out before starting the timer. This prevents any of
the "no-op" steps from showing up in metrics.

Signed-off-by: Patrick East <[email protected]>
patrick-east committed Dec 18, 2019
1 parent 051c3b8 commit 2699974
Showing 2 changed files with 70 additions and 62 deletions.
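The commit applies the same pattern to each helper it touches. A rough sketch of the before/after shapes (the function names, the "example_step_ns" key, and doWork are placeholders for illustration, not code from this commit; only the metrics package is OPA's):

package steps

import "github.com/open-policy-agent/opa/metrics"

// stepBefore mirrors the old shape: the timer is started and its Stop is
// deferred before the no-op check, so even a skipped step records a tiny
// duration and shows up in the metrics report.
func stepBefore(m metrics.Metrics, items []string) error {
	m.Timer("example_step_ns").Start()
	defer m.Timer("example_step_ns").Stop()

	if len(items) == 0 {
		return nil
	}
	return doWork(items)
}

// stepAfter mirrors the new shape: the no-op check runs first and returns
// before the timer is ever touched, so the metric only appears when there
// is real work to time.
func stepAfter(m metrics.Metrics, items []string) error {
	if len(items) == 0 {
		return nil
	}

	m.Timer("example_step_ns").Start()
	defer m.Timer("example_step_ns").Stop()

	return doWork(items)
}

func doWork(items []string) error {
	// Placeholder for the real work (parsing, loading, compiling, etc.).
	return nil
}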
98 changes: 50 additions & 48 deletions rego/rego.go
@@ -1275,38 +1275,40 @@ func (r *Rego) prepare(ctx context.Context, qType queryType, extras []extraStage
 }

 func (r *Rego) parseModules(ctx context.Context, txn storage.Transaction, m metrics.Metrics) error {
+    if len(r.modules) == 0 {
+        return nil
+    }
+
     m.Timer(metrics.RegoModuleParse).Start()
     defer m.Timer(metrics.RegoModuleParse).Stop()
     var errs Errors

     // Parse any modules in the are saved to the store, but only if
     // another compile step is going to occur (ie. we have parsed modules
     // that need to be compiled).
-    if len(r.modules) > 0 {
-        ids, err := r.store.ListPolicies(ctx, txn)
-        if err != nil {
-            return err
-        }
+    ids, err := r.store.ListPolicies(ctx, txn)
+    if err != nil {
+        return err
+    }

-        for _, id := range ids {
-            // if it is already on the compiler we're using
-            // then don't bother to re-parse it from source
-            if _, haveMod := r.compiler.Modules[id]; haveMod {
-                continue
-            }
+    for _, id := range ids {
+        // if it is already on the compiler we're using
+        // then don't bother to re-parse it from source
+        if _, haveMod := r.compiler.Modules[id]; haveMod {
+            continue
+        }

-            bs, err := r.store.GetPolicy(ctx, txn, id)
-            if err != nil {
-                return err
-            }
+        bs, err := r.store.GetPolicy(ctx, txn, id)
+        if err != nil {
+            return err
+        }

-            parsed, err := ast.ParseModule(id, string(bs))
-            if err != nil {
-                errs = append(errs, err)
-            }
+        parsed, err := ast.ParseModule(id, string(bs))
+        if err != nil {
+            errs = append(errs, err)
+        }

-            r.parsedModules[id] = parsed
-        }
+        r.parsedModules[id] = parsed
     }

     // Parse any passed in as arguments to the Rego object
@@ -1326,6 +1328,10 @@ func (r *Rego) parseModules(ctx context.Context, txn storage.Transaction, m metr
 }

 func (r *Rego) loadFiles(ctx context.Context, txn storage.Transaction, m metrics.Metrics) error {
+    if len(r.loadPaths.paths) == 0 {
+        return nil
+    }
+
     m.Timer(metrics.RegoLoadFiles).Start()
     defer m.Timer(metrics.RegoLoadFiles).Stop()

@@ -1347,6 +1353,10 @@ func (r *Rego) loadFiles(ctx context.Context, txn storage.Transaction, m metrics
 }

 func (r *Rego) loadBundles(ctx context.Context, txn storage.Transaction, m metrics.Metrics) error {
+    if len(r.bundlePaths) == 0 {
+        return nil
+    }
+
     m.Timer(metrics.RegoLoadBundles).Start()
     defer m.Timer(metrics.RegoLoadBundles).Stop()

@@ -1357,7 +1367,6 @@ func (r *Rego) loadBundles(ctx context.Context, txn storage.Transaction, m metri
         }
         r.bundles[path] = bndl
     }
-
     return nil
 }

@@ -1369,42 +1378,35 @@ func (r *Rego) parseInput() (ast.Value, error) {
 }

 func (r *Rego) parseRawInput(rawInput *interface{}, m metrics.Metrics) (ast.Value, error) {
+    var input ast.Value
+
+    if rawInput == nil {
+        return input, nil
+    }
+
     m.Timer(metrics.RegoInputParse).Start()
     defer m.Timer(metrics.RegoInputParse).Stop()
-    var input ast.Value
-    if rawInput != nil {
-        rawPtr := util.Reference(rawInput)
-        // roundtrip through json: this turns slices (e.g. []string, []bool) into
-        // []interface{}, the only array type ast.InterfaceToValue can work with
-        if err := util.RoundTrip(rawPtr); err != nil {
-            return nil, err
-        }
-        val, err := ast.InterfaceToValue(*rawPtr)
-        if err != nil {
-            return nil, err
-        }
-        input = val
-    }
-    return input, nil
+
+    rawPtr := util.Reference(rawInput)
+
+    // roundtrip through json: this turns slices (e.g. []string, []bool) into
+    // []interface{}, the only array type ast.InterfaceToValue can work with
+    if err := util.RoundTrip(rawPtr); err != nil {
+        return nil, err
+    }
+
+    return ast.InterfaceToValue(*rawPtr)
 }

 func (r *Rego) parseQuery(m metrics.Metrics) (ast.Body, error) {
-    m.Timer(metrics.RegoQueryParse).Start()
-    defer m.Timer(metrics.RegoQueryParse).Stop()
-
-    var query ast.Body
-
     if r.parsedQuery != nil {
-        query = r.parsedQuery
-    } else {
-        var err error
-        query, err = ast.ParseBody(r.query)
-        if err != nil {
-            return nil, err
-        }
+        return r.parsedQuery, nil
     }

-    return query, nil
+    m.Timer(metrics.RegoQueryParse).Start()
+    defer m.Timer(metrics.RegoQueryParse).Stop()
+
+    return ast.ParseBody(r.query)
 }

 func (r *Rego) compileModules(ctx context.Context, txn storage.Transaction, m metrics.Metrics) error {
34 changes: 20 additions & 14 deletions server/server_test.go
@@ -1871,43 +1871,50 @@ func TestDataProvenanceMultiBundle(t *testing.T) {
     }
 }

-func TestDataMetrics(t *testing.T) {
-
+func TestDataMetricsEval(t *testing.T) {
     f := newFixture(t)

     // Make a request to evaluate `data`
     testDataMetrics(t, f, "/data?metrics", []string{
         "counter_server_query_cache_hit",
         "timer_rego_input_parse_ns",
-        "timer_rego_load_bundles_ns",
-        "timer_rego_load_files_ns",
-        "timer_rego_module_parse_ns",
         "timer_rego_query_parse_ns",
         "timer_rego_query_compile_ns",
         "timer_rego_query_eval_ns",
         "timer_server_handler_ns",
     })

+    // Repeat previous request, expect to have hit the query cache
+    // so fewer timers should have been reported.
+    testDataMetrics(t, f, "/data?metrics", []string{
+        "counter_server_query_cache_hit",
+        "timer_rego_input_parse_ns",
+        "timer_rego_query_parse_ns",
+        "timer_rego_query_eval_ns",
+        "timer_server_handler_ns",
+    })
+
+    // Make a request to evaluate `data` and use partial evaluation,
+    // this should not hit the same query cache result as the previous
+    // request.
     testDataMetrics(t, f, "/data?metrics&partial", []string{
         "counter_server_query_cache_hit",
         "timer_rego_input_parse_ns",
-        "timer_rego_load_bundles_ns",
-        "timer_rego_load_files_ns",
-        "timer_rego_module_compile_ns",
-        "timer_rego_module_parse_ns",
         "timer_rego_query_parse_ns",
         "timer_rego_query_compile_ns",
         "timer_rego_query_eval_ns",
         "timer_rego_partial_eval_ns",
         "timer_server_handler_ns",
     })

+    // Repeat previous partial eval request, this time it should
+    // be cached
     testDataMetrics(t, f, "/data?metrics&partial", []string{
         "counter_server_query_cache_hit",
         "timer_rego_input_parse_ns",
         "timer_rego_query_eval_ns",
         "timer_server_handler_ns",
     })
 }

 func testDataMetrics(t *testing.T, f *fixture, url string, expected []string) {
@@ -1925,16 +1932,15 @@ func testDataMetrics(t *testing.T, f *fixture, url string, expected []string) {
     for _, key := range expected {
         v, ok := result.Metrics[key]
         if !ok {
-            t.Fatalf("Missing expected metric: %s", key)
-        }
-        if v == nil {
-            t.Fatalf("Expected non-nil value for metric: %s", key)
+            t.Errorf("Missing expected metric: %s", key)
+        } else if v == nil {
+            t.Errorf("Expected non-nil value for metric: %s", key)
         }

     }

     if len(expected) != len(result.Metrics) {
-        t.Fatalf("Expected %d metrics, got %d\n\n\tValues: %+v", len(expected), len(result.Metrics), result.Metrics)
+        t.Errorf("Expected %d metrics, got %d\n\n\tValues: %+v", len(expected), len(result.Metrics), result.Metrics)
     }
 }
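For users of the rego Go package, the effect shows up through the metrics option: a query that supplies no modules, load paths, or bundle paths should no longer report timers for those preparation steps at all. A minimal sketch (the query and setup are illustrative, not taken from this commit):

package main

import (
	"context"
	"fmt"

	"github.com/open-policy-agent/opa/metrics"
	"github.com/open-policy-agent/opa/rego"
)

func main() {
	ctx := context.Background()
	m := metrics.New()

	// No modules, load paths, or bundle paths are supplied, so after this
	// change timers such as timer_rego_module_parse_ns, timer_rego_load_files_ns,
	// and timer_rego_load_bundles_ns should be absent from m.All().
	r := rego.New(
		rego.Query("1 == 1"),
		rego.Metrics(m),
	)

	if _, err := r.Eval(ctx); err != nil {
		panic(err)
	}

	for name, value := range m.All() {
		fmt.Printf("%s: %v\n", name, value)
	}
}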
