Skip to content

Commit

Permalink
[query] Carbon fetch for metrics with no separator (#2450)
Browse files Browse the repository at this point in the history
  • Loading branch information
arnikola authored Jul 8, 2020
1 parent 78a9858 commit daaa0c8
Show file tree
Hide file tree
Showing 4 changed files with 122 additions and 14 deletions.
5 changes: 5 additions & 0 deletions scripts/docker-integration-tests/carbon/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,11 @@ t=$(date +%s)
echo "foo.bar:baz.qux 42 $t" | nc 0.0.0.0 7204
ATTEMPTS=20 MAX_TIMEOUT=4 TIMEOUT=1 retry_with_backoff read_carbon 'foo.bar:*.*' 42

# Test writing and reading IDs with a single element.
t=$(date +%s)
echo "quail 42 $t" | nc 0.0.0.0 7204
ATTEMPTS=20 MAX_TIMEOUT=4 TIMEOUT=1 retry_with_backoff read_carbon 'quail' 42

t=$(date +%s)
echo "a 0 $t" | nc 0.0.0.0 7204
echo "a.bar 0 $t" | nc 0.0.0.0 7204
Expand Down
3 changes: 3 additions & 0 deletions src/query/graphite/lexer/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,9 @@ type Token struct {
value string
}

// MustMakeToken is a test-only helper that builds a Token holding the
// given value; the tokenType field is left at its zero value.
func MustMakeToken(value string) *Token { return &Token{value: value} }

// TokenType returns the type of token consumed.
func (t Token) TokenType() TokenType {
return t.tokenType
Expand Down
91 changes: 77 additions & 14 deletions src/query/graphite/native/compiler.go
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,8 @@ func compile(input string) (Expression, error) {
lex, tokens := lexer.NewLexer(input, booleanLiterals)
go lex.Run()

c := compiler{input: input, tokens: tokens}
lookforward := newTokenLookforward(tokens)
c := compiler{input: input, tokens: lookforward}
expr, err := c.compileExpression()

// Exhaust all tokens until closed or else lexer won't close
Expand All @@ -49,15 +50,54 @@ func compile(input string) (Expression, error) {
return expr, err
}

// tokenLookforward wraps the lexer token channel and buffers at most one
// token, so callers can peek at the next token without consuming it.
type tokenLookforward struct {
	lookforward *lexer.Token // single buffered token; nil when nothing is buffered
	tokens      chan *lexer.Token
}

// newTokenLookforward builds a tokenLookforward reading from the given
// channel, with no token buffered yet.
func newTokenLookforward(tokens chan *lexer.Token) *tokenLookforward {
	l := &tokenLookforward{}
	l.tokens = tokens
	return l
}

// get consumes and returns the next token, draining the buffered lookahead
// token first if one exists. It returns nil once the channel is closed.
func (l *tokenLookforward) get() *lexer.Token {
	if l.lookforward != nil {
		next := l.lookforward
		l.lookforward = nil
		return next
	}
	next, ok := <-l.tokens
	if !ok {
		return nil
	}
	return next
}

// peek returns the next token without consuming it, pulling one token from
// the channel into the buffer if needed. The second return value is false
// once the channel is closed and nothing remains.
func (l *tokenLookforward) peek() (*lexer.Token, bool) {
	if l.lookforward == nil {
		token, ok := <-l.tokens
		if !ok {
			return nil, false
		}
		l.lookforward = token
	}
	return l.lookforward, true
}

// A compiler converts an input string into an executable Expression.
type compiler struct {
	input  string            // original expression text being compiled
	tokens *tokenLookforward // lexer token stream with single-token lookahead
}

// compileExpression compiles a top level expression
func (c *compiler) compileExpression() (Expression, error) {
token := <-c.tokens
token := c.tokens.get()
if token == nil {
return noopExpression{}, nil
}
Expand All @@ -69,31 +109,54 @@ func (c *compiler) compileExpression() (Expression, error) {

case lexer.Identifier:
fc, err := c.compileFunctionCall(token.Value(), nil)
fetchCandidate := false
if err != nil {
return nil, err
_, fnNotFound := err.(errFuncNotFound)
if fnNotFound && c.canCompileAsFetch(token.Value()) {
fetchCandidate = true
expr = newFetchExpression(token.Value())
} else {
return nil, err
}
}

expr, err = newFuncExpression(fc)
if err != nil {
return nil, err
if !fetchCandidate {
expr, err = newFuncExpression(fc)
if err != nil {
return nil, err
}
}

default:
return nil, c.errorf("unexpected value %s", token.Value())
}

if token := <-c.tokens; token != nil {
if token := c.tokens.get(); token != nil {
return nil, c.errorf("extra data %s", token.Value())
}

return expr, nil
}

// canCompileAsFetch reports whether the identifier just consumed may be
// treated as a bare (non-delimited) carbon metric fetch rather than a
// function call: true unless the next token opens an argument list.
// fname is currently unused but kept for interface stability.
func (c *compiler) canCompileAsFetch(fname string) bool {
	nextToken, hasNext := c.tokens.peek()
	if !hasNext {
		return true
	}
	return nextToken.TokenType() != lexer.LParenthesis
}

// errFuncNotFound wraps an error indicating that a named graphite function
// does not exist, letting callers distinguish this case (and fall back to
// treating the identifier as a fetch) from other compilation errors.
type errFuncNotFound struct{ err error }

// Error implements the error interface by delegating to the wrapped error.
func (e errFuncNotFound) Error() string { return e.err.Error() }

// compileFunctionCall compiles a function call
func (c *compiler) compileFunctionCall(fname string, nextToken *lexer.Token) (*functionCall, error) {
fn := findFunction(fname)
if fn == nil {
return nil, c.errorf("could not find function named %s", fname)
return nil, errFuncNotFound{c.errorf("could not find function named %s", fname)}
}

if nextToken != nil {
Expand Down Expand Up @@ -158,7 +221,7 @@ func (c *compiler) compileFunctionCall(fname string, nextToken *lexer.Token) (*f
// compileArg parses and compiles a single argument
func (c *compiler) compileArg(fname string, index int,
reflectType reflect.Type) (arg funcArg, foundRParen bool, err error) {
token := <-c.tokens
token := c.tokens.get()
if token == nil {
return nil, false, c.errorf("unexpected eof while parsing %s", fname)
}
Expand All @@ -173,7 +236,7 @@ func (c *compiler) compileArg(fname string, index int,
fname, token.Value())
}

if token = <-c.tokens; token == nil {
if token = c.tokens.get(); token == nil {
return nil, false, c.errorf("unexpected eof while parsing %s", fname)
}
}
Expand Down Expand Up @@ -219,13 +282,13 @@ func (c *compiler) convertTokenToArg(token *lexer.Token, reflectType reflect.Typ
currentToken := token.Value()

// handle named arguments
nextToken := <-c.tokens
nextToken := c.tokens.get()
if nextToken == nil {
return nil, c.errorf("unexpected eof, %s should be followed by = or (", currentToken)
}
if nextToken.TokenType() == lexer.Equal {
// TODO: check if currentToken matches the expected parameter name
tokenAfterNext := <-c.tokens
tokenAfterNext := c.tokens.get()
if tokenAfterNext == nil {
return nil, c.errorf("unexpected eof, named argument %s should be followed by its value", currentToken)
}
Expand All @@ -240,7 +303,7 @@ func (c *compiler) convertTokenToArg(token *lexer.Token, reflectType reflect.Typ

// expectToken reads the next token and confirms it is the expected type before returning it
func (c *compiler) expectToken(expectedType lexer.TokenType) (*lexer.Token, error) {
token := <-c.tokens
token := c.tokens.get()
if token == nil {
return nil, c.errorf("expected %v but encountered eof", expectedType)
}
Expand Down
37 changes: 37 additions & 0 deletions src/query/graphite/native/compiler_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ import (
"testing"

"github.com/m3db/m3/src/query/graphite/common"
"github.com/m3db/m3/src/query/graphite/lexer"
xtest "github.com/m3db/m3/src/query/graphite/testing"
"github.com/m3db/m3/src/query/graphite/ts"

Expand Down Expand Up @@ -56,6 +57,7 @@ func TestCompile1(t *testing.T) {

tests := []testCompile{
{"", noopExpression{}},
{"foobar", newFetchExpression("foobar")},
{"foo.bar.{a,b,c}.baz-*.stat[0-9]",
newFetchExpression("foo.bar.{a,b,c}.baz-*.stat[0-9]")},
{"noArgs()", &funcExpression{&functionCall{f: noArgs}}},
Expand Down Expand Up @@ -290,6 +292,8 @@ type testCompilerError struct {
func TestCompileErrors(t *testing.T) {
tests := []testCompilerError{
{"hello()", "top-level functions must return timeseries data"},
{"foobar(", "invalid expression 'foobar(': could not find function named foobar"},
{"foobar()", "invalid expression 'foobar()': could not find function named foobar"},
{"sortByName(foo.*.zed)junk", "invalid expression 'sortByName(foo.*.zed)junk': " +
"extra data junk"},
{"aliasByNode(",
Expand Down Expand Up @@ -435,7 +439,40 @@ func TestExtractFetchExpressions(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, test.targets, targets, test.expr)
}
}

// TestTokenLookforward verifies that peek is non-consuming and that get
// drains the peeked token before reading further from the channel.
func TestTokenLookforward(t *testing.T) {
	tokens := make(chan *lexer.Token)
	go func() {
		defer close(tokens)
		for _, v := range []string{"a", "b", "c"} {
			tokens <- lexer.MustMakeToken(v)
		}
	}()

	lookforward := newTokenLookforward(tokens)
	assert.Equal(t, "a", lookforward.get().Value())

	// peek must return the same token repeatedly without consuming it.
	for i := 0; i < 2; i++ {
		token, found := lookforward.peek()
		assert.True(t, found)
		assert.Equal(t, "b", token.Value())
	}

	// get should first drain the peeked token, then resume on the channel.
	assert.Equal(t, "b", lookforward.get().Value())
	assert.Equal(t, "c", lookforward.get().Value())

	// once the channel is closed, peek reports that no token remains.
	_, found := lookforward.peek()
	assert.False(t, found)
}

func init() {
Expand Down

0 comments on commit daaa0c8

Please sign in to comment.