Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions bql/grammar/grammar_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ func TestRejectByParse(t *testing.T) {
// Reject incomplete clauses.
`select ?a from ?b where {?s ?p};`,
`select ?a from ?b where {?s ?p ?o . ?};`,
// Reject imcomplete clause aliasing.
// Reject incomplete clause aliasing.
`select ?a from ?b where {?s id ?b as ?c ?d ?o};`,
`select ?a from ?b where {?s ?p at ?t as ?a ?o};`,
`select ?a from ?b where {?s ?p ?o at ?t id ?i};`,
Expand Down Expand Up @@ -270,7 +270,7 @@ func TestAcceptOpsByParseAndSemantic(t *testing.T) {

func TestAcceptQueryBySemanticParse(t *testing.T) {
table := []string{
// Test well type litterals are accepted.
// Test well-typed literals are accepted.
`select ?s from ?g where{?s ?p "1"^^type:int64};`,
// Test predicates are accepted.
// Test invalid predicate time anchor are rejected.
Expand Down Expand Up @@ -314,7 +314,7 @@ func TestAcceptQueryBySemanticParse(t *testing.T) {

func TestRejectByParseAndSemantic(t *testing.T) {
table := []string{
// Test wront type litterals are rejected.
// Test wrong type literals are rejected.
`select ?s from ?g where{?s ?p "true"^^type:int64};`,
// Test invalid predicate bounds are rejected.
`select ?s from ?b where{/_<foo> as ?s "id"@[2018-07-19T13:12:04.669618843-07:00, 2015-07-19T13:12:04.669618843-07:00] ?o};`,
Expand Down
2 changes: 1 addition & 1 deletion bql/grammar/llk.go
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ func (l *LLk) CanAccept(tt lexer.TokenType) bool {
return l.tkns[0].Type == tt
}

// Consume will consue the current token and move to the next one if it matches
// Consume will consume the current token and move to the next one if it matches
// the provided token; it returns false otherwise.
func (l *LLk) Consume(tt lexer.TokenType) bool {
if l.tkns[0].Type != tt {
Expand Down
2 changes: 1 addition & 1 deletion bql/grammar/llk_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ import (
"github.com/google/badwolf/bql/lexer"
)

func TestEmptyImputLLk(t *testing.T) {
func TestEmptyInputLLk(t *testing.T) {
const k = 10
l := NewLLk("", k)
if l.Current().Type != lexer.ItemEOF {
Expand Down
8 changes: 4 additions & 4 deletions bql/grammar/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,16 +63,16 @@ type Clause struct {
}

// Grammar contains the left factory LLk grammar to be parsed. All provided
// grammars *must* have the "START" symbol to initialte the parsing of input
// grammars *must* have the "START" symbol to initiate the parsing of input
// text.
type Grammar map[semantic.Symbol][]*Clause

// Parser implements a LLk recursive decend parser for left factorized grammars.
// Parser implements a LLk recursive descent parser for left factorized grammars.
type Parser struct {
grammar *Grammar
}

// NewParser creates a new recursive decend parser for a left factorized
// NewParser creates a new recursive descent parser for a left factorized
// grammar.
func NewParser(grammar *Grammar) (*Parser, error) {
// Check that the grammar is left factorized.
Expand Down Expand Up @@ -126,7 +126,7 @@ func (p *Parser) consume(llk *LLk, st *semantic.Statement, s semantic.Symbol) (b
return false, fmt.Errorf("Parser.consume: could not consume token %s in production %s", llk.Current(), s)
}

// expect given the input, symbol, and clause attemps to satisfy all elements.
// expect given the input, symbol, and clause attempts to satisfy all elements.
func (p *Parser) expect(llk *LLk, st *semantic.Statement, s semantic.Symbol, cls *Clause) (bool, error) {
if cls.ProcessStart != nil {
if _, err := cls.ProcessStart(st, s); err != nil {
Expand Down
58 changes: 29 additions & 29 deletions bql/lexer/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
// limitations under the License.

// Package lexer implements the lexer used by the BadWolf query language.
// The lexer is losely written after the parsel model described by Rob Pike
// The lexer is loosely written after the parser model described by Rob Pike
// in his presentation "Lexical Scanning in Go". Slides can be found at
// http://cuddle.googlecode.com/hg/talk/lex.html#landing-slide.
package lexer
Expand Down Expand Up @@ -66,9 +66,9 @@ const (
ItemBefore
// ItemAfter represents the after keyword in BQL.
ItemAfter
// ItemBetween represents the betwen keyword in BQL.
// ItemBetween represents the between keyword in BQL.
ItemBetween
// ItemCount represents the count funtion in BQL.
// ItemCount represents the count function in BQL.
ItemCount
// ItemDistinct represents the distinct modifier in BQL.
ItemDistinct
Expand All @@ -82,17 +82,17 @@ const (
ItemOrder
// ItemHaving represents the having clause keyword clause in BQL.
ItemHaving
// ItemAsc represents asc keywork on order by clause in BQL.
// ItemAsc represents asc keyword on order by clause in BQL.
ItemAsc
// ItemDesc represents desc keywork on order by clause in BQL
// ItemDesc represents desc keyword on order by clause in BQL
ItemDesc
// ItemLimit represetnts the limit clause in BQL.
// ItemLimit represents the limit clause in BQL.
ItemLimit

// ItemBinding respresents a variable binding in BQL.
// ItemBinding represents a variable binding in BQL.
ItemBinding

// ItemNode respresents a BadWolf node in BQL.
// ItemNode represents a BadWolf node in BQL.
ItemNode
// ItemLiteral represents a BadWolf literal in BQL.
ItemLiteral
Expand All @@ -101,19 +101,19 @@ const (
// ItemPredicateBound represents a BadWolf predicate bound in BQL.
ItemPredicateBound

// ItemLBracket representes the left opening bracket token in BQL.
// ItemLBracket represents the left opening bracket token in BQL.
ItemLBracket
// ItemRBracket representes the right opening bracket token in BQL.
// ItemRBracket represents the right opening bracket token in BQL.
ItemRBracket
// ItemLPar representes the left opening parentesis token in BQL.
// ItemLPar represents the left opening parenthesis token in BQL.
ItemLPar
// ItemRPar representes the right closing parentesis token in BQL.
// ItemRPar represents the right closing parenthesis token in BQL.
ItemRPar
// ItemDot represents the graph clause separator . in BQL.
ItemDot
// ItemSemicolon represents the final statement semicolon in BQL.
ItemSemicolon
// ItemComma respresnts the graph join operator in BQL.
// ItemComma represents the graph join operator in BQL.
ItemComma
// ItemLT represents < in BQL.
ItemLT
Expand Down Expand Up @@ -315,7 +315,7 @@ type lexer struct {
tokens chan Token // channel of scanned items.
}

// lex creates a new lexer for the givne input
// lex creates a new lexer for the given input.
func lex(input string, capacity int) (*lexer, <-chan Token) {
l := &lexer{
input: input,
Expand Down Expand Up @@ -353,34 +353,34 @@ func lexToken(l *lexer) stateFn {
return lexKeyword
}
}
if state := isSingleSymboToken(l, ItemLBracket, leftBracket); state != nil {
if state := isSingleSymbolToken(l, ItemLBracket, leftBracket); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemRBracket, rightBracket); state != nil {
if state := isSingleSymbolToken(l, ItemRBracket, rightBracket); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemLPar, leftPar); state != nil {
if state := isSingleSymbolToken(l, ItemLPar, leftPar); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemRPar, rightPar); state != nil {
if state := isSingleSymbolToken(l, ItemRPar, rightPar); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemSemicolon, semicolon); state != nil {
if state := isSingleSymbolToken(l, ItemSemicolon, semicolon); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemDot, dot); state != nil {
if state := isSingleSymbolToken(l, ItemDot, dot); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemComma, comma); state != nil {
if state := isSingleSymbolToken(l, ItemComma, comma); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemLT, lt); state != nil {
if state := isSingleSymbolToken(l, ItemLT, lt); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemGT, gt); state != nil {
if state := isSingleSymbolToken(l, ItemGT, gt); state != nil {
return state
}
if state := isSingleSymboToken(l, ItemEQ, eq); state != nil {
if state := isSingleSymbolToken(l, ItemEQ, eq); state != nil {
return state
}
{
Expand All @@ -398,8 +398,8 @@ func lexToken(l *lexer) stateFn {
return nil // Stop the run loop.
}

// isSingleSymboToken check if a single char should be lexed.
func isSingleSymboToken(l *lexer, tt TokenType, symbol rune) stateFn {
// isSingleSymbolToken checks if a single char should be lexed.
func isSingleSymbolToken(l *lexer, tt TokenType, symbol rune) stateFn {
if r := l.peek(); r == symbol {
l.next()
l.emit(tt)
Expand All @@ -420,7 +420,7 @@ func lexBinding(l *lexer) stateFn {
return lexSpace
}

// lexSpace consumes spaces without emiting any token.
// lexSpace consumes spaces without emitting any token.
func lexSpace(l *lexer) stateFn {
for {
if r := l.next(); !unicode.IsSpace(r) || r == eof {
Expand All @@ -432,7 +432,7 @@ func lexSpace(l *lexer) stateFn {
return lexToken
}

// lexKeywork lexes the BQL keywords.
// lexKeyword lexes the BQL keywords.
func lexKeyword(l *lexer) stateFn {
input := l.input[l.pos:]
f := func(r rune) bool {
Expand Down Expand Up @@ -613,7 +613,7 @@ func lexPredicateOrLiteral(l *lexer) stateFn {
return lexLiteral
}

// lexPredicate lexes a predicicate of out of the input.
// lexPredicate lexes a predicate out of the input.
func lexPredicate(l *lexer) stateFn {
l.next()
for done := false; !done; {
Expand Down
Loading