
Merge pull request #9 from yuuki/avoid-goroutine
Avoid goroutines and channels because they are too complicated
yuuki authored Mar 30, 2017
2 parents ab22d6e + 4c60817 commit 87f7b6f
Showing 1 changed file with 39 additions and 49 deletions.
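For orientation before the diff: the commit drops the producer goroutine that streamed tokens over a channel and instead tokenizes the whole input up front, letting the goyacc-generated parser pull tokens out of a slice by index. Below is a minimal sketch of the two Lex styles; the type names and the main driver are illustrative only, not the repository's code.

	package main

	import "fmt"

	// Before: a producer goroutine scans tokens and sends them over a
	// channel; the parser blocks on a receive inside Lex.
	type chanLexer struct {
		emitter chan int
	}

	func (l *chanLexer) Lex() int {
		return <-l.emitter
	}

	// After: tokenize eagerly into a slice, then hand tokens out by index.
	// Returning 0 signals end of input, matching goyacc's EOF convention.
	type sliceLexer struct {
		tokens []int
		pos    int
	}

	func (l *sliceLexer) Lex() int {
		if l.pos == len(l.tokens) {
			return 0 // EOF
		}
		token := l.tokens[l.pos]
		l.pos++
		return token
	}

	func main() {
		l := &sliceLexer{tokens: []int{10, 20, 30}}
		fmt.Println(l.Lex(), l.Lex(), l.Lex(), l.Lex()) // 10 20 30 0
	}

The slice version also removes the shutdown problem visible in the old mainRun below, which had to push extra EOF values into the channel so the parser would not block forever.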
88 changes: 39 additions & 49 deletions parser/lexer.go
@@ -15,6 +15,10 @@ import (
 	"github.com/yuuki/gokc/log"
 )
 
+const (
+	EOF = 0
+)
+
 var SYMBOL_TABLES = map[string]int{
 	"{": LB,
 	"}": RB,
@@ -169,14 +173,11 @@ var SYMBOL_TABLES = map[string]int{
 }
 
 type Lexer struct {
-	ctx     *Context
-	emitter chan int
-	e       error
-}
-
-type Context struct {
 	scanner  scanner.Scanner
+	tokens   []int
+	pos      int
 	filename string
+	e        error
 }
 
 type Error struct {
@@ -192,43 +193,38 @@ func (e *Error) Error() string {
 
 func NewLexer(src io.Reader, filename string) *Lexer {
 	var lex Lexer
-	lex.ctx = NewContext(src, filename)
-	lex.emitter = make(chan int)
+	lex.scanner.Init(src)
+	lex.scanner.Mode &^= scanner.ScanInts | scanner.ScanFloats | scanner.ScanChars | scanner.ScanRawStrings | scanner.ScanComments | scanner.SkipComments
+	lex.scanner.IsIdentRune = isIdentRune
+	lex.tokens = []int{}
+	lex.filename = filename
 	return &lex
 }
 
-func NewContext(src io.Reader, filename string) *Context {
-	c := &Context{filename: filename}
-	c.scanner.Init(src)
-	c.scanner.Mode &^= scanner.ScanInts | scanner.ScanFloats | scanner.ScanChars | scanner.ScanRawStrings | scanner.ScanComments | scanner.SkipComments
-	c.scanner.IsIdentRune = isIdentRune
-	return c
-}
-
 func isIdentRune(ch rune, i int) bool {
 	return ch == '_' || ch == '.' || ch == '/' || ch == ':' || ch == '-' || ch == '+' || ch == '*' || ch == '?' || ch == '=' || ch == '&' || ch == '@' || unicode.IsLetter(ch) || unicode.IsDigit(ch)
 }
 
-func (c *Context) scanNextToken() (int, string) {
-	token := int(c.scanner.Scan())
-	s := c.scanner.TokenText()
+func (l *Lexer) scanNextToken() (int, string) {
+	token := int(l.scanner.Scan())
+	s := l.scanner.TokenText()
 
 	for s == "!" || s == "#" {
-		c.skipComments()
+		l.skipComments()
 
-		token = int(c.scanner.Scan())
-		s = c.scanner.TokenText()
+		token = int(l.scanner.Scan())
+		s = l.scanner.TokenText()
 	}
 
 	log.Debugf("token text: %s\n", s)
 
 	return token, s
 }
 
-func (c *Context) skipComments() {
-	ch := c.scanner.Next()
+func (l *Lexer) skipComments() {
+	ch := l.scanner.Next()
 	for ch != '\n' && ch >= 0 {
-		ch = c.scanner.Next()
+		ch = l.scanner.Next()
 	}
 }
 
@@ -238,7 +234,7 @@ func (l *Lexer) scanInclude(rawfilename string) error {
 		return err
 	}
 
-	baseDir := filepath.Dir(l.ctx.filename)
+	baseDir := filepath.Dir(l.filename)
 	os.Chdir(baseDir)
 	defer os.Chdir(curDir)
 
@@ -251,20 +247,17 @@ func (l *Lexer) scanInclude(rawfilename string) error {
 		log.Infof("warning: %s: No such file or directory", rawfilename)
 	}
 
-	prevctx := l.ctx
-	defer func() { l.ctx = prevctx }()
-
 	for _, rawpath := range rawpaths {
 		relpath := filepath.Join(baseDir, rawpath)
+		l.filename = relpath
 		log.Verbosef("--> Parsing ... %s\n", relpath)
 
 		f, err := os.Open(rawpath)
 		if err != nil {
 			return err
 		}
 
-		l.ctx = NewContext(f, relpath)
-		l.run()
+		l.tokenize()
 
 		f.Close()
 	}
@@ -273,29 +266,26 @@ func (l *Lexer) scanInclude(rawfilename string) error {
 }
 
 func (l *Lexer) Lex(lval *yySymType) int {
-	return <-l.emitter
-}
-
-func (l *Lexer) mainRun() {
-	l.run()
-	// XXX
-	l.emitter <- scanner.EOF
-	l.emitter <- scanner.EOF
-	close(l.emitter)
+	if len(l.tokens) == l.pos {
+		return EOF
+	}
+	token := l.tokens[l.pos]
+	l.pos++
+	return token
 }
 
-func (l *Lexer) run() {
+func (l *Lexer) tokenize() {
 	for {
-		token, s := l.ctx.scanNextToken()
+		token, s := l.scanNextToken()
 
 		for s == "include" {
-			token, s = l.ctx.scanNextToken()
+			token, s = l.scanNextToken()
 
 			if err := l.scanInclude(s); err != nil {
 				l.Error(err.Error())
 			}
 
-			token, s = l.ctx.scanNextToken()
+			token, s = l.scanNextToken()
 		}
 
 		if token == scanner.EOF {
@@ -349,23 +339,23 @@ func (l *Lexer) run() {
 			token = SYMBOL_TABLES[s]
 		}
 
-		l.emitter <- token
+		l.tokens = append(l.tokens, token)
 	}
 }
 
 func (l *Lexer) Error(msg string) {
 	l.e = &Error{
-		Filename: l.ctx.filename,
-		Line:     l.ctx.scanner.Line,
-		Column:   l.ctx.scanner.Column,
+		Filename: l.filename,
+		Line:     l.scanner.Line,
+		Column:   l.scanner.Column,
 		Message:  msg,
 	}
 }
 
 func Parse(src io.Reader, filename string) error {
 	yyErrorVerbose = true
 	l := NewLexer(src, filename)
-	go l.mainRun()
+	l.tokenize()
 	if ret := yyParse(l); ret != 0 {
 		return l.e
 	}
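With the goroutine gone, Parse drives everything synchronously: NewLexer, tokenize, then yyParse. A hypothetical caller is sketched below; the Parse signature is the one shown in the diff, but the import path and the input file name are assumptions.

	package main

	import (
		"log"
		"os"

		"github.com/yuuki/gokc/parser" // assumed import path
	)

	func main() {
		path := "keepalived.conf" // example input file
		f, err := os.Open(path)
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()

		// Parse buffers every token (following any "include" directives)
		// before the yacc-generated parser consumes them.
		if err := parser.Parse(f, path); err != nil {
			log.Fatal(err)
		}
		log.Printf("%s: syntax OK", path)
	}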
