// Package lexer tokenizes wiki-style links of the form
// [[target|alias#section^block]].
package lexer

import (
	"bufio"
	"strings"
)

// Token is a single lexical unit emitted by the Lexer.
type Token struct {
	Raw string
}

var (
	LinkBeginToken        = Token{Raw: "[["}
	LinkEndToken          = Token{Raw: "]]"}
	LinkTextToken         = Token{}
	LinkAliasBeginToken   = Token{Raw: "|"}
	LinkAliasTextToken    = Token{}
	LinkSectionBeginToken = Token{Raw: "#"}
	LinkSectionTextToken  = Token{}
	LinkBlockBeginToken   = Token{Raw: "^"}
	LinkBlockTextToken    = Token{}
)

// Lexer scans the input one rune at a time and emits Tokens on its channel.
type Lexer struct {
	input  []string
	pos    int
	Tokens chan Token
}

// Lex splits input into runes, starts the lexing loop in a goroutine, and
// returns the lexer along with its token channel.
func Lex(input string) (*Lexer, chan Token) {
	l := &Lexer{
		input:  make([]string, 0),
		pos:    0,
		Tokens: make(chan Token),
	}

	scanner := bufio.NewScanner(strings.NewReader(input))
	scanner.Split(bufio.ScanRunes)
	for scanner.Scan() {
		l.input = append(l.input, scanner.Text())
	}

	go l.Run()
	return l, l.Tokens
}

// Next returns the rune at the current position and advances the lexer.
// It returns the sentinel "eof" once the input is exhausted, without
// advancing further.
func (l *Lexer) Next() string {
	if l.pos > len(l.input)-1 {
		return "eof"
	}
	r := l.input[l.pos]
	l.pos++
	return r
}

// Backup steps back one rune. It must only be called after a call to Next
// that actually consumed a rune.
func (l *Lexer) Backup() {
	l.pos--
}

// Peek returns the next rune without consuming it. The Backup is skipped at
// end of input, since Next does not advance past it; rewinding there would
// re-read the final rune and loop forever.
func (l *Lexer) Peek() string {
	r := l.Next()
	if r != "eof" {
		l.Backup()
	}
	return r
}

// Emit sends a token to the consumer.
func (l *Lexer) Emit(t Token) {
	l.Tokens <- t
}

// Run is the main lexing loop. It closes the token channel when the input
// is exhausted so that consumers ranging over Tokens terminate.
func (l *Lexer) Run() {
	defer close(l.Tokens)
	for {
		s := l.Next()
		if s == "eof" {
			return
		}
		if s == "[" && l.Peek() == "[" {
			l.Next()
			l.Emit(LinkBeginToken)
		}
		if s == "]" && l.Peek() == "]" {
			l.Next()
			l.Emit(LinkEndToken) // "]]" closes a link; emitting LinkBeginToken here was a bug
		}
	}
}
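// Usage sketch (an illustration added here, not part of the original file):
// a hypothetical main package that drains the token channel. The import path
// "example.com/lexer" is an assumption.
//
//	package main
//
//	import (
//		"fmt"
//
//		"example.com/lexer"
//	)
//
//	func main() {
//		_, tokens := lexer.Lex("see [[Note]] for details")
//		for t := range tokens {
//			fmt.Printf("%q\n", t.Raw) // prints "[[" then "]]"
//		}
//	}
//
// Ranging over the channel terminates cleanly because Run closes Tokens once
// the input is exhausted.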