// Source: obsidian-markdown/lexer/lexer.go
// (Repository-page scrape residue — topic-selection help text and the
// file metadata "117 lines / 1.8 KiB / Go" — converted to this comment
// so the file parses as Go.)
package lexer
import (
"bufio"
// "log"
"strings"
)
// Token identifies the kind of lexical item the Lexer emits while
// scanning Obsidian-style wiki links ([[page#section^block|alias]]).
type Token int

// Token kinds, in declaration order. The original declared these
// without a type, leaving them as untyped int constants; typing them
// as Token keeps the identical values (0..8) while letting the
// compiler catch accidental misuse.
const (
	LinkBeginToken Token = iota // "[["
	LinkEndToken                // "]]"
	LinkTextToken               // link target text — never emitted in this file yet
	LinkAliasBeginToken         // "|"
	LinkAliasTextToken          // alias text — never emitted in this file yet
	LinkSectionBeginToken       // "#"
	LinkSectionTextToken        // section text — never emitted in this file yet
	LinkBlockBeginToken         // "^"
	LinkBlockTextToken          // block-ref text — never emitted in this file yet
)
/*
var (
LinkBeginToken = Token{Raw: "[["}
LinkEndToken = Token{Raw: "]]"}
LinkTextToken = Token{}
LinkAliasBeginToken = Token{Raw: "|"}
LinkAliasTextToken = Token{}
LinkSectionBeginToken = Token{Raw: "#"}
LinkSectionTextToken = Token{}
LinkBlockBeginToken = Token{Raw: "^"}
LinkBlockTextToken = Token{}
)
*/
// Lexer holds the scanning state for one input string.
type Lexer struct {
	// input is the text split into runes, each stored as a one-rune string.
	input []string
	// pos indexes the next element of input to be consumed by Next.
	pos int
	// link is set on "[[" and cleared on "]]" by Run; it is never read
	// anywhere in this file. NOTE(review): presumably meant to gate
	// in-link behavior — confirm intended use.
	link bool
	// Tokens delivers emitted tokens; Run closes it when input is exhausted.
	Tokens chan Token
}
// Lex builds a Lexer over input, splits the text into runes, and
// starts the scanning goroutine. The returned channel (also exposed as
// l.Tokens) delivers tokens until the input is exhausted, at which
// point Run closes it.
func Lex(input string) (*Lexer, chan Token) {
	runes := bufio.NewScanner(strings.NewReader(input))
	runes.Split(bufio.ScanRunes)

	var chars []string
	for runes.Scan() {
		chars = append(chars, runes.Text())
	}

	l := &Lexer{
		input:  chars,
		Tokens: make(chan Token),
	}
	go l.Run()
	return l, l.Tokens
}
// Next consumes and returns the next rune of the input as a string.
// Once the input is exhausted it returns the sentinel "eof" and does
// not advance the position.
func (l *Lexer) Next() string {
	if l.pos >= len(l.input) {
		return "eof"
	}
	s := l.input[l.pos]
	l.pos++
	return s
}
// Backup steps the position back one rune, undoing the previous Next.
// NOTE(review): unsafe to call after Next returned "eof" — Next does
// not advance at end of input, so Backup would move before the last
// consumed rune (this is exactly the bug in the original Peek).
func (l *Lexer) Backup() {
	l.pos--
}
// Peek returns the upcoming rune (as a string) without consuming it,
// or "eof" when the input is exhausted.
//
// Fixed: the original implemented Peek as Next()+Backup(). At end of
// input, Next returns "eof" WITHOUT advancing pos, so the
// unconditional Backup moved pos backwards and re-exposed the last
// rune — e.g. an input ending in "[" made Run loop forever.
func (l *Lexer) Peek() string {
	if l.pos >= len(l.input) {
		return "eof"
	}
	return l.input[l.pos]
}
// Emit sends t on the Tokens channel. The channel is unbuffered, so
// this blocks until a receiver is ready.
func (l *Lexer) Emit(t Token) {
	// log.Printf("token emitted: %#v\n", t)
	l.Tokens <- t
}
// Run scans the input rune by rune, emitting link tokens on l.Tokens,
// and closes the channel when the input is exhausted so receivers can
// range over it. Intended to run in its own goroutine (see Lex).
//
// Fixed: the original nested the "|"/"#"/"^" switch and the "]]" check
// INSIDE the `s == "["` branch, where s was always "[" — none of those
// cases could ever fire, so only LinkBeginToken was ever emitted. They
// are hoisted to the loop's top level here. The "|"/"#"/"^" markers
// are emitted only while inside a link (l.link), which appears to be
// that field's purpose — confirm against the intended grammar. The
// *TextToken kinds are still never emitted anywhere — TODO.
func (l *Lexer) Run() {
	defer close(l.Tokens)
	for {
		s := l.Next()
		switch {
		case s == "eof":
			return
		case s == "[" && l.Peek() == "[":
			l.link = true
			l.Next() // consume the second "["
			l.Emit(LinkBeginToken)
		case s == "]" && l.Peek() == "]":
			l.link = false
			l.Next() // consume the second "]"
			l.Emit(LinkEndToken)
		case l.link && s == "|":
			l.Emit(LinkAliasBeginToken)
		case l.link && s == "#":
			l.Emit(LinkSectionBeginToken)
		case l.link && s == "^":
			l.Emit(LinkBlockBeginToken)
		}
	}
}