saving the lexer

dev
Nick Dumas 2 years ago
parent c56128156e
commit 8098d75ae9

@@ -3,12 +3,11 @@ package main
 import (
 	"log"
 
-	"github.com/therealfakemoot/obsidian-parser"
+	"github.com/therealfakemoot/wikilinks-parser/lexer"
 )
 
 func main() {
-	l, tokens := lexer.Lex("[[foo/bar/butts.png]]")
-	go l.Run()
+	_, tokens := lexer.Lex(`[[foo/bar/butts.png]]`)
 	for t := range tokens {
 		log.Printf("%#v\n", t)
 	}
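The dropped go l.Run() and the discarded first return value suggest that lexer.Lex now starts its run loop itself, so callers only consume the token channel. A minimal sketch of that shape, purely as an assumption about the package internals (only the Lex name, the Lexer fields, and the Tokens channel come from this diff):

// Hypothetical sketch, not part of this commit: Lex builds the Lexer and
// starts Run in a goroutine so callers only need to range over Tokens.
func Lex(input string) (*Lexer, chan Token) {
	l := &Lexer{
		input:  strings.Split(input, ""), // assumed: lex one character at a time
		Tokens: make(chan Token),
	}
	go l.Run()
	return l, l.Tokens
}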

@@ -1,3 +1,3 @@
-module github.com/therealfakemoot/obsidian-parser
+module github.com/therealfakemoot/wikilinks-parser
 
 go 1.19

@@ -6,10 +6,21 @@ import (
 	"strings"
 )
 
-type Token struct {
-	Raw string
-}
+type Token int
+
+const (
+	LinkBeginToken = iota
+	LinkEndToken
+	LinkTextToken
+	LinkAliasBeginToken
+	LinkAliasTextToken
+	LinkSectionBeginToken
+	LinkSectionTextToken
+	LinkBlockBeginToken
+	LinkBlockTextToken
+)
 
+/*
 var (
 	LinkBeginToken = Token{Raw: "[["}
 	LinkEndToken = Token{Raw: "]]"}
@@ -21,10 +32,12 @@ var (
 	LinkBlockBeginToken = Token{Raw: "^"}
 	LinkBlockTextToken = Token{}
 )
+*/
 
 type Lexer struct {
 	input []string
 	pos int
+	link bool
 	Tokens chan Token
 }
@@ -79,13 +92,24 @@ func (l *Lexer) Run() {
 		}
 		if s == "[" && l.Peek() == "[" {
+			l.link = true
 			l.Next()
 			l.Emit(LinkBeginToken)
 		}
+
+		switch s {
+		case "|":
+			l.Emit(LinkAliasBeginToken)
+		case "#":
+			l.Emit(LinkSectionBeginToken)
+		case "^":
+			l.Emit(LinkBlockBeginToken)
+		}
+
 		if s == "]" && l.Peek() == "]" {
+			l.link = false
 			l.Next()
-			l.Emit(LinkBeginToken)
+			l.Emit(LinkEndToken)
 		}
 	}
 }
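With Token now a plain int, the %#v logging in main.go and in the tests prints bare numbers. A helper along the following lines could map values back to names for debugging; the tokenNames table and String method are an illustrative sketch built from the constants above, not part of this commit:

// tokenNames is a hypothetical lookup table for debug output.
var tokenNames = map[Token]string{
	LinkBeginToken:        "LinkBegin",
	LinkEndToken:          "LinkEnd",
	LinkTextToken:         "LinkText",
	LinkAliasBeginToken:   "LinkAliasBegin",
	LinkAliasTextToken:    "LinkAliasText",
	LinkSectionBeginToken: "LinkSectionBegin",
	LinkSectionTextToken:  "LinkSectionText",
	LinkBlockBeginToken:   "LinkBlockBegin",
	LinkBlockTextToken:    "LinkBlockText",
}

// String makes tokens print as names instead of bare ints with %v.
func (t Token) String() string {
	if name, ok := tokenNames[t]; ok {
		return name
	}
	return "Unknown"
}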

@@ -4,26 +4,47 @@ import (
 	"testing"
 )
 
-func Test_Lexer(t *testing.T) {
+func Test_LexerSimple(t *testing.T) {
 	tt := []string{
-		"[[Regular Link]]",
-		/*
-			"![[Transcluded Link]]",
-			"[[Regular Link|Alias]]",
-			"[[Regular Link#Subsection of page]]",
-			"[[Regular Link^link to block]]",
-			"[[Regular Link#Subsection of page|Alias]]",
-			"[[Regular Link^link to block|Alias]]",
-			"[[Regular Link\|Alias]]",
-			"[[Regular Link^link to block\|Alias]]",
-			"[[Regular Link#Subsection of page\|Alias]]",
-		*/
+		"[[",
+		"]]",
+		"[[foo]]",
+		"[[foo]]",
+		"[[foo|bar]]",
 	}
 
 	for _, tc := range tt {
+		t.Run(tc, func(t *testing.T) {
+			t.Logf("checking %q", tc)
+			_, tokens := Lex(tc)
+			for tok := range tokens {
+				t.Logf("found token: %#v", tok)
+			}
+		})
+	}
+}
+
+/*
+func Test_LexerFull(t *testing.T) {
+	tt := []string{
+		`[[Regular Link]]`,
+		`![[Transcluded Link]]`,
+		`[[Regular Link|Alias]]`,
+		`[[Regular Link#Subsection of page]]`,
+		`[[Regular Link^link to block]]`,
+		`[[Regular Link#Subsection of page|Alias]]`,
+		`[[Regular Link^link to block|Alias]]`,
+		`[[Regular Link\|Alias]]`,
+		`[[Regular Link^link to block\|Alias]]`,
+		`[[Regular Link#Subsection of page\|Alias]]`,
+	}
+
+	for _, tc := range tt {
 		t.Run(tc, func(t *testing.T) {
 			t.Fail()
 		})
 	}
 }
+*/

@@ -1,10 +0,0 @@
-[[Regular Link]]
-![[Transcluded Link]]
-[[Regular Link|Alias]]
-[[Regular Link#Subsection of page]]
-[[Regular Link^link to block]]
-[[Regular Link#Subsection of page|Alias]]
-[[Regular Link^link to block|Alias]]
-[[Regular Link\|Alias]]
-[[Regular Link^link to block\|Alias]]
-[[Regular Link#Subsection of page\|Alias]]