Compare commits

..

16 Commits
dev ... main

Author SHA1 Message Date
Nick Dumas 0d4101de26 don't need this 6 months ago
Nick Dumas be0d629280 Cleaning test code and output up with testify 1 year ago
Nick Dumas 8b4ba717e2 Renaming in preparation for more tests 1 year ago
Nick Dumas 3f3696eb9f Lexer can now handle multi-line inputs 1 year ago
Nick Dumas 17699a114c tests for simple parsing of wikilinks in larger content blocks 1 year ago
Nick Dumas 4235beff81 can't assume this empty token will be there anymore 1 year ago
Nick Dumas 37dcdb9168 Logging is tuneable for better debugging experience 1 year ago
Nick Dumas ee6cad7662 removing nonexistent origin for this repo 1 year ago
Nick Dumas 82dd27bd06 More tests 1 year ago
Nick Dumas 04458800d1 tests refactored pretty nicely now 1 year ago
Nick Dumas 25f949fafd refactoring tests to make this easier 1 year ago
Nick Dumas 0b377a0500 Expand test cases, refine logging configuration
Wiki links don't occur in a vacuum, I need to be able to parse entire
blocks of markdown and extract the wikilinks properly.

Now that I've gotten the lexer working I mostly don't need to see every
single change to the position/start/width values, but the
instrumentation will be useful for future debugging.
1 year ago
Nick Dumas ebeea16b0d Full test suite for Obsidian wikilinks 1 year ago
Nick Dumas a8fe8282b3 passing tests 1 year ago
Nick Dumas d5a48dd362 lexIdent is the problem for sure 1 year ago
Nick Dumas 0c7b00e9b5 non-concurrent, test cases back to expected state 1 year ago

2
.gitignore vendored

@@ -2,3 +2,5 @@ node_modules/*
package*.json
dist/*
reports/*
lexer.log
parser.log

@@ -99,7 +99,6 @@ all: debug setup dep format lint test bench build dist
git-push:
git push origin main --tags
git push github main --tags
release-major: bump-major git-push

@@ -0,0 +1,17 @@
package main
import (
"log"
"go.uber.org/zap/zapcore"
"code.ndumas.com/ndumas/wikilink-parser"
)
func main() {
l := wikilink.Lex("debugLexer", `this is a [[wikilink]]`, zapcore.InfoLevel)
for _, item := range l.Items {
item := item
log.Printf("%#+v\n", item)
}
}

@@ -2,9 +2,15 @@ module code.ndumas.com/ndumas/wikilink-parser
go 1.19
require go.uber.org/zap v1.24.0
require (
github.com/stretchr/testify v1.8.0
go.uber.org/zap v1.24.0
)
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
go.uber.org/atomic v1.7.0 // indirect
go.uber.org/multierr v1.6.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

@@ -6,8 +6,11 @@ github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI=
@@ -15,4 +18,8 @@ go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60=
go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

@@ -1,31 +0,0 @@
// Code generated by "stringer -type=ItemType"; DO NOT EDIT.
package wikilink
import "strconv"
// _ exists only to trip an "invalid array index" compile error when the
// ItemType constant values drift from those stringer generated against.
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[ItemError-0]
_ = x[ItemEOF-1]
_ = x[ItemIdent-2]
_ = x[ItemOpenLink-3]
_ = x[ItemCloseLink-4]
_ = x[ItemHeading-5]
_ = x[ItemBlockRef-6]
_ = x[ItemAlias-7]
_ = x[ItemText-8]
}
// _ItemType_name concatenates every ItemType name; _ItemType_index holds
// the byte offset where each name begins (and the final end offset).
const _ItemType_name = "ItemErrorItemEOFItemIdentItemOpenLinkItemCloseLinkItemHeadingItemBlockRefItemAliasItemText"
var _ItemType_index = [...]uint8{0, 9, 16, 25, 37, 50, 61, 73, 82, 90}
// String returns the constant's name, or "ItemType(n)" when i is out of range.
func (i ItemType) String() string {
if i < 0 || i >= ItemType(len(_ItemType_index)-1) {
return "ItemType(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _ItemType_name[_ItemType_index[i]:_ItemType_index[i+1]]
}

@@ -0,0 +1,31 @@
// Code generated by "stringer -type=LexemeType"; DO NOT EDIT.
package wikilink
import "strconv"
// _ exists only to trip an "invalid array index" compile error when the
// LexemeType constant values drift from those stringer generated against.
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[LexError-0]
_ = x[LexEOF-1]
_ = x[LexIdent-2]
_ = x[LexOpenLink-3]
_ = x[LexCloseLink-4]
_ = x[LexHeading-5]
_ = x[LexBlockRef-6]
_ = x[LexAlias-7]
_ = x[LexText-8]
}
// _LexemeType_name concatenates every LexemeType name; _LexemeType_index
// holds the byte offset where each name begins (and the final end offset).
const _LexemeType_name = "LexErrorLexEOFLexIdentLexOpenLinkLexCloseLinkLexHeadingLexBlockRefLexAliasLexText"
var _LexemeType_index = [...]uint8{0, 8, 14, 22, 33, 45, 55, 66, 74, 81}
// String returns the constant's name, or "LexemeType(n)" when i is out of range.
func (i LexemeType) String() string {
if i < 0 || i >= LexemeType(len(_LexemeType_index)-1) {
return "LexemeType(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _LexemeType_name[_LexemeType_index[i]:_LexemeType_index[i+1]]
}

@@ -1,26 +1,29 @@
//go:generate stringer -type=ItemType
//go:generate stringer -type=LexemeType
package wikilink
import (
"fmt"
// "os"
"strings"
"sync"
// "unicode"
"unicode/utf8"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
const (
ItemError ItemType = iota
ItemEOF
ItemIdent
ItemOpenLink
ItemCloseLink
ItemHeading
ItemBlockRef
ItemAlias
ItemText
LexError LexemeType = iota
LexEOF
LexIdent
LexOpenLink
LexCloseLink
LexHeading
LexBlockRef
LexAlias
LexText
)
const (
@@ -35,19 +38,41 @@ const (
BlockRef = "#^"
)
func Lex(name, input string) *Lexer {
func Lex(name, input string, level zapcore.Level) *Lexer {
encoderCfg := zap.NewProductionEncoderConfig()
encoderCfg.EncodeTime = zapcore.ISO8601TimeEncoder
config := zap.Config{
Level: zap.NewAtomicLevelAt(level),
EncoderConfig: encoderCfg,
OutputPaths: []string{
"./lexer.log",
"stdout",
},
Encoding: "console",
ErrorOutputPaths: []string{
"stderr",
},
InitialFields: map[string]interface{}{
"lexer": name,
// "pid": os.Getpid(),
},
}
l := &Lexer{
L: zap.NewExample().Sugar().Named("lexer"),
L: zap.Must(config.Build()).Named("lexer"),
name: name,
input: input,
state: lexText,
items: make(chan Item, 2),
Items: make([]Lexeme, 0),
}
go l.run()
// go l.run()
l.run()
return l
}
/* shouldn't need this in non-concurrent implementation
func (l *Lexer) NextItem() Item {
for {
select {
@@ -66,6 +91,7 @@ func (l *Lexer) NextItem() Item {
}
}
}
*/
func (l *Lexer) ignore() {
l.SetStart(l.pos)
@@ -76,15 +102,18 @@ func (l *Lexer) backup() {
}
type Lexer struct {
L *zap.SugaredLogger
name, input string
start, pos, width int
state stateFn
items chan Item
L *zap.Logger
name, input string
start, pos, width int
state stateFn
// Items chan Item
Items []Lexeme
widthMutex, startMutex, posMutex, chanMutex sync.Mutex
}
func (l *Lexer) peek() rune {
L := l.L.Named("peek")
L.Debug("peeking")
r := l.next()
l.backup()
@@ -92,22 +121,39 @@
}
// accept consumes the next rune if it belongs to valid, reporting
// whether a rune was consumed. On a miss the position is restored
// via backup, so the input is left untouched.
func (l *Lexer) accept(valid string) bool {
	logger := l.L.Named("accept").With(
		zap.String("input", valid),
	)

	if !strings.ContainsRune(valid, l.next()) {
		logger.Debug("rejected input")
		l.backup()

		return false
	}

	logger.Debug("matched input")

	return true
}
// acceptRun consumes a maximal run of consecutive runes drawn from
// valid, then backs up over the first rune that did not match.
func (l *Lexer) acceptRun(valid string) {
	logger := l.L.Named("acceptRun").With(
		zap.String("input", valid),
	)
	logger.Debug("scanning")

	for {
		if !strings.ContainsRune(valid, l.next()) {
			break
		}
	}
	l.backup()
}
func (l *Lexer) emit(t ItemType) {
func (l *Lexer) emit(t LexemeType) {
i := Lexeme{t, l.input[l.GetStart():l.GetPos()]}
L := l.L.Named("emit").With(
zap.String("item", i.String()),
)
L.Info("emitting lexeme")
l.Items = append(l.Items, i)
l.SetStart(l.GetPos())
/* original concurrent implementation
defer l.chanMutex.Unlock()
l.chanMutex.Lock()
i := Item{t, l.input[l.GetStart():l.GetPos()]}
@@ -116,99 +162,131 @@ func (l *Lexer) emit(t ItemType) {
zap.Int("width", l.GetWidth()),
).Named("emit")
L.Debugw("emitting item",
L.Debug("emitting item",
zap.String("item", i.String()),
)
l.items <- i
l.SetStart(l.GetPos())
*/
}
func (l *Lexer) errorf(format string, args ...interface{}) stateFn {
L := l.L.Named("errorf")
errorItem := Item{
ItemError,
errorItem := Lexeme{
LexError,
fmt.Sprintf(format, args...),
}
L.Debugw("emitting errorItem",
L.Debug("emitting errorItem",
zap.String("error", errorItem.String()),
)
l.items <- errorItem
l.Items = append(l.Items, errorItem)
return nil
}
// next consumes and returns the next rune from the input, advancing
// pos by the rune's encoded byte width. At end of input it records a
// zero width (so backup stays a no-op) and returns EOF.
func (l *Lexer) next() rune {
	L := l.L.Named("next")

	if l.GetPos() >= len(l.input) {
		L.Debug("end of input reached")
		l.SetWidth(0)

		return EOF
	}

	// Decode a full UTF-8 rune, not a single byte: widths range 1-4.
	// (The original pre-declared `var r rune`; the := below already
	// declares it, so that line was redundant and has been removed.)
	r, width := utf8.DecodeRuneInString(l.input[l.GetPos():])
	L.Debug("found rune",
		zap.String("rune", string(r)),
		zap.Int("width", width),
	)
	l.SetWidth(width)
	l.SetPos(l.GetPos() + l.GetWidth())

	return r
}
// run drives the lexer state machine synchronously: each stateFn
// consumes input and returns the next state, until a terminal state
// returns nil. Lexemes accumulate in l.Items as states call emit.
func (l *Lexer) run() {
for state := lexText; state != nil; {
state = state(l)
}
/* original concurrent implementation
defer l.chanMutex.Unlock()
for state := lexText; state != nil; {
state = state(l)
}
l.chanMutex.Lock()
close(l.items)
*/
}
// GetPos returns the lexer's current byte offset into the input,
// guarded by posMutex (a holdover from the concurrent implementation).
func (l *Lexer) GetPos() int {
	l.posMutex.Lock()
	defer l.posMutex.Unlock()

	l.L.Named("GetPos").Debug("getting current position",
		zap.Int("old", l.pos),
	)

	return l.pos
}
// SetPos records a new byte offset for the lexer, logging the old and
// new values for debugging.
func (l *Lexer) SetPos(pos int) {
	l.posMutex.Lock()
	defer l.posMutex.Unlock()

	l.L.Named("SetPos").Debug("setting new position",
		zap.Int("new", pos),
		zap.Int("old", l.pos),
	)
	l.pos = pos
}
// GetWidth returns the byte width of the most recently read rune,
// guarded by widthMutex (a holdover from the concurrent implementation).
func (l *Lexer) GetWidth() int {
	l.widthMutex.Lock()
	defer l.widthMutex.Unlock()

	// Fixed debug message: it previously read "setting new width"
	// (copy-pasted from SetWidth) even though this is the read path.
	l.L.Named("GetWidth").Debug("getting current width",
		zap.Int("old", l.width),
	)

	return l.width
}
// SetWidth records the byte width of the most recently read rune,
// logging the old and new values for debugging.
func (l *Lexer) SetWidth(width int) {
	l.widthMutex.Lock()
	defer l.widthMutex.Unlock()

	l.L.Named("SetWidth").Debug("setting new width",
		zap.Int("new", width),
		zap.Int("old", l.width),
	)
	l.width = width
}
// GetStart returns the byte offset where the current lexeme began,
// guarded by startMutex (a holdover from the concurrent implementation).
func (l *Lexer) GetStart() int {
	l.startMutex.Lock()
	defer l.startMutex.Unlock()

	l.L.Named("GetStart").Debug("getting old start",
		zap.Int("old", l.start),
	)

	return l.start
}
// SetStart records the byte offset where the next lexeme begins,
// logging the old and new values for debugging.
func (l *Lexer) SetStart(start int) {
	l.startMutex.Lock()
	defer l.startMutex.Unlock()

	l.L.Named("SetStart").Debug("setting new start",
		zap.Int("new", start),
		zap.Int("old", l.start),
	)
	l.start = start
}
type stateFn func(*Lexer) stateFn
type ItemType int
type LexemeType int
type Item struct {
Typ ItemType
type Lexeme struct {
Typ LexemeType
Val string
}
func (i Item) String() string {
func (i Lexeme) String() string {
switch i.Typ {
case ItemEOF:
case LexEOF:
return "EOF"
case ItemError:
case LexError:
return i.Val
}

@@ -3,109 +3,314 @@ package wikilink_test
import (
"testing"
"github.com/stretchr/testify/assert"
"go.uber.org/zap/zapcore"
"code.ndumas.com/ndumas/wikilink-parser"
)
func Test_Lexer(t *testing.T) {
// t.Parallel()
tcs := []struct {
name string
in string
expected []wikilink.Item
}{
{
name: "wikilink", in: "[[wikilink]]", expected: []wikilink.Item{
{Typ: wikilink.ItemOpenLink, Val: "[["},
{Typ: wikilink.ItemIdent, Val: "wikilink"},
{Typ: wikilink.ItemCloseLink, Val: "]]"},
},
var SingleWikilink = []tc{
{
name: "wikilink",
in: "[[wikilink]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
{
name: "wikilink|display name", in: "[[wikilink|display name]]", expected: []wikilink.Item{
{Typ: wikilink.ItemOpenLink, Val: "[["},
{Typ: wikilink.ItemIdent, Val: "wikilink"},
{Typ: wikilink.ItemAlias, Val: "|"},
{Typ: wikilink.ItemIdent, Val: "display name"},
{Typ: wikilink.ItemCloseLink, Val: "]]"},
},
},
{
name: "wikilink|display name",
in: "[[wikilink|display name]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
{
name: "wikilink|display name|second pipe", in: "[[wikilink|display name|second pipe]]", expected: []wikilink.Item{
{Typ: wikilink.ItemOpenLink, Val: "[["},
{Typ: wikilink.ItemIdent, Val: "wikilink"},
{Typ: wikilink.ItemAlias, Val: "|"},
{Typ: wikilink.ItemIdent, Val: "display name"},
{Typ: wikilink.ItemAlias, Val: "|"},
{Typ: wikilink.ItemIdent, Val: "second pipe"},
{Typ: wikilink.ItemCloseLink, Val: "]]"},
},
},
{
name: "wikilink|display name|second pipe",
in: "[[wikilink|display name|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
{
name: "wikilink with numeric alias|420|second pipe", in: "[[wikilink|420|second pipe]]", expected: []wikilink.Item{
{Typ: wikilink.ItemOpenLink, Val: "[["},
{Typ: wikilink.ItemIdent, Val: "wikilink"},
{Typ: wikilink.ItemAlias, Val: "|"},
{Typ: wikilink.ItemIdent, Val: "420"},
{Typ: wikilink.ItemAlias, Val: "|"},
{Typ: wikilink.ItemIdent, Val: "second pipe"},
{Typ: wikilink.ItemCloseLink, Val: "]]"},
},
},
{
name: "wikilink with numeric alias|420|second pipe",
in: "[[wikilink|420|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "420"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
{
name: "wikilink with spaces in filename", in: "[[wikilink spaces]]", expected: []wikilink.Item{
{Typ: wikilink.ItemOpenLink, Val: "[["},
{Typ: wikilink.ItemIdent, Val: "wikilink spaces"},
{Typ: wikilink.ItemCloseLink, Val: "]]"},
},
},
{
name: "wikilink with spaces in filename",
in: "[[wikilink spaces]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink spaces"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
{
name: "#heading", in: "[[#heading]]", expected: []wikilink.Item{
{Typ: wikilink.ItemOpenLink, Val: "[["},
{Typ: wikilink.ItemHeading, Val: "#"},
{Typ: wikilink.ItemIdent, Val: "heading"},
{Typ: wikilink.ItemCloseLink, Val: "]]"},
},
},
{
name: "#heading",
in: "[[#heading]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: ""},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
{
name: "wikilink#heading", in: "[[wikilink#heading]]", expected: []wikilink.Item{
{Typ: wikilink.ItemOpenLink, Val: "[["},
{Typ: wikilink.ItemIdent, Val: "wikilink"},
{Typ: wikilink.ItemHeading, Val: "#"},
{Typ: wikilink.ItemIdent, Val: "heading"},
{Typ: wikilink.ItemCloseLink, Val: "]]"},
},
},
{
name: "wikilink#heading",
in: "[[wikilink#heading]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#heading|display name",
in: "[[wikilink#heading|display name]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#heading|display name|second pipe",
in: "[[wikilink#heading|display name|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with numeric aliases#heading|420|display name",
in: "[[wikilink#heading|420|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "420"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
},
{
name: "#^blockRef",
in: "[[#^blockRef]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: ""},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#^blockRef",
in: "[[wikilink#^blockRef]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
/*
{name: "", in: "", expected: []wikilink.ItemType{}},
{name: "wikilink#heading|display name", in: "[[wikilink#heading|display name]]", expected: []wikilink.ItemType{}},
{name: "wikilink#heading|display name|second pipe", in: "[[wikilink#heading|display name|second pipe]]", expected: []wikilink.ItemType{}},
{name: "wikilink with numeric aliases#heading|420|display name", in: "[[wikilink#heading|420|second pipe]]", expected: []wikilink.ItemType{}},
{name: "^blockRef", in: "[[^blockRef]]", expected: []wikilink.ItemType{}},
{name: "wikilink^blockRef", in: "[[wikilink^blockRef]]", expected: []wikilink.ItemType{}},
{name: "wikilink^blockRef|display name", in: "[[wikilink#^blockRef|display name]]", expected: []wikilink.ItemType{}},
{name: "wikilink^blockRef|display name|second pipe", in: "[[wikilink#^blockRef|display name|second pipe]]", expected: []wikilink.ItemType{}},
{name: "wikilink with numeric aliases^blockRef|420|second pipe", in: "[[wikilink#^blockRef|420|second pipe]]", expected: []wikilink.ItemType{}},
*/
},
{
name: "wikilink#^blockRef|display name",
in: "[[wikilink#^blockRef|display name]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#^blockRef|display name|second pipe",
in: "[[wikilink#^blockRef|display name|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with numeric aliases#^blockRef|420|second pipe",
in: "[[wikilink#^blockRef|420|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "420"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
},
},
}
// Test_ObsidianWikilinks_LinksEndOfMultiLineInput places each
// SingleWikilink case at the end of a multi-line document. Each
// preceding line is expected as its own LexText lexeme; the trailing
// empty LexText mirrors the lexer's boundary behavior.
func Test_ObsidianWikilinks_LinksEndOfMultiLineInput(t *testing.T) {
for _, tc := range SingleWikilink {
mut, test := mutateTestCase(
tc,
" test data please ignore.\nbling blonk more lines\nbling blong\nthis is a",
"",
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: " test data please ignore.\n"},
{Typ: wikilink.LexText, Val: "bling blonk more lines\n"},
{Typ: wikilink.LexText, Val: "bling blong\n"},
{Typ: wikilink.LexText, Val: "this is a"},
},
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
// Test_ObsidianWikilinks_LinksStartOfMultiLineInput places each
// SingleWikilink case at the start of a multi-line document. The
// leading empty LexText mirrors the lexer's boundary behavior; each
// following line is expected as its own LexText lexeme.
func Test_ObsidianWikilinks_LinksStartOfMultiLineInput(t *testing.T) {
for _, tc := range SingleWikilink {
mut, test := mutateTestCase(
tc,
"",
" test data please ignore.\nbling blonk more lines\nbling blong\nthis is a",
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: ""},
},
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: " test data please ignore.\n"},
{Typ: wikilink.LexText, Val: "bling blonk more lines\n"},
{Typ: wikilink.LexText, Val: "bling blong\n"},
{Typ: wikilink.LexText, Val: "this is a"},
},
)
t.Run(mut.name, test)
}
}
// Test_ObsidianWikilinks_LinksStartOfInput places each SingleWikilink
// case at the start of a single-line document with trailing plain text.
func Test_ObsidianWikilinks_LinksStartOfInput(t *testing.T) {
for _, tc := range SingleWikilink {
mut, test := mutateTestCase(
tc,
"",
" test data please ignore",
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: ""},
},
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: " test data please ignore"},
},
)
t.Run(mut.name, test)
}
}
// Test_ObsidianWikilinks_LinksEndOfInput places each SingleWikilink
// case at the end of a single-line document with leading plain text.
func Test_ObsidianWikilinks_LinksEndOfInput(t *testing.T) {
for _, tc := range SingleWikilink {
mut, test := mutateTestCase(
tc,
"this is a ",
"",
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: "this is a "},
},
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
// Test_ObsidianWikilinks_Basic runs every SingleWikilink case with no
// surrounding text. The empty LexText lexemes on both sides mirror the
// lexer's current behavior of emitting empty text tokens at boundaries.
func Test_ObsidianWikilinks_Basic(t *testing.T) {
for _, tc := range SingleWikilink {
mut, test := mutateTestCase(
tc,
"",
"",
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: ""},
},
[]wikilink.Lexeme{
{Typ: wikilink.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
// tc is a single lexer test case: a named input document and the exact
// lexeme stream the lexer is expected to produce for it.
type tc struct {
name string
in string
expected []wikilink.Lexeme
}
func mutateTestCase(tc tc, prefix, suffix string, expectedPrefix, expectedSuffix []wikilink.Lexeme) (tc, func(t *testing.T)) {
tc.in = prefix + tc.in
tc.in = tc.in + suffix
if expectedPrefix != nil {
tc.expected = append(expectedPrefix, tc.expected...)
}
if expectedSuffix != nil {
tc.expected = append(tc.expected, expectedSuffix...)
}
for _, tc := range tcs {
tc := tc
t.Run(tc.name, func(t *testing.T) {
// t.Parallel()
l := wikilink.Lex("testLexer", tc.in)
defer l.L.Sync()
for _, e := range tc.expected {
n := l.NextItem()
if e.Typ != n.Typ {
t.Logf("expected Type %s, received %s", e.Typ.String(), n.Typ.String())
t.Fail()
}
if e.Val != n.Val {
t.Logf("expected Value %q, received %q", e.Val, n.Val)
t.Fail()
}
}
})
return tc, func(t *testing.T) {
l := wikilink.Lex("testLexer", tc.in, zapcore.WarnLevel)
defer l.L.Sync()
assert.Equal(t, tc.expected, l.Items, "token stream mismatch")
}
}

@@ -1,275 +0,0 @@
mkdir -p "/home/ndumas/work/wikilink-parser/reports"
mkdir -p "/home/ndumas/work/wikilink-parser/dist"
go clean code.ndumas.com/ndumas/wikilink-parser
rm -vrf "/home/ndumas/work/wikilink-parser/dist"/*
rm -vf "/home/ndumas/work/wikilink-parser/reports"/*
removed '/home/ndumas/work/wikilink-parser/reports/test.out'
go get -d -t code.ndumas.com/ndumas/wikilink-parser/...
go install golang.org/x/tools/cmd/stringer@latest
go generate
go test -race -v -tags "release" $(go list "code.ndumas.com/ndumas/wikilink-parser/..." | grep -v /vendor/) | tee "/home/ndumas/work/wikilink-parser/reports/test.out"
=== RUN Test_Lexer
=== RUN Test_Lexer/wikilink
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink|display_name
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":23,"width":1,"item":"ItemIdent:\"display name\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":25,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink|display_name|second_pipe
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":23,"width":1,"item":"ItemIdent:\"display name\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":25,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":24,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":35,"width":1,"item":"ItemIdent:\"second pipe\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":37,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink_with_numeric_alias|420|second_pipe
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":14,"width":1,"item":"ItemIdent:\"420\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":15,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":26,"width":1,"item":"ItemIdent:\"second pipe\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":28,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink_with_spaces_in_filename
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":17,"width":1,"item":"ItemIdent:\"wikilink spaces\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":19,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/#heading
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"#heading\""}
lexer_test.go:100: expected Type ItemHeading, received ItemIdent
lexer_test.go:105: expected Value "#", received "#heading"
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":1,"item":"ItemCloseLink:\"]]\""}
lexer_test.go:100: expected Type ItemIdent, received ItemCloseLink
lexer_test.go:105: expected Value "heading", received "]]"
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
lexer_test.go:100: expected Type ItemCloseLink, received ItemText
lexer_test.go:105: expected Value "]]", received ""
=== RUN Test_Lexer/wikilink#heading
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":37,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":28,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":19,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
==================
WARNING: DATA RACE
Read at 0x00c0000822b0 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:82 +0x69
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822b0 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.lexOpenLink()
/home/ndumas/work/wikilink-parser/states.go:96 +0x86
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
==================
WARNING: DATA RACE
Write at 0x00c0000822b8 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).next()
/home/ndumas/work/wikilink-parser/lexer.go:142 +0xe8
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:85 +0xc4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822b8 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).next()
/home/ndumas/work/wikilink-parser/lexer.go:142 +0xe8
code.ndumas.com/ndumas/wikilink-parser.lexIdent()
/home/ndumas/work/wikilink-parser/states.go:31 +0x64
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
==================
WARNING: DATA RACE
Read at 0x00c0000822a8 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit()
/home/ndumas/work/wikilink-parser/lexer.go:109 +0x64
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:88 +0xf4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822a8 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit()
/home/ndumas/work/wikilink-parser/lexer.go:119 +0x4cf
code.ndumas.com/ndumas/wikilink-parser.lexOpenLink()
/home/ndumas/work/wikilink-parser/states.go:97 +0xa4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
2023/07/01 18:25:42 lexHeading
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":20,"width":0,"item":"ItemText:\"wikilink#heading]]\""}
testing.go:1319: race detected during execution of test
--- FAIL: Test_Lexer (0.01s)
--- PASS: Test_Lexer/wikilink (0.00s)
--- PASS: Test_Lexer/wikilink|display_name (0.00s)
--- PASS: Test_Lexer/wikilink|display_name|second_pipe (0.00s)
--- PASS: Test_Lexer/wikilink_with_numeric_alias|420|second_pipe (0.00s)
--- PASS: Test_Lexer/wikilink_with_spaces_in_filename (0.00s)
--- FAIL: Test_Lexer/#heading (0.00s)
--- FAIL: Test_Lexer/wikilink#heading (0.00s)
panic: runtime error: slice bounds out of range [:21] with length 20 [recovered]
panic: runtime error: slice bounds out of range [:21] with length 20
goroutine 34 [running]:
testing.tRunner.func1.2({0x6e8160, 0xc0000e20a8})
/usr/lib/golang/src/testing/testing.go:1396 +0x372
testing.tRunner.func1()
/usr/lib/golang/src/testing/testing.go:1399 +0x5f0
panic({0x6e8160, 0xc0000e20a8})
/usr/lib/golang/src/runtime/panic.go:890 +0x262
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit(0xc000082280, 0x5)
/home/ndumas/work/wikilink-parser/lexer.go:109 +0x506
code.ndumas.com/ndumas/wikilink-parser.lexHeading(0xc000082280)
/home/ndumas/work/wikilink-parser/states.go:58 +0xa5
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem(0xc000082280)
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd6
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1(0xc000190b60)
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18d
testing.tRunner(0xc000190b60, 0xc00019cb40)
/usr/lib/golang/src/testing/testing.go:1446 +0x217
created by testing.(*T).Run
/usr/lib/golang/src/testing/testing.go:1493 +0x75e
FAIL code.ndumas.com/ndumas/wikilink-parser 0.024s
FAIL

@ -28,78 +28,90 @@ func isBlockRef(s string) bool {
func lexIdent(l *Lexer) stateFn {
for {
r := l.next()
l.L.Named("lexIdent").Debug("stepping through lexIdent",
zap.String("r", string(r)),
)
L := l.L.Named("lexIdent")
s := l.input[l.GetPos():]
if r == '\\' { // i think this will handle escape characters?
L.Debug("stepping through lexIdent")
if s[0] == '\\' { // i think this will handle escape characters?
break
}
switch {
case isCloseLink(s):
L.Debug("found CloseLink")
l.emit(LexIdent)
return lexCloseLink
case isBlockRef(s):
l.emit(ItemIdent)
L.Debug("found BlockRef")
l.emit(LexIdent)
return lexBlockRef
case isAlias(s):
l.emit(ItemIdent)
L.Debug("found Alias")
l.emit(LexIdent)
return lexAlias
case isCloseLink(s):
l.emit(ItemIdent)
return lexCloseLink
case isHeading(s):
l.emit(ItemIdent)
L.Debug("found Heading")
l.emit(LexIdent)
return lexHeading
}
r := l.next()
L = l.L.With(
zap.String("rune", string(r)),
)
}
return l.errorf("malformed link")
}
// lexHeading consumes the heading delimiter ("#" per len(Heading)) by
// advancing the position past it, emits it as a single LexHeading item,
// and returns to lexIdent to scan the heading text that follows.
func lexHeading(l *Lexer) stateFn {
	l.SetPos(l.GetPos() + len(Heading))
	l.emit(LexHeading)

	return lexIdent
}
// lexBlockRef consumes the block-reference delimiter by advancing the
// position past it, emits it as a single LexBlockRef item, and returns to
// lexIdent to scan the reference text that follows.
func lexBlockRef(l *Lexer) stateFn {
	l.SetPos(l.GetPos() + len(BlockRef))
	l.emit(LexBlockRef)

	return lexIdent
}
// lexAlias consumes the alias delimiter ("|" per len(Alias)) by advancing
// the position past it, emits it as a single LexAlias item, and returns to
// lexIdent to scan the alias text that follows.
func lexAlias(l *Lexer) stateFn {
	l.SetPos(l.GetPos() + len(Alias))
	l.emit(LexAlias)

	return lexIdent
}
// lexText scans plain (non-wikilink) text. It emits a LexText item and
// transitions to lexOpenLink when the remaining input starts a wikilink,
// terminates the lexer at EOF, and restarts itself at each newline so the
// lexer handles multi-line inputs one line at a time.
func lexText(l *Lexer) stateFn {
	L := l.L.Named("lexText")
	for {
		// Peek at the unconsumed input for a link opener before
		// consuming the next rune.
		if isOpenLink(l.input[l.GetPos():]) {
			L.Debug("found openLink")
			l.emit(LexText)

			return lexOpenLink
		}
		r := l.next()
		switch {
		case r == EOF:
			// Flush any trailing text and stop the state machine.
			l.emit(LexText)

			return nil
		case r == '\n':
			// Flush the current line and re-enter lexText for the next.
			l.emit(LexText)

			return lexText
		}
	}
}
// lexOpenLink consumes the link-opening delimiter ("[[" per len(OpenLink))
// by advancing the position past it, emits it as a single LexOpenLink
// item, and hands off to lexIdent to scan the link's interior.
func lexOpenLink(l *Lexer) stateFn {
	l.SetPos(l.GetPos() + len(OpenLink))
	l.emit(LexOpenLink)

	return lexIdent
}
// lexCloseLink consumes the link-closing delimiter ("]]" per
// len(CloseLink)) by advancing the position past it, emits it as a single
// LexCloseLink item, and returns to lexText for the content after the
// link.
func lexCloseLink(l *Lexer) stateFn {
	l.SetPos(l.GetPos() + len(CloseLink))
	l.emit(LexCloseLink)

	return lexText
}

Loading…
Cancel
Save