Compare commits

...

20 Commits
v0.0.2 ... main

Author SHA1 Message Date
Nick Dumas 40713dd763 bazel + gazelle integration 2 years ago
Nick Dumas b269ca3100 messing with bazel 2 years ago
Nick Dumas 9de66ba0da cleaning tests up 2 years ago
Nick Dumas 8602e2e16e don't need that anymore 2 years ago
Nick Dumas f179840fb9 more escape test cases 2 years ago
Nick Dumas be33bd2c20 expand escape character test cases 2 years ago
Nick Dumas cb9786ece8 properly handle escape characters 2 years ago
Nick Dumas b9049f340e trying to handle escape characters now 2 years ago
Nick Dumas 5e12c32fc5 Big renaming pass, this is about more than just wikilinks 2 years ago
Nick Dumas be0d629280 Cleaning test code and output up with testify 2 years ago
Nick Dumas 8b4ba717e2 Renaming in preparation for more tests 2 years ago
Nick Dumas 3f3696eb9f Lexer can now handle multi-line inputs 2 years ago
Nick Dumas 17699a114c tests for simple parsing of wikilinks in larger content blocks 2 years ago
Nick Dumas 4235beff81 can't assume this empty token will be there anymore 2 years ago
Nick Dumas 37dcdb9168 Logging is tuneable for better debugging experience 2 years ago
Nick Dumas ee6cad7662 removing nonexistent origin for this repo 2 years ago
Nick Dumas 82dd27bd06 More tests 2 years ago
Nick Dumas 04458800d1 tests refactored pretty nicely now 2 years ago
Nick Dumas 25f949fafd refactoring tests to make this easier 2 years ago
Nick Dumas 0b377a0500 Expand test cases, refine logging configuration
Wiki links don't occur in a vacuum, I need to be able to parse entire
blocks of markdown and extract the wikilinks properly.

Now that I've gotten the lexer working I mostly don't need to see every
single change to the position/start/width values, but the
instrumentation will be useful for future debugging.
2 years ago

@ -0,0 +1 @@
common --experimental_enable_bzlmod

3
.gitignore vendored

@ -2,3 +2,6 @@ node_modules/*
package*.json
dist/*
reports/*
lexer.log
parser.log
bazel-*

33
BUILD

@ -0,0 +1,33 @@
load("@rules_go//go:def.bzl", "go_library", "go_test")
load("@gazelle//:def.bzl", "gazelle")
gazelle(name = "gazelle")
go_library(
name = "obsidian-markdown",
srcs = [
"lexemetype_string.go",
"lexer.go",
"states.go",
"wikilink.go",
],
importpath = "code.ndumas.com/ndumas/obsidian-markdown",
visibility = ["//visibility:public"],
deps = [
"@org_uber_go_zap//:zap",
"@org_uber_go_zap//zapcore",
],
)
go_test(
name = "obsidian-markdown_test",
srcs = [
"lexer_test.go",
"wikilink_test.go",
],
deps = [
":obsidian-markdown",
"@com_github_stretchr_testify//assert",
"@org_uber_go_zap//zapcore",
],
)

@ -0,0 +1,12 @@
module(
name = "obsidian-markdown",
repo_name = "code.ndumas.com_ndumas_obsidian-markdown",
)
bazel_dep(name = "gazelle", version = "0.32.0")
bazel_dep(name = "rules_go", version = "0.41.0")
bazel_dep(name = "rules_oci", version = "1.2.0")
go_deps = use_extension("@gazelle//:extensions.bzl", "go_deps")
go_deps.from_file(go_mod = "//:go.mod")
use_repo(go_deps, "com_github_stretchr_testify", "org_uber_go_zap")

File diff suppressed because it is too large Load Diff

@ -40,7 +40,7 @@
# Parameters
PKG = code.ndumas.com/ndumas/wikilink-parser
PKG = code.ndumas.com/ndumas/obsidian-markdown
NAME = parse-wikilinks
DOC = README.md LICENSE
@ -99,7 +99,6 @@ all: debug setup dep format lint test bench build dist
git-push:
git push origin main --tags
git push github main --tags
release-major: bump-major git-push
@ -194,13 +193,13 @@ docker: docker-image docker-push
.PHONY: docker-push
docker-push:
$(DOCKER_CMD) tag code.ndumas.com/ndumas/wikilink-parser:$(VERSION) code.ndumas.com/ndumas/wikilink-parser:latest
$(DOCKER_CMD) push code.ndumas.com/ndumas/wikilink-parser:latest
$(DOCKER_CMD) push code.ndumas.com/ndumas/wikilink-parser:$(VERSION)
$(DOCKER_CMD) tag $(PKG):$(VERSION) $(PKG):latest
$(DOCKER_CMD) push $(PKG):latest
$(DOCKER_CMD) push $(PKG):$(VERSION)
.PHONY: docker-image
docker-image:
$(DOCKER_CMD) build --build-arg VERSION=$(VERSION) -t code.ndumas.com/ndumas/wikilink-parser:$(VERSION) .
$(DOCKER_CMD) build --build-arg VERSION=$(VERSION) -t $(PKG):$(VERSION) .
.PHONY: build-alpine
build-alpine:

@ -0,0 +1,18 @@
load("@rules_go//go:def.bzl", "go_binary", "go_library")
go_library(
name = "demo_lib",
srcs = ["main.go"],
importpath = "code.ndumas.com/ndumas/obsidian-markdown/cmd/demo",
visibility = ["//visibility:private"],
deps = [
"//:obsidian-markdown",
"@org_uber_go_zap//zapcore",
],
)
go_binary(
name = "demo",
embed = [":demo_lib"],
visibility = ["//visibility:public"],
)

@ -3,11 +3,13 @@ package main
import (
"log"
"code.ndumas.com/ndumas/wikilink-parser"
"go.uber.org/zap/zapcore"
"code.ndumas.com/ndumas/obsidian-markdown"
)
func main() {
l := wikilink.Lex("debugLexer", `[[#heading]]`)
l := markdown.Lex("debugLexer", `this is a [[wikilink]]`, zapcore.InfoLevel)
for _, item := range l.Items {
item := item
log.Printf("%#+v\n", item)

@ -1,10 +1,16 @@
module code.ndumas.com/ndumas/wikilink-parser
module code.ndumas.com/ndumas/obsidian-markdown
go 1.19
require go.uber.org/zap v1.24.0
require (
github.com/stretchr/testify v1.8.0
go.uber.org/zap v1.24.0
)
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
go.uber.org/atomic v1.7.0 // indirect
go.uber.org/multierr v1.6.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

@ -6,8 +6,11 @@ github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI=
@ -15,4 +18,8 @@ go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60=
go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

@ -1,6 +1,6 @@
// Code generated by "stringer -type=LexemeType"; DO NOT EDIT.
package wikilink
package markdown
import "strconv"

@ -1,14 +1,17 @@
//go:generate stringer -type=LexemeType
package wikilink
package markdown
import (
"fmt"
// "os"
"strings"
"sync"
// "unicode"
"unicode/utf8"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
const (
@ -28,16 +31,37 @@ const (
)
const (
OpenLink = "[["
CloseLink = "]]"
Alias = "|"
Heading = "#"
BlockRef = "#^"
OpenLink = "[["
CloseLink = "]]"
Alias = "|"
Heading = "#"
BlockRef = "#^"
EscapeChar = `\`
)
func Lex(name, input string) *Lexer {
func Lex(name, input string, level zapcore.Level) *Lexer {
encoderCfg := zap.NewProductionEncoderConfig()
encoderCfg.EncodeTime = zapcore.ISO8601TimeEncoder
config := zap.Config{
Level: zap.NewAtomicLevelAt(level),
EncoderConfig: encoderCfg,
OutputPaths: []string{
"./lexer.log",
"stdout",
},
Encoding: "console",
ErrorOutputPaths: []string{
"stderr",
},
InitialFields: map[string]interface{}{
"lexer": name,
// "pid": os.Getpid(),
},
}
l := &Lexer{
L: zap.NewExample().Sugar().Named("lexer"),
L: zap.Must(config.Build()).Named("lexer"),
name: name,
input: input,
state: lexText,
@ -79,7 +103,7 @@ func (l *Lexer) backup() {
}
type Lexer struct {
L *zap.SugaredLogger
L *zap.Logger
name, input string
start, pos, width int
state stateFn
@ -127,7 +151,7 @@ func (l *Lexer) emit(t LexemeType) {
L := l.L.Named("emit").With(
zap.String("item", i.String()),
)
L.Debug("emitting lexeme")
L.Info("emitting lexeme")
l.Items = append(l.Items, i)
l.SetStart(l.GetPos())
/* original concurrent implementation
@ -139,7 +163,7 @@ func (l *Lexer) emit(t LexemeType) {
zap.Int("width", l.GetWidth()),
).Named("emit")
L.Debugw("emitting item",
L.Debug("emitting item",
zap.String("item", i.String()),
)
l.items <- i
@ -152,7 +176,7 @@ func (l *Lexer) errorf(format string, args ...interface{}) stateFn {
LexError,
fmt.Sprintf(format, args...),
}
L.Debugw("emitting errorItem",
L.Debug("emitting errorItem",
zap.String("error", errorItem.String()),
)
@ -169,7 +193,7 @@ func (l *Lexer) next() rune {
return EOF
}
r, width := utf8.DecodeRuneInString(l.input[l.GetPos():])
L.Debugw("found rune",
L.Debug("found rune",
zap.String("rune", string(r)),
zap.Int("width", width),
)
@ -195,7 +219,7 @@ func (l *Lexer) run() {
func (l *Lexer) GetPos() int {
defer l.posMutex.Unlock()
l.posMutex.Lock()
l.L.Named("GetPos").Debugw("getting current position",
l.L.Named("GetPos").Debug("getting current position",
zap.Int("old", l.pos),
)
return l.pos
@ -204,7 +228,7 @@ func (l *Lexer) GetPos() int {
func (l *Lexer) SetPos(pos int) {
defer l.posMutex.Unlock()
l.posMutex.Lock()
l.L.Named("SetPos").Debugw("setting new position",
l.L.Named("SetPos").Debug("setting new position",
zap.Int("new", pos),
zap.Int("old", l.pos),
)
@ -214,7 +238,7 @@ func (l *Lexer) SetPos(pos int) {
func (l *Lexer) GetWidth() int {
defer l.widthMutex.Unlock()
l.widthMutex.Lock()
l.L.Named("GetWidth").Debugw("setting new width",
l.L.Named("GetWidth").Debug("setting new width",
zap.Int("old", l.width),
)
return l.width
@ -223,7 +247,7 @@ func (l *Lexer) GetWidth() int {
func (l *Lexer) SetWidth(width int) {
defer l.widthMutex.Unlock()
l.widthMutex.Lock()
l.L.Named("SetWidth").Debugw("setting new width",
l.L.Named("SetWidth").Debug("setting new width",
zap.Int("new", width),
zap.Int("old", l.width),
)
@ -233,7 +257,7 @@ func (l *Lexer) SetWidth(width int) {
func (l *Lexer) GetStart() int {
defer l.startMutex.Unlock()
l.startMutex.Lock()
l.L.Named("GetStart").Debugw("getting old start",
l.L.Named("GetStart").Debug("getting old start",
zap.Int("old", l.start),
)
return l.start
@ -242,7 +266,7 @@ func (l *Lexer) GetStart() int {
func (l *Lexer) SetStart(start int) {
defer l.startMutex.Unlock()
l.startMutex.Lock()
l.L.Named("SetStart").Debugw("setting new start",
l.L.Named("SetStart").Debug("setting new start",
zap.Int("new", start),
zap.Int("old", l.start),
)

@ -1,248 +1,442 @@
package wikilink_test
package markdown_test
import (
"testing"
"code.ndumas.com/ndumas/wikilink-parser"
"github.com/stretchr/testify/assert"
"go.uber.org/zap/zapcore"
"code.ndumas.com/ndumas/obsidian-markdown"
)
func Test_ObsidianWikilinks(t *testing.T) {
// t.Parallel()
tcs := []struct {
name string
in string
expected []wikilink.Lexeme
}{
{
name: "wikilink",
in: "[[wikilink]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink|display name",
in: "[[wikilink|display name]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink|display name|second pipe",
in: "[[wikilink|display name|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink with numeric alias|420|second pipe",
in: "[[wikilink|420|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "420"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink with spaces in filename",
in: "[[wikilink spaces]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink spaces"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "#heading",
in: "[[#heading]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: ""},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink#heading",
in: "[[wikilink#heading]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink#heading|display name",
in: "[[wikilink#heading|display name]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink#heading|display name|second pipe",
in: "[[wikilink#heading|display name|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink with numeric aliases#heading|420|display name",
in: "[[wikilink#heading|420|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexHeading, Val: "#"},
{Typ: wikilink.LexIdent, Val: "heading"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "420"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "#^blockRef",
in: "[[#^blockRef]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: ""},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink#^blockRef",
in: "[[wikilink#^blockRef]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink#^blockRef|display name",
in: "[[wikilink#^blockRef|display name]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink#^blockRef|display name|second pipe",
in: "[[wikilink#^blockRef|display name|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "display name"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
},
{
name: "wikilink with numeric aliases#^blockRef|420|second pipe",
in: "[[wikilink#^blockRef|420|second pipe]]",
expected: []wikilink.Lexeme{
{Typ: wikilink.LexOpenLink, Val: "[["},
{Typ: wikilink.LexIdent, Val: "wikilink"},
{Typ: wikilink.LexBlockRef, Val: "#^"},
{Typ: wikilink.LexIdent, Val: "blockRef"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "420"},
{Typ: wikilink.LexAlias, Val: "|"},
{Typ: wikilink.LexIdent, Val: "second pipe"},
{Typ: wikilink.LexCloseLink, Val: "]]"},
{Typ: wikilink.LexText, Val: ""},
},
var wikilinkWithEscapeCharacters = []tc{
{
name: "wikilink with escaped close link",
in: `[[wiki\]\]link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\]\]link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with partial escaped close link",
in: `[[wiki\]link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\]link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with escaped open link",
in: `[[wiki\[\[link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\[\[link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with partial escaped open link",
in: `[[wiki\[link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\[link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with escaped alias",
in: `[[wiki\|link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\|link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with escaped blockref",
in: `[[wiki\#\^link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\#\^link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with partial escaped blockref",
in: `[[wiki\^link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\^link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with escaped header",
in: `[[wiki\#link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\#link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
}
var singleWikilink = []tc{
{
name: "wikilink",
in: "[[wikilink]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink|display name",
in: "[[wikilink|display name]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "display name"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink|display name|second pipe",
in: "[[wikilink|display name|second pipe]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "display name"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "second pipe"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with numeric alias|420|second pipe",
in: "[[wikilink|420|second pipe]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "420"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "second pipe"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with spaces in filename",
in: "[[wikilink spaces]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink spaces"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "#heading",
in: "[[#heading]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: ""},
{Typ: markdown.LexHeading, Val: "#"},
{Typ: markdown.LexIdent, Val: "heading"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#heading",
in: "[[wikilink#heading]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexHeading, Val: "#"},
{Typ: markdown.LexIdent, Val: "heading"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#heading|display name",
in: "[[wikilink#heading|display name]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexHeading, Val: "#"},
{Typ: markdown.LexIdent, Val: "heading"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "display name"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#heading|display name|second pipe",
in: "[[wikilink#heading|display name|second pipe]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexHeading, Val: "#"},
{Typ: markdown.LexIdent, Val: "heading"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "display name"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "second pipe"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with numeric aliases#heading|420|display name",
in: "[[wikilink#heading|420|second pipe]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexHeading, Val: "#"},
{Typ: markdown.LexIdent, Val: "heading"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "420"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "second pipe"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "#^blockRef",
in: "[[#^blockRef]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: ""},
{Typ: markdown.LexBlockRef, Val: "#^"},
{Typ: markdown.LexIdent, Val: "blockRef"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#^blockRef",
in: "[[wikilink#^blockRef]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexBlockRef, Val: "#^"},
{Typ: markdown.LexIdent, Val: "blockRef"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#^blockRef|display name",
in: "[[wikilink#^blockRef|display name]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexBlockRef, Val: "#^"},
{Typ: markdown.LexIdent, Val: "blockRef"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "display name"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink#^blockRef|display name|second pipe",
in: "[[wikilink#^blockRef|display name|second pipe]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexBlockRef, Val: "#^"},
{Typ: markdown.LexIdent, Val: "blockRef"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "display name"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "second pipe"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with numeric aliases#^blockRef|420|second pipe",
in: "[[wikilink#^blockRef|420|second pipe]]",
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: "wikilink"},
{Typ: markdown.LexBlockRef, Val: "#^"},
{Typ: markdown.LexIdent, Val: "blockRef"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "420"},
{Typ: markdown.LexAlias, Val: "|"},
{Typ: markdown.LexIdent, Val: "second pipe"},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
}
func Test_ObsidianWikilinks_TextWithEscapeCharacters(t *testing.T) {
for _, tc := range singleWikilink {
tc.name = "escape characters preceding " + tc.name
mut, test := mutateTestCase(
tc,
`foo\[\[not a link, but this is`,
"",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: `foo\[\[not a link, but this is`},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
for _, tc := range singleWikilink {
tc.name = "escape characters following " + tc.name
mut, test := mutateTestCase(
tc,
"",
`foo\[\[not a link, but this is`,
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: `foo\[\[not a link, but this is`},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_EscapeCharacters(t *testing.T) {
for _, tc := range wikilinkWithEscapeCharacters {
mut, test := mutateTestCase(
tc,
"",
"",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_LinksEndOfMultiLineInput(t *testing.T) {
for _, tc := range singleWikilink {
mut, test := mutateTestCase(
tc,
" test data please ignore.\nbling blonk more lines\nbling blong\nthis is a",
"",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: " test data please ignore.\n"},
{Typ: markdown.LexText, Val: "bling blonk more lines\n"},
{Typ: markdown.LexText, Val: "bling blong\n"},
{Typ: markdown.LexText, Val: "this is a"},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_LinksStartOfMultiLineInput(t *testing.T) {
for _, tc := range singleWikilink {
mut, test := mutateTestCase(
tc,
"",
" test data please ignore.\nbling blonk more lines\nbling blong\nthis is a",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: " test data please ignore.\n"},
{Typ: markdown.LexText, Val: "bling blonk more lines\n"},
{Typ: markdown.LexText, Val: "bling blong\n"},
{Typ: markdown.LexText, Val: "this is a"},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_LinksStartOfInput(t *testing.T) {
for _, tc := range singleWikilink {
mut, test := mutateTestCase(
tc,
"",
" test data please ignore",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: " test data please ignore"},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_LinksEndOfInput(t *testing.T) {
for _, tc := range singleWikilink {
mut, test := mutateTestCase(
tc,
"this is a ",
"",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: "this is a "},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_Basic(t *testing.T) {
for _, tc := range singleWikilink {
mut, test := mutateTestCase(
tc,
"",
"",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
type tc struct {
name string
in string
expected []markdown.Lexeme
}
func mutateTestCase(tc tc, prefix, suffix string, expectedPrefix, expectedSuffix []markdown.Lexeme) (tc, func(t *testing.T)) {
tc.in = prefix + tc.in
tc.in = tc.in + suffix
if expectedPrefix != nil {
tc.expected = append(expectedPrefix, tc.expected...)
}
if expectedSuffix != nil {
tc.expected = append(tc.expected, expectedSuffix...)
}
for _, tc := range tcs {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
l := wikilink.Lex("testLexer", tc.in)
defer l.L.Sync()
if len(tc.expected) != len(l.Items) {
t.Logf("expected %d tokens, got %d\n", len(tc.expected), len(l.Items))
t.Fail()
return
}
for i, e := range tc.expected {
n := l.Items[i]
if e.Typ != n.Typ {
t.Logf("expected Type %s, received %s", e.Typ.String(), n.Typ.String())
t.Fail()
return
}
if e.Val != n.Val {
t.Logf("expected Value %q, received %q", e.Val, n.Val)
t.Fail()
return
}
}
})
return tc, func(t *testing.T) {
l := markdown.Lex("testLexer", tc.in, zapcore.WarnLevel)
defer l.L.Sync()
assert.Equal(t, tc.expected, l.Items, "token stream mismatch")
}
}

@ -1,275 +0,0 @@
mkdir -p "/home/ndumas/work/wikilink-parser/reports"
mkdir -p "/home/ndumas/work/wikilink-parser/dist"
go clean code.ndumas.com/ndumas/wikilink-parser
rm -vrf "/home/ndumas/work/wikilink-parser/dist"/*
rm -vf "/home/ndumas/work/wikilink-parser/reports"/*
removed '/home/ndumas/work/wikilink-parser/reports/test.out'
go get -d -t code.ndumas.com/ndumas/wikilink-parser/...
go install golang.org/x/tools/cmd/stringer@latest
go generate
go test -race -v -tags "release" $(go list "code.ndumas.com/ndumas/wikilink-parser/..." | grep -v /vendor/) | tee "/home/ndumas/work/wikilink-parser/reports/test.out"
=== RUN Test_Lexer
=== RUN Test_Lexer/wikilink
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink|display_name
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":23,"width":1,"item":"ItemIdent:\"display name\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":25,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink|display_name|second_pipe
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":23,"width":1,"item":"ItemIdent:\"display name\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":25,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":24,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":35,"width":1,"item":"ItemIdent:\"second pipe\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":37,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink_with_numeric_alias|420|second_pipe
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":14,"width":1,"item":"ItemIdent:\"420\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":15,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":26,"width":1,"item":"ItemIdent:\"second pipe\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":28,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink_with_spaces_in_filename
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":17,"width":1,"item":"ItemIdent:\"wikilink spaces\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":19,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/#heading
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"#heading\""}
lexer_test.go:100: expected Type ItemHeading, received ItemIdent
lexer_test.go:105: expected Value "#", received "#heading"
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":1,"item":"ItemCloseLink:\"]]\""}
lexer_test.go:100: expected Type ItemIdent, received ItemCloseLink
lexer_test.go:105: expected Value "heading", received "]]"
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
lexer_test.go:100: expected Type ItemCloseLink, received ItemText
lexer_test.go:105: expected Value "]]", received ""
=== RUN Test_Lexer/wikilink#heading
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":37,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":28,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":19,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
==================
WARNING: DATA RACE
Read at 0x00c0000822b0 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:82 +0x69
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822b0 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.lexOpenLink()
/home/ndumas/work/wikilink-parser/states.go:96 +0x86
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
==================
WARNING: DATA RACE
Write at 0x00c0000822b8 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).next()
/home/ndumas/work/wikilink-parser/lexer.go:142 +0xe8
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:85 +0xc4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822b8 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).next()
/home/ndumas/work/wikilink-parser/lexer.go:142 +0xe8
code.ndumas.com/ndumas/wikilink-parser.lexIdent()
/home/ndumas/work/wikilink-parser/states.go:31 +0x64
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
==================
WARNING: DATA RACE
Read at 0x00c0000822a8 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit()
/home/ndumas/work/wikilink-parser/lexer.go:109 +0x64
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:88 +0xf4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822a8 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit()
/home/ndumas/work/wikilink-parser/lexer.go:119 +0x4cf
code.ndumas.com/ndumas/wikilink-parser.lexOpenLink()
/home/ndumas/work/wikilink-parser/states.go:97 +0xa4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
2023/07/01 18:25:42 lexHeading
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":20,"width":0,"item":"ItemText:\"wikilink#heading]]\""}
testing.go:1319: race detected during execution of test
--- FAIL: Test_Lexer (0.01s)
--- PASS: Test_Lexer/wikilink (0.00s)
--- PASS: Test_Lexer/wikilink|display_name (0.00s)
--- PASS: Test_Lexer/wikilink|display_name|second_pipe (0.00s)
--- PASS: Test_Lexer/wikilink_with_numeric_alias|420|second_pipe (0.00s)
--- PASS: Test_Lexer/wikilink_with_spaces_in_filename (0.00s)
--- FAIL: Test_Lexer/#heading (0.00s)
--- FAIL: Test_Lexer/wikilink#heading (0.00s)
panic: runtime error: slice bounds out of range [:21] with length 20 [recovered]
panic: runtime error: slice bounds out of range [:21] with length 20
goroutine 34 [running]:
testing.tRunner.func1.2({0x6e8160, 0xc0000e20a8})
/usr/lib/golang/src/testing/testing.go:1396 +0x372
testing.tRunner.func1()
/usr/lib/golang/src/testing/testing.go:1399 +0x5f0
panic({0x6e8160, 0xc0000e20a8})
/usr/lib/golang/src/runtime/panic.go:890 +0x262
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit(0xc000082280, 0x5)
/home/ndumas/work/wikilink-parser/lexer.go:109 +0x506
code.ndumas.com/ndumas/wikilink-parser.lexHeading(0xc000082280)
/home/ndumas/work/wikilink-parser/states.go:58 +0xa5
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem(0xc000082280)
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd6
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1(0xc000190b60)
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18d
testing.tRunner(0xc000190b60, 0xc00019cb40)
/usr/lib/golang/src/testing/testing.go:1446 +0x217
created by testing.(*T).Run
/usr/lib/golang/src/testing/testing.go:1493 +0x75e
FAIL code.ndumas.com/ndumas/wikilink-parser 0.024s
FAIL

@ -1,4 +1,4 @@
package wikilink
package markdown
import (
"strings"
@ -26,15 +26,20 @@ func isBlockRef(s string) bool {
return strings.HasPrefix(s, BlockRef)
}
// isEscape reports whether s begins with the escape prefix EscapeChar
// (declared elsewhere in this package).
func isEscape(s string) bool {
	return strings.HasPrefix(s, EscapeChar)
}
func lexIdent(l *Lexer) stateFn {
L := l.L.Named("lexIdent")
for {
L := l.L.Named("lexIdent")
s := l.input[l.GetPos():]
L.Debug("stepping through lexIdent")
if s[0] == '\\' { // i think this will handle escape characters?
break
}
switch {
case isEscape(s):
l.next()
l.next()
continue
case isCloseLink(s):
L.Debug("found CloseLink")
l.emit(LexIdent)
@ -85,15 +90,27 @@ func lexAlias(l *Lexer) stateFn {
func lexText(l *Lexer) stateFn {
L := l.L.Named("lexText")
for {
if isOpenLink(l.input[l.GetPos():]) {
s := l.input[l.GetPos():]
L.Debug("stepping through lexText")
switch {
case isEscape(s):
l.next()
l.next()
continue
case isOpenLink(s):
L.Debug("found openLink")
l.emit(LexText)
return lexOpenLink
}
r := l.next()
switch {
case r == EOF || r == '\n':
case r == EOF:
l.emit(LexText)
return nil
case r == '\n':
l.emit(LexText)
return lexText
}
}
}

@ -1,4 +1,4 @@
package wikilink
package markdown
import (
// "log"

@ -1,9 +1,9 @@
package wikilink_test
package markdown_test
import (
"testing"
"code.ndumas.com/ndumas/wikilink-parser"
"code.ndumas.com/ndumas/obsidian-markdown"
)
func _Test_Wikilink_Parsing(t *testing.T) {
@ -36,7 +36,7 @@ func _Test_Wikilink_Parsing(t *testing.T) {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
out := wikilink.Extract(tc.in)
out := markdown.Extract(tc.in)
if out.Link != tc.link {
t.Logf("got %#v\n", out)
t.Fail()
@ -50,7 +50,7 @@ func _Test_Wikilink_Parsing(t *testing.T) {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
out := wikilink.Extract(tc.in)
out := markdown.Extract(tc.in)
if out.Alias != tc.alias {
t.Logf("got %#v\n", out)
t.Fail()
@ -64,7 +64,7 @@ func _Test_Wikilink_Parsing(t *testing.T) {
tc := tc
t.Run(tc.name, func(t *testing.T) {
t.Parallel()
out := wikilink.Extract(tc.in)
out := markdown.Extract(tc.in)
if out.Fragment != tc.fragment {
t.Logf("got %#v\n", out)
t.Fail()

Loading…
Cancel
Save