Compare commits


15 Commits
v0.0.3 ... main

@@ -0,0 +1 @@
common --experimental_enable_bzlmod

1
.gitignore vendored

@@ -4,3 +4,4 @@ dist/*
 reports/*
 lexer.log
 parser.log
+bazel-*

33
BUILD

@@ -0,0 +1,33 @@
load("@rules_go//go:def.bzl", "go_library", "go_test")
load("@gazelle//:def.bzl", "gazelle")
gazelle(name = "gazelle")
go_library(
name = "obsidian-markdown",
srcs = [
"lexemetype_string.go",
"lexer.go",
"states.go",
"wikilink.go",
],
importpath = "code.ndumas.com/ndumas/obsidian-markdown",
visibility = ["//visibility:public"],
deps = [
"@org_uber_go_zap//:zap",
"@org_uber_go_zap//zapcore",
],
)
go_test(
name = "obsidian-markdown_test",
srcs = [
"lexer_test.go",
"wikilink_test.go",
],
deps = [
":obsidian-markdown",
"@com_github_stretchr_testify//assert",
"@org_uber_go_zap//zapcore",
],
)

@@ -0,0 +1,12 @@
module(
name = "obsidian-markdown",
repo_name = "code.ndumas.com_ndumas_obsidian-markdown",
)
bazel_dep(name = "gazelle", version = "0.32.0")
bazel_dep(name = "rules_go", version = "0.41.0")
bazel_dep(name = "rules_oci", version = "1.2.0")
go_deps = use_extension("@gazelle//:extensions.bzl", "go_deps")
go_deps.from_file(go_mod = "//:go.mod")
use_repo(go_deps, "com_github_stretchr_testify", "org_uber_go_zap")

File diff suppressed because it is too large

@@ -40,7 +40,7 @@
 # Parameters
-PKG = code.ndumas.com/ndumas/wikilink-parser
+PKG = code.ndumas.com/ndumas/obsidian-markdown
 NAME = parse-wikilinks
 DOC = README.md LICENSE
@@ -193,13 +193,13 @@ docker: docker-image docker-push
 .PHONY: docker-push
 docker-push:
-	$(DOCKER_CMD) tag code.ndumas.com/ndumas/wikilink-parser:$(VERSION) code.ndumas.com/ndumas/wikilink-parser:latest
+	$(DOCKER_CMD) tag $(PKG):$(VERSION) $(PKG):latest
-	$(DOCKER_CMD) push code.ndumas.com/ndumas/wikilink-parser:latest
+	$(DOCKER_CMD) push $(PKG):latest
-	$(DOCKER_CMD) push code.ndumas.com/ndumas/wikilink-parser:$(VERSION)
+	$(DOCKER_CMD) push $(PKG):$(VERSION)
 .PHONY: docker-image
 docker-image:
-	$(DOCKER_CMD) build --build-arg VERSION=$(VERSION) -t code.ndumas.com/ndumas/wikilink-parser:$(VERSION) .
+	$(DOCKER_CMD) build --build-arg VERSION=$(VERSION) -t $(PKG):$(VERSION) .
 .PHONY: build-alpine
 build-alpine:

@@ -0,0 +1,18 @@
load("@rules_go//go:def.bzl", "go_binary", "go_library")
go_library(
name = "demo_lib",
srcs = ["main.go"],
importpath = "code.ndumas.com/ndumas/obsidian-markdown/cmd/demo",
visibility = ["//visibility:private"],
deps = [
"//:obsidian-markdown",
"@org_uber_go_zap//zapcore",
],
)
go_binary(
name = "demo",
embed = [":demo_lib"],
visibility = ["//visibility:public"],
)

@@ -3,11 +3,13 @@ package main
 import (
 	"log"
-	"code.ndumas.com/ndumas/wikilink-parser"
+	"go.uber.org/zap/zapcore"
+	"code.ndumas.com/ndumas/obsidian-markdown"
 )
 func main() {
-	l := wikilink.Lex("debugLexer", `this is a [[wikilink]]`)
+	l := markdown.Lex("debugLexer", `this is a [[wikilink]]`, zapcore.InfoLevel)
 	for _, item := range l.Items {
 		item := item
 		log.Printf("%#+v\n", item)

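Put together, the demo under the new module name reads roughly as below (a sketch assembled from the hunk above; the closing braces sit outside the diff context, and note that the import path ends in obsidian-markdown while the package it provides is named markdown):

package main

import (
	"log"

	"go.uber.org/zap/zapcore"

	"code.ndumas.com/ndumas/obsidian-markdown"
)

func main() {
	// Lex now takes an explicit zap level instead of hard-coding InfoLevel internally.
	l := markdown.Lex("debugLexer", `this is a [[wikilink]]`, zapcore.InfoLevel)
	for _, item := range l.Items {
		item := item
		log.Printf("%#+v\n", item)
	}
}
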
@@ -1,10 +1,16 @@
-module code.ndumas.com/ndumas/wikilink-parser
+module code.ndumas.com/ndumas/obsidian-markdown

 go 1.19

-require go.uber.org/zap v1.24.0
+require (
+	github.com/stretchr/testify v1.8.0
+	go.uber.org/zap v1.24.0
+)

 require (
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
 	go.uber.org/atomic v1.7.0 // indirect
 	go.uber.org/multierr v1.6.0 // indirect
+	gopkg.in/yaml.v3 v3.0.1 // indirect
 )

@@ -6,8 +6,11 @@ github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
 go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
 go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
 go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI=
@@ -15,4 +18,8 @@ go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=
 go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
 go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60=
 go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

@@ -1,6 +1,6 @@
 // Code generated by "stringer -type=LexemeType"; DO NOT EDIT.
-package wikilink
+package markdown

 import "strconv"

@@ -1,5 +1,5 @@
 //go:generate stringer -type=LexemeType
-package wikilink
+package markdown

 import (
 	"fmt"
@@ -31,19 +31,20 @@ const (
 )

 const (
 	OpenLink = "[["
 	CloseLink = "]]"
 	Alias = "|"
 	Heading = "#"
 	BlockRef = "#^"
+	EscapeChar = `\`
 )

-func Lex(name, input string) *Lexer {
+func Lex(name, input string, level zapcore.Level) *Lexer {
 	encoderCfg := zap.NewProductionEncoderConfig()
 	encoderCfg.EncodeTime = zapcore.ISO8601TimeEncoder
 	config := zap.Config{
-		Level: zap.NewAtomicLevelAt(zap.InfoLevel),
+		Level: zap.NewAtomicLevelAt(level),
 		EncoderConfig: encoderCfg,
 		OutputPaths: []string{
 			"./lexer.log",
@@ -150,7 +151,7 @@ func (l *Lexer) emit(t LexemeType) {
 	L := l.L.Named("emit").With(
 		zap.String("item", i.String()),
 	)
-	L.Debug("emitting lexeme")
+	L.Info("emitting lexeme")
 	l.Items = append(l.Items, i)
 	l.SetStart(l.GetPos())
 	/* original concurrent implementation

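The Lex hunk above threads the caller-supplied zapcore.Level into a zap.Config that writes to ./lexer.log. A minimal, self-contained sketch of that logger setup, with assumed values for what the truncated hunk does not show (the JSON encoding, error handling around Build), and a hypothetical helper name:

package main

import (
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// newLexerLogger mirrors the configuration built inside Lex: production
// encoder config, ISO8601 timestamps, a caller-chosen level, file output.
func newLexerLogger(level zapcore.Level) (*zap.Logger, error) {
	encoderCfg := zap.NewProductionEncoderConfig()
	encoderCfg.EncodeTime = zapcore.ISO8601TimeEncoder
	config := zap.Config{
		Level:         zap.NewAtomicLevelAt(level),
		Encoding:      "json", // assumption: not visible in the truncated hunk
		EncoderConfig: encoderCfg,
		OutputPaths:   []string{"./lexer.log"},
	}

	return config.Build()
}

func main() {
	logger, err := newLexerLogger(zapcore.InfoLevel)
	if err != nil {
		panic(err)
	}
	defer logger.Sync()
	logger.Info("logger ready")
}
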
@@ -1,226 +1,382 @@
-package wikilink_test
+package markdown_test

 import (
 	"testing"

-	"code.ndumas.com/ndumas/wikilink-parser"
+	"github.com/stretchr/testify/assert"
+	"go.uber.org/zap/zapcore"
+	"code.ndumas.com/ndumas/obsidian-markdown"
 )

-var testCases = []tc{
+var wikilinkWithEscapeCharacters = []tc{
{
name: "wikilink with escaped close link",
in: `[[wiki\]\]link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\]\]link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with partial escaped close link",
in: `[[wiki\]link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\]link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with escaped open link",
in: `[[wiki\[\[link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\[\[link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with partial escaped open link",
in: `[[wiki\[link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\[link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with escaped alias",
in: `[[wiki\|link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\|link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with escaped blockref",
in: `[[wiki\#\^link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\#\^link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with partial escaped blockref",
in: `[[wiki\^link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\^link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
{
name: "wikilink with escaped header",
in: `[[wiki\#link]]`,
expected: []markdown.Lexeme{
{Typ: markdown.LexOpenLink, Val: "[["},
{Typ: markdown.LexIdent, Val: `wiki\#link`},
{Typ: markdown.LexCloseLink, Val: "]]"},
},
},
}
var singleWikilink = []tc{
 	{
 		name: "wikilink",
 		in: "[[wikilink]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink|display name",
 		in: "[[wikilink|display name]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "display name"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "display name"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink|display name|second pipe",
 		in: "[[wikilink|display name|second pipe]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "display name"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "second pipe"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "display name"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "second pipe"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink with numeric alias|420|second pipe",
 		in: "[[wikilink|420|second pipe]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "420"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "second pipe"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "420"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "second pipe"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink with spaces in filename",
 		in: "[[wikilink spaces]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink spaces"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink spaces"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "#heading",
 		in: "[[#heading]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: ""},
-			{Typ: wikilink.LexHeading, Val: "#"},
-			{Typ: wikilink.LexIdent, Val: "heading"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: ""},
+			{Typ: markdown.LexHeading, Val: "#"},
+			{Typ: markdown.LexIdent, Val: "heading"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink#heading",
 		in: "[[wikilink#heading]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexHeading, Val: "#"},
-			{Typ: wikilink.LexIdent, Val: "heading"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexHeading, Val: "#"},
+			{Typ: markdown.LexIdent, Val: "heading"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink#heading|display name",
 		in: "[[wikilink#heading|display name]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexHeading, Val: "#"},
-			{Typ: wikilink.LexIdent, Val: "heading"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "display name"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexHeading, Val: "#"},
+			{Typ: markdown.LexIdent, Val: "heading"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "display name"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink#heading|display name|second pipe",
 		in: "[[wikilink#heading|display name|second pipe]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexHeading, Val: "#"},
-			{Typ: wikilink.LexIdent, Val: "heading"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "display name"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "second pipe"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexHeading, Val: "#"},
+			{Typ: markdown.LexIdent, Val: "heading"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "display name"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "second pipe"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink with numeric aliases#heading|420|display name",
 		in: "[[wikilink#heading|420|second pipe]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexHeading, Val: "#"},
-			{Typ: wikilink.LexIdent, Val: "heading"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "420"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "second pipe"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexHeading, Val: "#"},
+			{Typ: markdown.LexIdent, Val: "heading"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "420"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "second pipe"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "#^blockRef",
 		in: "[[#^blockRef]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: ""},
-			{Typ: wikilink.LexBlockRef, Val: "#^"},
-			{Typ: wikilink.LexIdent, Val: "blockRef"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: ""},
+			{Typ: markdown.LexBlockRef, Val: "#^"},
+			{Typ: markdown.LexIdent, Val: "blockRef"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink#^blockRef",
 		in: "[[wikilink#^blockRef]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexBlockRef, Val: "#^"},
-			{Typ: wikilink.LexIdent, Val: "blockRef"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexBlockRef, Val: "#^"},
+			{Typ: markdown.LexIdent, Val: "blockRef"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink#^blockRef|display name",
 		in: "[[wikilink#^blockRef|display name]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexBlockRef, Val: "#^"},
-			{Typ: wikilink.LexIdent, Val: "blockRef"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "display name"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexBlockRef, Val: "#^"},
+			{Typ: markdown.LexIdent, Val: "blockRef"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "display name"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink#^blockRef|display name|second pipe",
 		in: "[[wikilink#^blockRef|display name|second pipe]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexBlockRef, Val: "#^"},
-			{Typ: wikilink.LexIdent, Val: "blockRef"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "display name"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "second pipe"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexBlockRef, Val: "#^"},
+			{Typ: markdown.LexIdent, Val: "blockRef"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "display name"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "second pipe"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 	{
 		name: "wikilink with numeric aliases#^blockRef|420|second pipe",
 		in: "[[wikilink#^blockRef|420|second pipe]]",
-		expected: []wikilink.Lexeme{
-			{Typ: wikilink.LexOpenLink, Val: "[["},
-			{Typ: wikilink.LexIdent, Val: "wikilink"},
-			{Typ: wikilink.LexBlockRef, Val: "#^"},
-			{Typ: wikilink.LexIdent, Val: "blockRef"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "420"},
-			{Typ: wikilink.LexAlias, Val: "|"},
-			{Typ: wikilink.LexIdent, Val: "second pipe"},
-			{Typ: wikilink.LexCloseLink, Val: "]]"},
-			{Typ: wikilink.LexText, Val: ""},
+		expected: []markdown.Lexeme{
+			{Typ: markdown.LexOpenLink, Val: "[["},
+			{Typ: markdown.LexIdent, Val: "wikilink"},
+			{Typ: markdown.LexBlockRef, Val: "#^"},
+			{Typ: markdown.LexIdent, Val: "blockRef"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "420"},
+			{Typ: markdown.LexAlias, Val: "|"},
+			{Typ: markdown.LexIdent, Val: "second pipe"},
+			{Typ: markdown.LexCloseLink, Val: "]]"},
 		},
 	},
 }
func Test_ObsidianWikilinks_TextWithEscapeCharacters(t *testing.T) {
for _, tc := range singleWikilink {
tc.name = "escape characters preceding " + tc.name
mut, test := mutateTestCase(
tc,
`foo\[\[not a link, but this is`,
"",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: `foo\[\[not a link, but this is`},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
for _, tc := range singleWikilink {
tc.name = "escape characters following " + tc.name
mut, test := mutateTestCase(
tc,
"",
`foo\[\[not a link, but this is`,
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: `foo\[\[not a link, but this is`},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_EscapeCharacters(t *testing.T) {
for _, tc := range wikilinkWithEscapeCharacters {
mut, test := mutateTestCase(
tc,
"",
"",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_LinksEndOfMultiLineInput(t *testing.T) {
for _, tc := range singleWikilink {
mut, test := mutateTestCase(
tc,
" test data please ignore.\nbling blonk more lines\nbling blong\nthis is a",
"",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: " test data please ignore.\n"},
{Typ: markdown.LexText, Val: "bling blonk more lines\n"},
{Typ: markdown.LexText, Val: "bling blong\n"},
{Typ: markdown.LexText, Val: "this is a"},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
)
t.Run(mut.name, test)
}
}
func Test_ObsidianWikilinks_LinksStartOfMultiLineInput(t *testing.T) {
for _, tc := range singleWikilink {
mut, test := mutateTestCase(
tc,
"",
" test data please ignore.\nbling blonk more lines\nbling blong\nthis is a",
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: ""},
},
[]markdown.Lexeme{
{Typ: markdown.LexText, Val: " test data please ignore.\n"},
{Typ: markdown.LexText, Val: "bling blonk more lines\n"},
{Typ: markdown.LexText, Val: "bling blong\n"},
{Typ: markdown.LexText, Val: "this is a"},
},
)
t.Run(mut.name, test)
}
}
 func Test_ObsidianWikilinks_LinksStartOfInput(t *testing.T) {
-	for _, tc := range testCases {
+	for _, tc := range singleWikilink {
 		mut, test := mutateTestCase(
 			tc,
 			"",
 			" test data please ignore",
-			[]wikilink.Lexeme{
-				{Typ: wikilink.LexText, Val: ""},
+			[]markdown.Lexeme{
+				{Typ: markdown.LexText, Val: ""},
 			},
-			[]wikilink.Lexeme{
-				{Typ: wikilink.LexText, Val: " test data please ignore"},
+			[]markdown.Lexeme{
+				{Typ: markdown.LexText, Val: " test data please ignore"},
 			},
 		)
 		t.Run(mut.name, test)
@@ -228,32 +384,35 @@ func Test_ObsidianWikilinks_LinksStartOfInput(t *testing.T) {
 }

 func Test_ObsidianWikilinks_LinksEndOfInput(t *testing.T) {
-	for _, tc := range testCases {
+	for _, tc := range singleWikilink {
 		mut, test := mutateTestCase(
 			tc,
 			"this is a ",
 			"",
-			[]wikilink.Lexeme{
-				{Typ: wikilink.LexText, Val: "this is a "},
+			[]markdown.Lexeme{
+				{Typ: markdown.LexText, Val: "this is a "},
+			},
+			[]markdown.Lexeme{
+				{Typ: markdown.LexText, Val: ""},
 			},
-			nil,
 		)
 		t.Run(mut.name, test)
 	}
 }

 func Test_ObsidianWikilinks_Basic(t *testing.T) {
-	// t.Parallel()
-	for _, tc := range testCases {
+	for _, tc := range singleWikilink {
 		mut, test := mutateTestCase(
 			tc,
 			"",
 			"",
-			[]wikilink.Lexeme{
-				{Typ: wikilink.LexText, Val: ""},
+			[]markdown.Lexeme{
+				{Typ: markdown.LexText, Val: ""},
+			},
+			[]markdown.Lexeme{
+				{Typ: markdown.LexText, Val: ""},
 			},
-			nil,
 		)
 		t.Run(mut.name, test)
 	}
@@ -262,10 +421,10 @@ func Test_ObsidianWikilinks_Basic(t *testing.T) {
 type tc struct {
 	name string
 	in string
-	expected []wikilink.Lexeme
+	expected []markdown.Lexeme
 }

-func mutateTestCase(tc tc, prefix, suffix string, expectedPrefix, expectedSuffix []wikilink.Lexeme) (tc, func(t *testing.T)) {
+func mutateTestCase(tc tc, prefix, suffix string, expectedPrefix, expectedSuffix []markdown.Lexeme) (tc, func(t *testing.T)) {
 	tc.in = prefix + tc.in
 	tc.in = tc.in + suffix
 	if expectedPrefix != nil {
@@ -276,32 +435,8 @@ func mutateTestCase(tc tc, prefix, suffix string, expectedPrefix, expectedSuffix
 	}

 	return tc, func(t *testing.T) {
-		// t.Parallel()
-		l := wikilink.Lex("testLexer", tc.in)
+		l := markdown.Lex("testLexer", tc.in, zapcore.WarnLevel)
 		defer l.L.Sync()
-		if len(tc.expected) != len(l.Items) {
-			t.Logf("expected %d tokens, got %d\n", len(tc.expected), len(l.Items))
-			t.Logf("expected items: %#v\n", tc.expected)
-			t.Logf("raw items: %#v\n", l.Items)
-			t.Fail()
-			return
-		}
-		for i, e := range tc.expected {
-			n := l.Items[i]
-			if e.Typ != n.Typ {
-				t.Logf("expected Type %s, received %s", e.Typ.String(), n.Typ.String())
-				t.Fail()
-				return
-			}
-			if e.Val != n.Val {
-				t.Logf("expected Value %q, received %q", e.Val, n.Val)
-				t.Fail()
-				return
-			}
-		}
+		assert.Equal(t, tc.expected, l.Items, "token stream mismatch")
 	}
 }

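With testify in the module, the hand-rolled comparison loop collapses into a single assert.Equal over the whole token stream. A sketch of what one more table entry would look like written directly against the new API, without the mutateTestCase wrapper (the input here is illustrative; the leading and trailing empty LexText entries mirror what Test_ObsidianWikilinks_Basic wraps around every case):

package markdown_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"go.uber.org/zap/zapcore"

	"code.ndumas.com/ndumas/obsidian-markdown"
)

// TestWikilinkAliasSketch lexes one aliased wikilink at WarnLevel and checks
// the whole Lexeme slice in a single assertion.
func TestWikilinkAliasSketch(t *testing.T) {
	in := "[[note|shown text]]"
	expected := []markdown.Lexeme{
		{Typ: markdown.LexText, Val: ""},
		{Typ: markdown.LexOpenLink, Val: "[["},
		{Typ: markdown.LexIdent, Val: "note"},
		{Typ: markdown.LexAlias, Val: "|"},
		{Typ: markdown.LexIdent, Val: "shown text"},
		{Typ: markdown.LexCloseLink, Val: "]]"},
		{Typ: markdown.LexText, Val: ""},
	}

	l := markdown.Lex("testLexer", in, zapcore.WarnLevel)
	defer l.L.Sync()

	assert.Equal(t, expected, l.Items, "token stream mismatch")
}
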
@@ -1,275 +0,0 @@
mkdir -p "/home/ndumas/work/wikilink-parser/reports"
mkdir -p "/home/ndumas/work/wikilink-parser/dist"
go clean code.ndumas.com/ndumas/wikilink-parser
rm -vrf "/home/ndumas/work/wikilink-parser/dist"/*
rm -vf "/home/ndumas/work/wikilink-parser/reports"/*
removed '/home/ndumas/work/wikilink-parser/reports/test.out'
go get -d -t code.ndumas.com/ndumas/wikilink-parser/...
go install golang.org/x/tools/cmd/stringer@latest
go generate
go test -race -v -tags "release" $(go list "code.ndumas.com/ndumas/wikilink-parser/..." | grep -v /vendor/) | tee "/home/ndumas/work/wikilink-parser/reports/test.out"
=== RUN Test_Lexer
=== RUN Test_Lexer/wikilink
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink|display_name
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":23,"width":1,"item":"ItemIdent:\"display name\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":25,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink|display_name|second_pipe
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":23,"width":1,"item":"ItemIdent:\"display name\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":25,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":24,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":35,"width":1,"item":"ItemIdent:\"second pipe\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":37,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink_with_numeric_alias|420|second_pipe
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":11,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":14,"width":1,"item":"ItemIdent:\"420\""}
2023/07/01 18:25:42 lexAlias
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":15,"width":1,"item":"ItemAlias:\"|\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":26,"width":1,"item":"ItemIdent:\"second pipe\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":28,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/wikilink_with_spaces_in_filename
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":17,"width":1,"item":"ItemIdent:\"wikilink spaces\""}
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":19,"width":1,"item":"ItemCloseLink:\"]]\""}
=== RUN Test_Lexer/#heading
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"#heading\""}
lexer_test.go:100: expected Type ItemHeading, received ItemIdent
lexer_test.go:105: expected Value "#", received "#heading"
2023/07/01 18:25:42 lexCloseLink
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":1,"item":"ItemCloseLink:\"]]\""}
lexer_test.go:100: expected Type ItemIdent, received ItemCloseLink
lexer_test.go:105: expected Value "heading", received "]]"
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
lexer_test.go:100: expected Type ItemCloseLink, received ItemText
lexer_test.go:105: expected Value "]]", received ""
=== RUN Test_Lexer/wikilink#heading
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":37,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":28,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":19,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":12,"width":0,"item":"ItemText:\"\""}
2023/07/01 18:25:42 lexText
2023/07/01 18:25:42 lexOpenLink
2023/07/01 18:25:42 lexText
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":2,"width":0,"item":"ItemOpenLink:\"[[\""}
2023/07/01 18:25:42 lexIdent
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":10,"width":1,"item":"ItemIdent:\"wikilink\""}
==================
WARNING: DATA RACE
Read at 0x00c0000822b0 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:82 +0x69
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822b0 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.lexOpenLink()
/home/ndumas/work/wikilink-parser/states.go:96 +0x86
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
==================
WARNING: DATA RACE
Write at 0x00c0000822b8 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).next()
/home/ndumas/work/wikilink-parser/lexer.go:142 +0xe8
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:85 +0xc4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822b8 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).next()
/home/ndumas/work/wikilink-parser/lexer.go:142 +0xe8
code.ndumas.com/ndumas/wikilink-parser.lexIdent()
/home/ndumas/work/wikilink-parser/states.go:31 +0x64
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
==================
WARNING: DATA RACE
Read at 0x00c0000822a8 by goroutine 21:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit()
/home/ndumas/work/wikilink-parser/lexer.go:109 +0x64
code.ndumas.com/ndumas/wikilink-parser.lexText()
/home/ndumas/work/wikilink-parser/states.go:88 +0xf4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).run()
/home/ndumas/work/wikilink-parser/lexer.go:149 +0x3a
code.ndumas.com/ndumas/wikilink-parser.Lex.func1()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x39
Previous write at 0x00c0000822a8 by goroutine 20:
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit()
/home/ndumas/work/wikilink-parser/lexer.go:119 +0x4cf
code.ndumas.com/ndumas/wikilink-parser.lexOpenLink()
/home/ndumas/work/wikilink-parser/states.go:97 +0xa4
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem()
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd5
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18c
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 21 (running) created at:
code.ndumas.com/ndumas/wikilink-parser.Lex()
/home/ndumas/work/wikilink-parser/lexer.go:45 +0x41a
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1()
/home/ndumas/work/wikilink-parser/lexer_test.go:95 +0x69
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
Goroutine 20 (running) created at:
testing.(*T).Run()
/usr/lib/golang/src/testing/testing.go:1493 +0x75d
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer()
/home/ndumas/work/wikilink-parser/lexer_test.go:93 +0x1086
testing.tRunner()
/usr/lib/golang/src/testing/testing.go:1446 +0x216
testing.(*T).Run.func1()
/usr/lib/golang/src/testing/testing.go:1493 +0x47
==================
2023/07/01 18:25:42 lexHeading
{"level":"debug","logger":"lexer.emit","msg":"emitting item","pos":20,"width":0,"item":"ItemText:\"wikilink#heading]]\""}
testing.go:1319: race detected during execution of test
--- FAIL: Test_Lexer (0.01s)
--- PASS: Test_Lexer/wikilink (0.00s)
--- PASS: Test_Lexer/wikilink|display_name (0.00s)
--- PASS: Test_Lexer/wikilink|display_name|second_pipe (0.00s)
--- PASS: Test_Lexer/wikilink_with_numeric_alias|420|second_pipe (0.00s)
--- PASS: Test_Lexer/wikilink_with_spaces_in_filename (0.00s)
--- FAIL: Test_Lexer/#heading (0.00s)
--- FAIL: Test_Lexer/wikilink#heading (0.00s)
panic: runtime error: slice bounds out of range [:21] with length 20 [recovered]
panic: runtime error: slice bounds out of range [:21] with length 20
goroutine 34 [running]:
testing.tRunner.func1.2({0x6e8160, 0xc0000e20a8})
/usr/lib/golang/src/testing/testing.go:1396 +0x372
testing.tRunner.func1()
/usr/lib/golang/src/testing/testing.go:1399 +0x5f0
panic({0x6e8160, 0xc0000e20a8})
/usr/lib/golang/src/runtime/panic.go:890 +0x262
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).emit(0xc000082280, 0x5)
/home/ndumas/work/wikilink-parser/lexer.go:109 +0x506
code.ndumas.com/ndumas/wikilink-parser.lexHeading(0xc000082280)
/home/ndumas/work/wikilink-parser/states.go:58 +0xa5
code.ndumas.com/ndumas/wikilink-parser.(*Lexer).NextItem(0xc000082280)
/home/ndumas/work/wikilink-parser/lexer.go:64 +0xd6
code.ndumas.com/ndumas/wikilink-parser_test.Test_Lexer.func1(0xc000190b60)
/home/ndumas/work/wikilink-parser/lexer_test.go:98 +0x18d
testing.tRunner(0xc000190b60, 0xc00019cb40)
/usr/lib/golang/src/testing/testing.go:1446 +0x217
created by testing.(*T).Run
/usr/lib/golang/src/testing/testing.go:1493 +0x75e
FAIL code.ndumas.com/ndumas/wikilink-parser 0.024s
FAIL
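
The deleted report above captures the data races and the slice-bounds panic from the earlier goroutine-and-channel lexer. The lexer.go hunk shows the replacement: emit appends to l.Items synchronously and the concurrent version survives only as a comment. A minimal sketch of that synchronous pattern, using simplified stand-in types rather than the package's real definitions:

package main

import "fmt"

type LexemeType int

const (
	LexText LexemeType = iota
	LexOpenLink
)

type Lexeme struct {
	Typ LexemeType
	Val string
}

// Lexer collects emitted lexemes into a slice instead of sending them on a
// channel, so callers can range over Items after Lex returns, with no goroutines.
type Lexer struct {
	input string
	start int
	pos   int
	Items []Lexeme
}

type stateFn func(*Lexer) stateFn

func (l *Lexer) emit(t LexemeType) {
	l.Items = append(l.Items, Lexeme{Typ: t, Val: l.input[l.start:l.pos]})
	l.start = l.pos
}

// run drives the state machine to completion on the calling goroutine.
func (l *Lexer) run(start stateFn) {
	for state := start; state != nil; {
		state = state(l)
	}
}

func main() {
	l := &Lexer{input: "plain text"}
	l.run(func(l *Lexer) stateFn {
		l.pos = len(l.input)
		l.emit(LexText)
		return nil
	})
	fmt.Printf("%#v\n", l.Items)
}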

@@ -1,4 +1,4 @@
-package wikilink
+package markdown

 import (
 	"strings"
@@ -26,15 +26,20 @@ func isBlockRef(s string) bool {
 	return strings.HasPrefix(s, BlockRef)
 }

+func isEscape(s string) bool {
+	return strings.HasPrefix(s, EscapeChar)
+}
+
 func lexIdent(l *Lexer) stateFn {
+	L := l.L.Named("lexIdent")
 	for {
-		L := l.L.Named("lexIdent")
 		s := l.input[l.GetPos():]
 		L.Debug("stepping through lexIdent")
-		if s[0] == '\\' { // i think this will handle escape characters?
-			break
-		}
 		switch {
+		case isEscape(s):
+			l.next()
+			l.next()
+			continue
 		case isCloseLink(s):
 			L.Debug("found CloseLink")
 			l.emit(LexIdent)
@@ -85,16 +90,27 @@ func lexAlias(l *Lexer) stateFn {
 func lexText(l *Lexer) stateFn {
 	L := l.L.Named("lexText")
 	for {
-		if isOpenLink(l.input[l.GetPos():]) {
+		s := l.input[l.GetPos():]
+		L.Debug("stepping through lexText")
+		switch {
+		case isEscape(s):
+			l.next()
+			l.next()
+			continue
+		case isOpenLink(s):
 			L.Debug("found openLink")
 			l.emit(LexText)
 			return lexOpenLink
 		}
 		r := l.next()
 		switch {
-		case r == EOF || r == '\n':
+		case r == EOF:
 			l.emit(LexText)
 			return nil
+		case r == '\n':
+			l.emit(LexText)
+			return lexText
 		}
 	}
 }

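The isEscape change above treats a backslash as "skip the next character": both lexIdent and lexText call l.next() twice and continue, so an escaped delimiter never triggers the delimiter cases. A standalone sketch of that idea (consumeEscapes is a hypothetical helper for illustration, not part of the package):

package main

import (
	"fmt"
	"strings"
)

const EscapeChar = `\`

func isEscape(s string) bool {
	return strings.HasPrefix(s, EscapeChar)
}

// consumeEscapes reports the byte offsets shielded by a preceding backslash.
// Calling next() twice in the lexer has the same effect: the escape character
// and the character after it are swallowed as plain text, so an escaped "]]"
// or "|" never ends the ident.
func consumeEscapes(input string) []int {
	shielded := []int{}
	for i := 0; i < len(input); {
		if isEscape(input[i:]) && i+1 < len(input) {
			shielded = append(shielded, i+1)
			i += 2 // skip the backslash and the escaped character
			continue
		}
		i++
	}
	return shielded
}

func main() {
	// The two escaped ']' bytes (offsets 7 and 9) are shielded, so the ident
	// in `[[wiki\]\]link]]` survives intact, matching the new test cases.
	fmt.Println(consumeEscapes(`[[wiki\]\]link]]`))
}
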
@@ -1,4 +1,4 @@
-package wikilink
+package markdown

 import (
 	// "log"

@@ -1,9 +1,9 @@
-package wikilink_test
+package markdown_test

 import (
 	"testing"

-	"code.ndumas.com/ndumas/wikilink-parser"
+	"code.ndumas.com/ndumas/obsidian-markdown"
 )

 func _Test_Wikilink_Parsing(t *testing.T) {
@@ -36,7 +36,7 @@ func _Test_Wikilink_Parsing(t *testing.T) {
 		tc := tc
 		t.Run(tc.name, func(t *testing.T) {
 			t.Parallel()
-			out := wikilink.Extract(tc.in)
+			out := markdown.Extract(tc.in)
 			if out.Link != tc.link {
 				t.Logf("got %#v\n", out)
 				t.Fail()
@@ -50,7 +50,7 @@ func _Test_Wikilink_Parsing(t *testing.T) {
 		tc := tc
 		t.Run(tc.name, func(t *testing.T) {
 			t.Parallel()
-			out := wikilink.Extract(tc.in)
+			out := markdown.Extract(tc.in)
 			if out.Alias != tc.alias {
 				t.Logf("got %#v\n", out)
 				t.Fail()
@@ -64,7 +64,7 @@ func _Test_Wikilink_Parsing(t *testing.T) {
 		tc := tc
 		t.Run(tc.name, func(t *testing.T) {
 			t.Parallel()
-			out := wikilink.Extract(tc.in)
+			out := markdown.Extract(tc.in)
 			if out.Fragment != tc.fragment {
 				t.Logf("got %#v\n", out)
 				t.Fail()

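The disabled (underscore-prefixed) parser test above exercises an Extract function that splits a wikilink into link, alias, and fragment parts. A usage sketch as the assertions imply it (the field names come from the test; the exact return type is not visible in this diff):

package main

import (
	"fmt"

	"code.ndumas.com/ndumas/obsidian-markdown"
)

func main() {
	// out.Link, out.Alias and out.Fragment are the fields the test compares
	// against tc.link, tc.alias and tc.fragment.
	out := markdown.Extract("[[wikilink#heading|display name]]")
	fmt.Println(out.Link, out.Alias, out.Fragment)
}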