From e5f7bb8f98dba1ab512d6816cd1c58821067ddd5 Mon Sep 17 00:00:00 2001
From: Nick Dumas
Date: Tue, 7 Feb 2023 02:02:31 +0000
Subject: [PATCH] removing lexer

---
 cmd/main.go         |  14 ------
 lexer/lexer.go      | 116 --------------------------------------------
 lexer/lexer_test.go |  50 -------------------
 3 files changed, 180 deletions(-)
 delete mode 100644 cmd/main.go
 delete mode 100644 lexer/lexer.go
 delete mode 100644 lexer/lexer_test.go

diff --git a/cmd/main.go b/cmd/main.go
deleted file mode 100644
index 04bf083..0000000
--- a/cmd/main.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package main
-
-import (
-	"log"
-
-	"github.com/therealfakemoot/wikilinks-parser/lexer"
-)
-
-func main() {
-	_, tokens := lexer.Lex(`[[foo/bar/butts.png]]`)
-	for t := range tokens {
-		log.Printf("%#v\n", t)
-	}
-}
diff --git a/lexer/lexer.go b/lexer/lexer.go
deleted file mode 100644
index 2e6d588..0000000
--- a/lexer/lexer.go
+++ /dev/null
@@ -1,116 +0,0 @@
-package lexer
-
-import (
-	"bufio"
-	// "log"
-	"strings"
-)
-
-type Token int
-
-const (
-	LinkBeginToken = iota
-	LinkEndToken
-	LinkTextToken
-	LinkAliasBeginToken
-	LinkAliasTextToken
-	LinkSectionBeginToken
-	LinkSectionTextToken
-	LinkBlockBeginToken
-	LinkBlockTextToken
-)
-
-/*
-var (
-	LinkBeginToken        = Token{Raw: "[["}
-	LinkEndToken          = Token{Raw: "]]"}
-	LinkTextToken         = Token{}
-	LinkAliasBeginToken   = Token{Raw: "|"}
-	LinkAliasTextToken    = Token{}
-	LinkSectionBeginToken = Token{Raw: "#"}
-	LinkSectionTextToken  = Token{}
-	LinkBlockBeginToken   = Token{Raw: "^"}
-	LinkBlockTextToken    = Token{}
-)
-*/
-
-type Lexer struct {
-	input  []string
-	pos    int
-	link   bool
-	Tokens chan Token
-}
-
-func Lex(input string) (*Lexer, chan Token) {
-	l := &Lexer{
-		input:  make([]string, 0),
-		pos:    0,
-		Tokens: make(chan Token),
-	}
-
-	scanner := bufio.NewScanner(strings.NewReader(input))
-	scanner.Split(bufio.ScanRunes)
-
-	for scanner.Scan() {
-		l.input = append(l.input, scanner.Text())
-	}
-
-	go l.Run()
-	return l, l.Tokens
-}
-
-func (l *Lexer) Next() string {
-	if l.pos > len(l.input)-1 {
-		return "eof"
-	}
-	r := l.input[l.pos]
-	l.pos++
-	return r
-}
-
-func (l *Lexer) Backup() {
-	l.pos--
-}
-
-func (l *Lexer) Peek() string {
-	r := l.Next()
-	l.Backup()
-	return r
-}
-
-func (l *Lexer) Emit(t Token) {
-	// log.Printf("token emitted: %#v\n", t)
-	l.Tokens <- t
-}
-
-func (l *Lexer) Run() {
-	defer close(l.Tokens)
-	for {
-		s := l.Next()
-		if s == "eof" {
-			return
-		}
-
-		if s == "[" && l.Peek() == "[" {
-			l.link = true
-			l.Next()
-			l.Emit(LinkBeginToken)
-
-			switch s {
-			case "|":
-				l.Emit(LinkAliasBeginToken)
-			case "#":
-				l.Emit(LinkSectionBeginToken)
-			case "^":
-				l.Emit(LinkBlockBeginToken)
-			}
-
-			if s == "]" && l.Peek() == "]" {
-				l.link = false
-				l.Next()
-				l.Emit(LinkEndToken)
-			}
-		}

-	}
-}
diff --git a/lexer/lexer_test.go b/lexer/lexer_test.go
deleted file mode 100644
index 5ab4817..0000000
--- a/lexer/lexer_test.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package lexer
-
-import (
-	"testing"
-)
-
-func Test_LexerSimple(t *testing.T) {
-	tt := []string{
-		"[[",
-		"]]",
-		"[[foo]]",
-		"[[foo]]",
-		"[[foo|bar]]",
-	}
-
-	for _, tc := range tt {
-		t.Run(tc, func(t *testing.T) {
-			t.Logf("checking %q", tc)
-			_, tokens := Lex(tc)
-			for tok := range tokens {
-				t.Logf("found token: %#v", tok)
-			}
-		})
-	}
-}
-
-/*
-
-func Test_LexerFull(t *testing.T) {
-	tt := []string{
-		`[[Regular Link]]`,
-		`![[Transcluded Link]]`,
-		`[[Regular Link|Alias]]`,
-		`[[Regular Link#Subsection of page]]`,
-		`[[Regular Link^link to block]]`,
-		`[[Regular Link#Subsection of page|Alias]]`,
-		`[[Regular Link^link to block|Alias]]`,
-		`[[Regular Link\|Alias]]`,
-		`[[Regular Link^link to block\|Alias]]`,
-		`[[Regular Link#Subsection of page\|Alias]]`,
-	}
-
-	for _, tc := range tt {
-		t.Run(tc, func(t *testing.T) {
-			t.Fail()
-		})
-	}
-
-}
-*/