gotta commit
						commit
						c56128156e
					
				| @ -0,0 +1,15 @@ | ||||
| package main | ||||
| 
 | ||||
| import ( | ||||
| 	"log" | ||||
| 
 | ||||
| 	"github.com/therealfakemoot/obsidian-parser" | ||||
| ) | ||||
| 
 | ||||
| func main() { | ||||
| 	l, tokens := lexer.Lex("[[foo/bar/butts.png]]") | ||||
| 	go l.Run() | ||||
| 	for t := range tokens { | ||||
| 		log.Printf("%#v\n", t) | ||||
| 	} | ||||
| } | ||||
| @ -0,0 +1,3 @@ | ||||
| module github.com/therealfakemoot/obsidian-parser | ||||
| 
 | ||||
| go 1.19 | ||||
| @ -0,0 +1,10 @@ | ||||
| At this point, I've got a fully automated publishing pipeline. As soon as a commit gets pushed to my blog repository, Drone jumps into action and runs a fresh Hugo build. The process is far from perfect, though. | ||||
| 
 | ||||
| ![[Resources/attachments/obsidian-pipeline-screenshot.png]] | ||||
| [[Resources/attachments/obsidian-pipeline-screenshot.png]] | ||||
| 
 | ||||
| For some reason, `audit2allow` was emitting invalid output as the result of something in my audit log. I never traced it down. Whatever was causing this wasn't related to my `drone` setup since I got everything running without fixing it. | ||||
| 
 | ||||
| [[Resources/notes/202203261920-faerun-atropus|black moon]] ![[Resources/attachments/obsidian-pipeline-screenshot.png]] | ||||
| [[Resources/notes/202203261920-faerun-atropus|black moon]]![[Resources/attachments/obsidian-pipeline-screenshot.png]] | ||||
| ![[Resources/attachments/obsidian-pipeline-screenshot.png]][[Resources/attachments/obsidian-pipeline-screenshot.png]] | ||||
| @ -0,0 +1,92 @@ | ||||
| package lexer | ||||
| 
 | ||||
| import ( | ||||
| 	"bufio" | ||||
| 	// "log"
 | ||||
| 	"strings" | ||||
| ) | ||||
| 
 | ||||
// Token is a single lexical unit produced by the Lexer. Raw holds the
// literal text the token stands for (e.g. "[[" for a link opener); the
// package-level *TextToken values leave it empty.
type Token struct {
	Raw string
}
| 
 | ||||
// Tokens recognized while lexing Obsidian-style wiki links. Only the
// delimiter tokens carry a Raw value; the *TextToken placeholders have an
// empty Raw and are never emitted anywhere in this file — presumably they
// are reserved for carrying matched text once the lexer handles link
// bodies, aliases (|), sections (#), and blocks (^). TODO confirm.
var (
	LinkBeginToken        = Token{Raw: "[["}
	LinkEndToken          = Token{Raw: "]]"}
	LinkTextToken         = Token{}
	LinkAliasBeginToken   = Token{Raw: "|"}
	LinkAliasTextToken    = Token{}
	LinkSectionBeginToken = Token{Raw: "#"}
	LinkSectionTextToken  = Token{}
	LinkBlockBeginToken   = Token{Raw: "^"}
	LinkBlockTextToken    = Token{}
)
| 
 | ||||
// Lexer walks an input string one rune at a time and sends recognized
// tokens on Tokens.
type Lexer struct {
	input  []string   // input split into single-rune strings by Lex
	pos    int        // index of the next rune Next will return
	Tokens chan Token // unbuffered; closed by Run when input is exhausted
}
| 
 | ||||
| func Lex(input string) (*Lexer, chan Token) { | ||||
| 	l := &Lexer{ | ||||
| 		input:  make([]string, 0), | ||||
| 		pos:    0, | ||||
| 		Tokens: make(chan Token), | ||||
| 	} | ||||
| 
 | ||||
| 	scanner := bufio.NewScanner(strings.NewReader(input)) | ||||
| 	scanner.Split(bufio.ScanRunes) | ||||
| 
 | ||||
| 	for scanner.Scan() { | ||||
| 		l.input = append(l.input, scanner.Text()) | ||||
| 	} | ||||
| 
 | ||||
| 	go l.Run() | ||||
| 	return l, l.Tokens | ||||
| } | ||||
| 
 | ||||
| func (l *Lexer) Next() string { | ||||
| 	if l.pos > len(l.input)-1 { | ||||
| 		return "eof" | ||||
| 	} | ||||
| 	r := l.input[l.pos] | ||||
| 	l.pos++ | ||||
| 	return r | ||||
| } | ||||
| 
 | ||||
// Backup steps the cursor back one rune so the previous rune will be
// re-read by the next call to Next.
// NOTE(review): there is no lower-bound check — calling Backup at pos 0,
// or after Next returned "eof" (which does not advance pos), moves the
// cursor out of sync. Callers must pair it with a successful Next.
func (l *Lexer) Backup() {
	l.pos--
}
| 
 | ||||
| func (l *Lexer) Peek() string { | ||||
| 	r := l.Next() | ||||
| 	l.Backup() | ||||
| 	return r | ||||
| } | ||||
| 
 | ||||
// Emit sends t on the Tokens channel. The channel is unbuffered, so the
// send blocks until a consumer receives — Run must therefore execute in
// its own goroutine while the caller drains Tokens.
func (l *Lexer) Emit(t Token) {
	// log.Printf("token emitted: %#v\n", t)
	l.Tokens <- t
}
| 
 | ||||
| func (l *Lexer) Run() { | ||||
| 	defer close(l.Tokens) | ||||
| 	for { | ||||
| 		s := l.Next() | ||||
| 		if s == "eof" { | ||||
| 			return | ||||
| 		} | ||||
| 
 | ||||
| 		if s == "[" && l.Peek() == "[" { | ||||
| 			l.Next() | ||||
| 			l.Emit(LinkBeginToken) | ||||
| 		} | ||||
| 
 | ||||
| 		if s == "]" && l.Peek() == "]" { | ||||
| 			l.Next() | ||||
| 			l.Emit(LinkBeginToken) | ||||
| 		} | ||||
| 
 | ||||
| 	} | ||||
| } | ||||
| @ -0,0 +1,29 @@ | ||||
| package lexer | ||||
| 
 | ||||
| import ( | ||||
| 	"testing" | ||||
| ) | ||||
| 
 | ||||
| func Test_Lexer(t *testing.T) { | ||||
| 	tt := []string{ | ||||
| 		"[[Regular Link]]", | ||||
| 		/* | ||||
| 			"![[Transcluded Link]]", | ||||
| 			"[[Regular Link|Alias]]", | ||||
| 			"[[Regular Link#Subsection of page]]", | ||||
| 			"[[Regular Link^link to block]]", | ||||
| 			"[[Regular Link#Subsection of page|Alias]]", | ||||
| 			"[[Regular Link^link to block|Alias]]", | ||||
| 			"[[Regular Link\|Alias]]", | ||||
| 			"[[Regular Link^link to block\|Alias]]", | ||||
| 			"[[Regular Link#Subsection of page\|Alias]]", | ||||
| 		*/ | ||||
| 	} | ||||
| 
 | ||||
| 	for _, tc := range tt { | ||||
| 		t.Run(tc, func(t *testing.T) { | ||||
| 			t.Fail() | ||||
| 		}) | ||||
| 	} | ||||
| 
 | ||||
| } | ||||
| @ -0,0 +1,4 @@ | ||||
#! /bin/sh
# Convert Obsidian wiki-links ([[...]] / ![[...]]) in the file given as $1
# into markdown link syntax, writing the result to stdout.
#
# Bug fixes vs the previous version:
#   - sed defaults to basic regular expressions, where `?` is a literal
#     character, so "!?\]\]" only matched the literal text "!?]]" and the
#     optional "!" was never handled. -E enables extended regexes where
#     `?` means "zero or one".
#   - The two sed invocations each read $1 independently and each printed
#     the whole file, so the output contained two half-converted copies.
#     They are now a single pipeline over one stream.
#   - $1 is quoted so filenames with spaces work.
sed -E 's/!?\]\]/)/g' "$1" | sed -E 's/!?\[\[/[](/g'
| @ -0,0 +1,10 @@ | ||||
| [[Regular Link]] | ||||
| ![[Transcluded Link]] | ||||
| [[Regular Link|Alias]] | ||||
| [[Regular Link#Subsection of page]] | ||||
| [[Regular Link^link to block]] | ||||
| [[Regular Link#Subsection of page|Alias]] | ||||
| [[Regular Link^link to block|Alias]] | ||||
| [[Regular Link\|Alias]] | ||||
| [[Regular Link^link to block\|Alias]] | ||||
| [[Regular Link#Subsection of page\|Alias]] | ||||
					Loading…
					
					
				
		Reference in New Issue