Compare commits

...

31 Commits
v0.1.1 ... main

Author SHA1 Message Date
Nick Dumas e95c747899 restructuring subcommands for hugo tooling
continuous-integration/drone/tag Build is passing Details
2 years ago
Nick Dumas add10c2d1a Hugo bundle directory scaffolding
continuous-integration/drone/tag Build is passing Details
2 years ago
Nick Dumas 0dda41e3f8 Namespacing your viper binds is important 2 years ago
Nick Dumas b631b717b6 debugging viper 2 years ago
Nick Dumas 93fa411750 saving my work 2 years ago
Nick Dumas 4f76a1d571 First step: inventory posts in provided directory 2 years ago
Nick Dumas 94701a01b2 Cleaning up dockerfile 2 years ago
Nick Dumas 3252693509 more hugo command drafting 2 years ago
Nick Dumas 7271830829 drafting the hugo subcommand 2 years ago
Nick Dumas cff10a36ca starting on hugo command 2 years ago
Nick Dumas f479ba4d4c Proper error handling for the walkfunc
continuous-integration/drone/tag Build is passing Details
2 years ago
Nick Dumas 72d5ed9d53 This is the happy case.
continuous-integration/drone/tag Build is failing Details
2 years ago
Nick Dumas 56f806a1f5 No more contextless error returns
continuous-integration/drone/tag Build is passing Details
2 years ago
Nick Dumas 8de6452c64 refactoring makefile, adding error detail 2 years ago
Nick Dumas 5813113d9f Not sure why I changed this return statement
continuous-integration/drone/tag Build is passing Details
2 years ago
Nick Dumas 3036a6a717 Cleaning up copy command
continuous-integration/drone/tag Build is passing Details
2 years ago
Nick Dumas 84aa6ce9ca Now the alpine build injects build info
continuous-integration/drone/tag Build is passing Details
2 years ago
Nick Dumas 755c213b91 Alpine build works, without version injection 2 years ago
Nick Dumas 2524fd772a more convenience tools for releasing 2 years ago
Nick Dumas 78fb258b5d Reordering target definitions 2 years ago
Nick Dumas d8d58acc36 Making version tagging easier 2 years ago
Nick Dumas 869ca7b52a adding a positive test case
continuous-integration/drone/tag Build is passing Details
2 years ago
Nick Dumas 06d7bc3c56 Only build docker images on tags 2 years ago
Nick Dumas 5c48fe6b22 Unit tests and linting cleanup 2 years ago
Nick Dumas 13e1abd65a more condensing
continuous-integration/drone/push Build is passing Details
2 years ago
Nick Dumas 0dd2d0dae3 condensing more stuff together for sanity/cleanup 2 years ago
Nick Dumas 60e96cce18 Now that's what i call linting 2 years ago
Nick Dumas 54e60b113e also working on unit tests 2 years ago
Nick Dumas 1dcd39afa3 Starting big time cleanup 2 years ago
Nick Dumas 230d8b2160 Adding some failing tests 2 years ago
Nick Dumas 0aec42aceb gotta run setup for the tests
continuous-integration/drone/tag Build is passing Details
continuous-integration/drone/push Build is passing Details
2 years ago

@ -36,3 +36,8 @@ services:
volumes: volumes:
- name: dockersock - name: dockersock
temp: {} temp: {}
trigger:
ref:
include:
- refs/tags/*

@ -21,7 +21,7 @@ jobs:
with: with:
go-version: ">=1.20.0" go-version: ">=1.20.0"
- name: Test - name: Test
run: make report run: make setup && make check
goreleaser: goreleaser:
needs: test needs: test

@ -1,10 +1,37 @@
FROM golang:latest AS builder # syntax=docker/dockerfile:1
ADD . /opt/obp
WORKDIR /opt/obp # Build the application from source
FROM golang:latest AS build-stage
WORKDIR /app
COPY go.mod go.sum ./
RUN go mod download
COPY . ./
# This definitely works
# RUN CGO_ENABLED=0 GOOS=linux go build -o obp ./cmd/obp/
RUN make build-alpine RUN make build-alpine
FROM alpine:latest
RUN apk --no-cache add ca-certificates # Deploy the application binary into a lean image
ARG VERSION=* FROM alpine:latest AS build-release-stage
COPY --from=builder /opt/obp/dist/obp-$VERSION-alpine_amd64/obp /bin/obp
WORKDIR /
ARG VERSION=version
COPY --from=build-stage /app/dist/*-alpine/obp /bin/
RUN chmod +x /bin/obp RUN chmod +x /bin/obp
ARG USER=default
ENV HOME /home/$USER
RUN apk add --update sudo
RUN adduser -D $USER \
&& echo "$USER ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/$USER \
&& chmod 0440 /etc/sudoers.d/$USER
USER $USER
WORKDIR $HOME

@ -1,28 +1,11 @@
DOCKER_CMD=docker --config ~/.docker/
.PHONY: docker
docker: docker-image docker-push
.PHONY: docker-push
docker-push:
$(DOCKER_CMD) tag code.ndumas.com/ndumas/obsidian-pipeline:$(VERSION) code.ndumas.com/ndumas/obsidian-pipeline:latest
$(DOCKER_CMD) push code.ndumas.com/ndumas/obsidian-pipeline:latest
$(DOCKER_CMD) push code.ndumas.com/ndumas/obsidian-pipeline:$(VERSION)
.PHONY: docker-image
docker-image:
$(DOCKER_CMD) build --build-arg VERSION=$(VERSION) -t code.ndumas.com/ndumas/obsidian-pipeline:$(VERSION) .
.PHONE: build-alpine
build-alpine:
CGO_ENABLED=0 go build -a -installsuffix cgo -o $(DISTDIR)/$(NAME)-$(VERSION)-alpine_amd64/obp cmd/obp/cmd/*.go
# This file is intended as a starting point for a customized makefile for a Go project. # This file is intended as a starting point for a customized makefile for a Go project.
# #
# Targets: # Targets:
# all: Format, check, build, and test the code # all: Format, check, build, and test the code
# setup: Install build/test toolchain dependencies (e.g. gox) # setup: Install build/test toolchain dependencies (e.g. gox)
# lint: Run linters against source code # lint: Run linters against source code
# bump-{major,minor,patch}: create a new semver git tag
# release-{major,minor,patch}: push a tagged release
# format: Format the source files # format: Format the source files
# build: Build the command(s) for target OS/arch combinations # build: Build the command(s) for target OS/arch combinations
# install: Install the command(s) # install: Install the command(s)
@ -58,7 +41,7 @@ build-alpine:
# Parameters # Parameters
PKG = code.ndumas.com/ndumas/obsidian-pipeline PKG = code.ndumas.com/ndumas/obsidian-pipeline
NAME = obp NAME = obsidian-pipeline
DOC = README.md LICENSE DOC = README.md LICENSE
@ -95,7 +78,8 @@ GOBUILD = gox -osarch="!darwin/386" -rebuild -gocmd="$(GOCMD)" -arch="$(ARCHES)"
GOCLEAN = $(GOCMD) clean GOCLEAN = $(GOCMD) clean
GOINSTALL = $(GOCMD) install -a -tags "$(BUILD_TAGS)" -ldflags "$(LDFLAGS)" GOINSTALL = $(GOCMD) install -a -tags "$(BUILD_TAGS)" -ldflags "$(LDFLAGS)"
GOTEST = $(GOCMD) test -v -tags "$(BUILD_TAGS)" GOTEST = $(GOCMD) test -v -tags "$(BUILD_TAGS)"
GOLINT = golangci-lint run --timeout=30s --tests DISABLED_LINTERS = varnamelen,interfacer,ifshort,exhaustivestruct,maligned,varcheck,scopelint,structcheck,deadcode,nosnakecase,golint,depguard
GOLINT = golangci-lint run --enable-all --disable "$(DISABLED_LINTERS)" --timeout=30s --tests
GODEP = $(GOCMD) get -d -t GODEP = $(GOCMD) get -d -t
GOFMT = goreturns -w GOFMT = goreturns -w
GOBENCH = $(GOCMD) test -v -tags "$(BUILD_TAGS)" -cpu=$(BENCHCPUS) -run=NOTHING -bench=. -benchmem -outputdir "$(RPTDIR)" GOBENCH = $(GOCMD) test -v -tags "$(BUILD_TAGS)" -cpu=$(BENCHCPUS) -run=NOTHING -bench=. -benchmem -outputdir "$(RPTDIR)"
@ -104,6 +88,7 @@ ZIPCMD = zip
SHACMD = sha256sum SHACMD = sha256sum
SLOCCMD = cloc --by-file --xml --exclude-dir="vendor" --include-lang="Go" SLOCCMD = cloc --by-file --xml --exclude-dir="vendor" --include-lang="Go"
XUCMD = go2xunit XUCMD = go2xunit
DOCKER_CMD=docker --config ~/.docker/
# Dynamic Targets # Dynamic Targets
INSTALL_TARGETS := $(addprefix install-,$(CMDS)) INSTALL_TARGETS := $(addprefix install-,$(CMDS))
@ -112,7 +97,30 @@ INSTALL_TARGETS := $(addprefix install-,$(CMDS))
all: debug setup dep format lint test bench build dist all: debug setup dep format lint test bench build dist
setup: setup-dirs setup-build setup-format setup-lint setup-reports git-push:
git push origin main --tags
git push github main --tags
release-major: bump-major git-push
release-minor: bump-minor git-push
release-patch: bump-patch git-push
setup: setup-dirs setup-build setup-format setup-lint setup-reports setup-bump
setup-bump:
go install github.com/guilhem/bump@latest
bump-major: setup-bump
bump major
bump-minor: setup-bump
bump minor
bump-patch: setup-bump
bump patch
setup-reports: setup-dirs setup-reports: setup-dirs
go install github.com/tebeka/go2xunit@latest go install github.com/tebeka/go2xunit@latest
@ -174,6 +182,27 @@ dist: clean build
cd "$(DISTDIR)"; find . -maxdepth 1 -type f -printf "$(SHACMD) %P | tee \"./%P.sha\"\n" | sh cd "$(DISTDIR)"; find . -maxdepth 1 -type f -printf "$(SHACMD) %P | tee \"./%P.sha\"\n" | sh
$(info "Built v$(VERSION), build $(COMMIT_ID)") $(info "Built v$(VERSION), build $(COMMIT_ID)")
.PHONY: docker
docker: docker-image docker-push
.PHONY: docker-push
docker-push:
$(DOCKER_CMD) tag code.ndumas.com/ndumas/obsidian-pipeline:$(VERSION) code.ndumas.com/ndumas/obsidian-pipeline:latest
$(DOCKER_CMD) push code.ndumas.com/ndumas/obsidian-pipeline:latest
$(DOCKER_CMD) push code.ndumas.com/ndumas/obsidian-pipeline:$(VERSION)
.PHONY: docker-image
docker-image:
$(DOCKER_CMD) build --build-arg VERSION=$(VERSION) -t code.ndumas.com/ndumas/obsidian-pipeline:$(VERSION) .
.PHONY: build-alpine
build-alpine:
# this version breaks build variable injection
# CGO_ENABLED=0 GOOS=linux go build -ldflags="buildmode=exe $(LDFLAGS) -linkmode external -w -extldflags '-static' " -o $(DISTDIR)/$(NAME)-$(VERSION)-alpine/obp cmd/obp/*.go
CGO_ENABLED=0 GOOS=linux go build -ldflags="$(LDFLAGS)" -o $(DISTDIR)/$(NAME)-$(VERSION)-alpine/obp cmd/obp/*.go
debug: debug:
$(info MD=$(MD)) $(info MD=$(MD))
$(info WD=$(WD)) $(info WD=$(WD))

@ -1,11 +0,0 @@
package obp
func (p *Pipeline) FindAttachments() error {
return nil
}
func (p *Pipeline) MoveAttachments(post string) error {
return nil
}

@ -1,145 +0,0 @@
package obp
import (
"fmt"
"io/fs"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
"go.uber.org/zap"
)
func (p *Pipeline) Walk() error {
notesRoot := os.DirFS(p.Source)
blogRoot := os.DirFS(p.Target)
err := fs.WalkDir(notesRoot, ".", p.findAttachments)
if err != nil {
return fmt.Errorf("error scanning for attachments: %w", err)
}
err = fs.WalkDir(notesRoot, ".", p.findNotes)
if err != nil {
return fmt.Errorf("error scanning vault for posts: %w", err)
}
err = fs.WalkDir(blogRoot, ".", p.findPosts)
if err != nil {
return fmt.Errorf("error scanning blog for posts: %w", err)
}
return nil
}
func (p *Pipeline) findNotes(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
walkLogger := p.L.Named("FindNotes").With(zap.String("path", path))
if strings.HasSuffix(path, ".md") && strings.Contains(path, p.BlogDir) {
walkLogger.Info("found blog post to publish, adding to index")
p.Notes = append(p.Notes, path)
}
return nil
}
func (p *Pipeline) findAttachments(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
walkLogger := p.L.Named("FindAttachments").With(zap.String("path", path))
if strings.Contains(path, p.AttachmentsDir) {
walkLogger.Info("found attachment file, adding to index")
absPath, err := filepath.Abs(filepath.Join(p.Source, path))
if err != nil {
return fmt.Errorf("error generating absolute path for attachment %q: %w", path, err)
}
walkLogger.Info("adding Attachment",
zap.String("key", filepath.Base(absPath)),
zap.String("value", absPath),
)
p.Attachments[filepath.Base(absPath)] = absPath
}
return nil
}
func (p *Pipeline) findPosts(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
walkLogger := p.L.Named("FindPosts").With(zap.String("path", path))
if strings.HasSuffix(path, "index.md") {
walkLogger.Info("found index.md, adding to index")
p.Posts = append(p.Posts, path)
}
return nil
}
func (p *Pipeline) Move() error {
moveLogger := p.L.Named("Move")
moveLogger.Info("scanning posts", zap.Strings("posts", p.Posts))
for _, post := range p.Notes {
// log.Printf("scanning %q for attachment links", post)
linkedAttachments, err := extractAttachments(filepath.Join(p.Source, post))
if err != nil {
return fmt.Errorf("could not extract attachment links from %q: %w", post, err)
}
for _, attachment := range linkedAttachments {
att, ok := p.Attachments[attachment]
if !ok {
return fmt.Errorf("Attachment is linked by post %q but doesn't exist in attachments directory %q", post, p.AttachmentsDir)
}
err := moveAttachment(post, att, p.L.Named("moveAttachment"))
if err != nil {
return fmt.Errorf("error moving attachments: %w", err)
}
}
}
return nil
}
func moveAttachment(post, attachment string, l *zap.Logger) error {
l.Info("moving attachment",
zap.String("post", post),
zap.String("attachment", attachment),
)
return nil
}
func extractAttachments(post string) ([]string, error) {
pat := regexp.MustCompile(`\[\[Resources\/attachments\/(.*)?\]\]`)
attachments := make([]string, 0)
postBody, err := ioutil.ReadFile(post)
if err != nil {
return attachments, fmt.Errorf("error opening post to scan for attachment links: %w", err)
}
for _, att := range pat.FindAllSubmatch(postBody, -1) {
filename := string(att[1])
attachments = append(attachments, filename)
}
return attachments, nil
}

@ -4,40 +4,15 @@ Copyright © 2023 NAME HERE <EMAIL ADDRESS>
package cmd package cmd
import ( import (
// "fmt"
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
var (
source, target string
)
// rootCmd represents the base command when called without any subcommands
var hugoCmd = &cobra.Command{ var hugoCmd = &cobra.Command{
Use: "hugo", Use: "hugo",
Short: "convert a set of Obsidian notes into a Hugo compatible directory structure", Short: "manage your hugo blog using your vault as a source of truth",
Long: `long description`, Long: `manage your hugo blog using your vault as a source of truth`,
PreRunE: func(cmd *cobra.Command, args []string) error {
// here is where I validate arguments, open and parse config files, etc
return nil
},
} }
func init() { func init() {
// Here you will define your flags and configuration settings.
// Cobra supports persistent flags, which, if defined here,
// will be global for your application.
hugoCmd.PersistentFlags().StringVar(&source, "source", "", "directory containing ready-to-publish posts")
hugoCmd.PersistentFlags().StringVar(&target, "target", "", "target Hugo directory (typically content/posts)")
// Cobra also supports local flags, which will only run
// when this action is called directly.
// rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
// rootCmd.SetHelpFunc(gloss.CharmHelp)
// rootCmd.SetUsageFunc(gloss.CharmUsage)
rootCmd.AddCommand(hugoCmd) rootCmd.AddCommand(hugoCmd)
} }

@ -0,0 +1,62 @@
/*
Copyright © 2023 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"fmt"
"log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"code.ndumas.com/ndumas/obsidian-pipeline"
)
// hugoBundleCmd converts vault notes into Hugo page bundles. The source
// and target locations are read from the viper keys "hugo.source" and
// "hugo.target", which init binds to this command's flags.
var hugoBundleCmd = &cobra.Command{
	Use:   "bundle",
	Short: "convert a set of Obsidian notes into a Hugo compatible directory structure",
	Long:  `generate hugo content from your vault`,
	PreRunE: func(cmd *cobra.Command, args []string) error {
		// here is where I validate arguments, open and parse config files, etc
		return nil
	},
	RunE: func(cmd *cobra.Command, args []string) error {
		source := viper.GetString("hugo.source")
		target := viper.GetString("hugo.target")

		// Copy, then sanitize, then gather media — stop at the first failure.
		if err := obp.CopyPosts(source, target); err != nil {
			return fmt.Errorf("error copying posts in %q: %w", source, err)
		}

		if err := obp.Sanitize(source); err != nil {
			return fmt.Errorf("error sanitizing posts in %q: %w", source, err)
		}

		if err := obp.GatherMedia(source); err != nil {
			return fmt.Errorf("error gathering media in %q: %w", source, err)
		}

		return nil
	},
}
func init() {
hugoBundleCmd.Flags().StringP("source", "s", "", "path to vault directory containing hugo posts")
err := viper.BindPFlag("hugo.source", hugoBundleCmd.Flags().Lookup("source"))
if err != nil {
log.Panicln("error binding viper to source flag:", err)
}
hugoBundleCmd.Flags().StringP("target", "t", "", "hugo content/ directory")
err = viper.BindPFlag("hugo.target", hugoBundleCmd.Flags().Lookup("target"))
if err != nil {
log.Panicln("error binding viper to target flag:", err)
}
hugoBundleCmd.MarkFlagsRequiredTogether("source", "target")
hugoCmd.AddCommand(hugoBundleCmd)
}

@ -25,8 +25,8 @@ var validateCmd = &cobra.Command{
Long: `Validate YAML frontmatter with jsonschema Long: `Validate YAML frontmatter with jsonschema
`, `,
RunE: func(cmd *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
schema := viper.GetString("schema") schema := viper.GetString("validate.schema")
target := viper.GetString("target") target := viper.GetString("validate.target")
if target == "" { if target == "" {
return fmt.Errorf("target flag must not be empty") return fmt.Errorf("target flag must not be empty")
} }
@ -34,7 +34,7 @@ var validateCmd = &cobra.Command{
err := fs.WalkDir(root, ".", func(path string, d fs.DirEntry, err error) error { err := fs.WalkDir(root, ".", func(path string, d fs.DirEntry, err error) error {
if err != nil { if err != nil {
return err return fmt.Errorf("could not walk %q: %w", path, err)
} }
if d.IsDir() { if d.IsDir() {
@ -45,23 +45,25 @@ var validateCmd = &cobra.Command{
if err != nil { if err != nil {
return fmt.Errorf("error generating absolute path for %q", target) return fmt.Errorf("error generating absolute path for %q", target)
} }
target, err := os.Open(absPath) file, err := os.Open(absPath)
if err != nil { if err != nil {
return fmt.Errorf("could not open target file: %w", err) return fmt.Errorf("could not open target file: %w", err)
} }
defer target.Close() defer file.Close()
err = obp.Validate(schema, target) err = obp.Validate(schema, file)
if err != nil { if err != nil {
details, ok := err.(*jsonschema.ValidationError) details, ok := err.(*jsonschema.ValidationError)
if !ok { if !ok {
return err return fmt.Errorf("error validating %q: %w", path, err)
} }
obp.PrettyDetails(cmd.OutOrStdout(), viper.GetString("format"), details.DetailedOutput(), absPath) obp.PrettyDetails(cmd.OutOrStdout(), viper.GetString("format"), details.DetailedOutput(), absPath)
} }
return nil return nil
}) })
if err != nil {
return err return fmt.Errorf("walkfunc failed: %w", err)
}
return nil
}, },
} }
@ -73,21 +75,21 @@ func init() {
validateCmd.Flags().StringP("target", "t", "", "directory containing validation targets") validateCmd.Flags().StringP("target", "t", "", "directory containing validation targets")
validateCmd.MarkFlagsRequiredTogether("schema", "target") validateCmd.MarkFlagsRequiredTogether("schema", "target")
validateCmd.PersistentFlags().StringVar(&format, "format", "markdown", "output format [markdown, json, csv]") validateCmd.PersistentFlags().StringVar(&format, "format", "markdown", "output format [markdown, json, csv]")
rootCmd.AddCommand(validateCmd)
err := viper.BindPFlag("schema", validateCmd.Flags().Lookup("schema")) err := viper.BindPFlag("schema", validateCmd.Flags().Lookup("schema"))
if err != nil { if err != nil {
log.Panicln("error binding viper to schema flag:", err) log.Panicln("error binding viper to schema flag:", err)
} }
err = viper.BindPFlag("target", validateCmd.Flags().Lookup("target")) err = viper.BindPFlag("validate.target", validateCmd.Flags().Lookup("target"))
if err != nil { if err != nil {
log.Panicln("error binding viper to target flag:", err) log.Panicln("error binding viper to target flag:", err)
} }
err = viper.BindPFlag("format", validateCmd.PersistentFlags().Lookup("format")) err = viper.BindPFlag("validate.format", validateCmd.PersistentFlags().Lookup("format"))
if err != nil { if err != nil {
log.Panicln("error binding viper to format flag:", err) log.Panicln("error binding viper to format flag:", err)
} }
rootCmd.AddCommand(validateCmd)
} }

@ -0,0 +1,92 @@
package obp
import (
"fmt"
"io"
"io/fs"
// "log"
"os"
"path/filepath"
"strings"
)
func copy(src, dst string) (int64, error) {
sourceFileStat, err := os.Stat(src)
if err != nil {
return 0, err
}
if !sourceFileStat.Mode().IsRegular() {
return 0, fmt.Errorf("%s is not a regular file", src)
}
source, err := os.Open(src)
if err != nil {
return 0, err
}
defer source.Close()
destination, err := os.Create(dst)
if err != nil {
return 0, err
}
defer destination.Close()
nBytes, err := io.Copy(destination, source)
return nBytes, err
}
func CopyPosts(src, dst string) error {
posts := make([]string, 0)
srcRoot := os.DirFS(src)
err := fs.WalkDir(srcRoot, ".", func(path string, d fs.DirEntry, err error) error {
// here's where I walk through the source directory and collect all the markdown notes
if err != nil {
return fmt.Errorf("could not walk %q: %w", path, err)
}
if d.IsDir() {
return nil
}
if strings.HasSuffix(path, ".md") {
posts = append(posts, filepath.Join(src, path))
}
return nil
})
if err != nil {
return fmt.Errorf("walkfunc failed: %w", err)
}
for _, post := range posts {
base := filepath.Base(post)
splitPostName := strings.Split(base, ".")
postName := strings.Join(splitPostName[:len(splitPostName)-1], ".")
postDir := filepath.Join(dst, postName)
err := os.MkdirAll(postDir, 0777)
if err != nil && !os.IsExist(err) {
return fmt.Errorf("error creating target directory %q: %w", dst, err)
}
_, err = copy(post, filepath.Join(postDir, "index.md"))
if err != nil {
return fmt.Errorf("error opening %q for copying: %w", post, err)
}
}
return nil
}
// Sanitize is a placeholder for cleaning up copied posts under src.
// NOTE(review): currently a stub — it does nothing and always succeeds.
func Sanitize(src string) error {
	return nil
}

// GatherMedia is a placeholder for collecting media referenced by posts
// under src. NOTE(review): currently a stub — it does nothing and
// always succeeds.
func GatherMedia(src string) error {
	return nil
}

@ -1,17 +1,27 @@
package obp package obp
import ( import (
"fmt"
"io/fs"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
"go.uber.org/zap" "go.uber.org/zap"
) )
func NewPipeline(dev bool) *Pipeline { func NewPipeline(dev bool) *Pipeline {
var p Pipeline var p Pipeline
var l *zap.Logger var l *zap.Logger
l, _ = zap.NewProduction() l, _ = zap.NewProduction()
if dev { if dev {
l, _ = zap.NewDevelopment() l, _ = zap.NewDevelopment()
} }
p.L = l p.L = l
p.Attachments = make(map[string]string) p.Attachments = make(map[string]string)
p.Posts = make([]string, 0) p.Posts = make([]string, 0)
@ -26,3 +36,169 @@ type Pipeline struct {
L *zap.Logger L *zap.Logger
BlogDir, AttachmentsDir string BlogDir, AttachmentsDir string
} }
// Walk indexes the pipeline's inputs: attachments and publishable notes
// from the vault at p.Source, and existing posts from the blog at
// p.Target. Results are accumulated on the Pipeline by the WalkDir
// callbacks.
func (p *Pipeline) Walk() error {
	notesRoot := os.DirFS(p.Source)
	blogRoot := os.DirFS(p.Target)

	if err := fs.WalkDir(notesRoot, ".", p.findAttachments); err != nil {
		return fmt.Errorf("error scanning for attachments: %w", err)
	}

	if err := fs.WalkDir(notesRoot, ".", p.findNotes); err != nil {
		return fmt.Errorf("error scanning vault for posts: %w", err)
	}

	if err := fs.WalkDir(blogRoot, ".", p.findPosts); err != nil {
		return fmt.Errorf("error scanning blog for posts: %w", err)
	}

	return nil
}
// findNotes is a fs.WalkDirFunc that records every markdown file inside
// p.BlogDir into p.Notes.
func (p *Pipeline) findNotes(path string, d fs.DirEntry, err error) error {
	if err != nil {
		return err
	}
	if d.IsDir() {
		return nil
	}

	// Only markdown files inside the blog directory are publishable.
	if !strings.HasSuffix(path, ".md") || !strings.Contains(path, p.BlogDir) {
		return nil
	}

	p.L.Named("FindNotes").
		With(zap.String("path", path)).
		Info("found blog post to publish, adding to index")
	p.Notes = append(p.Notes, path)

	return nil
}
// findAttachments is a fs.WalkDirFunc that indexes every file under
// p.AttachmentsDir into p.Attachments, keyed by base filename and
// mapping to the file's absolute path.
func (p *Pipeline) findAttachments(path string, d fs.DirEntry, err error) error {
	if err != nil {
		return err
	}
	if d.IsDir() {
		return nil
	}
	if !strings.Contains(path, p.AttachmentsDir) {
		return nil
	}

	logger := p.L.Named("FindAttachments").With(zap.String("path", path))
	logger.Info("found attachment file, adding to index")

	// Index by absolute path so later moves don't depend on the CWD.
	absPath, err := filepath.Abs(filepath.Join(p.Source, path))
	if err != nil {
		return fmt.Errorf("error generating absolute path for attachment %q: %w", path, err)
	}

	logger.Info("adding Attachment",
		zap.String("key", filepath.Base(absPath)),
		zap.String("value", absPath),
	)
	p.Attachments[filepath.Base(absPath)] = absPath

	return nil
}
// findPosts is a fs.WalkDirFunc that records every existing Hugo page
// bundle (any path ending in index.md) into p.Posts.
func (p *Pipeline) findPosts(path string, d fs.DirEntry, err error) error {
	if err != nil {
		return err
	}
	if d.IsDir() {
		return nil
	}
	if !strings.HasSuffix(path, "index.md") {
		return nil
	}

	p.L.Named("FindPosts").
		With(zap.String("path", path)).
		Info("found index.md, adding to index")
	p.Posts = append(p.Posts, path)

	return nil
}
// Move scans each indexed note for attachment links and dispatches each
// linked attachment to moveAttachment. It fails if a note links an
// attachment that was never indexed by findAttachments.
//
// NOTE(review): the log line reports p.Posts but the loop iterates
// p.Notes — confirm which index Move is meant to process.
func (p *Pipeline) Move() error {
	moveLogger := p.L.Named("Move")
	moveLogger.Info("scanning posts", zap.Strings("posts", p.Posts))
	for _, post := range p.Notes {
		// log.Printf("scanning %q for attachment links", post)
		linkedAttachments, err := extractAttachments(filepath.Join(p.Source, post))
		if err != nil {
			return fmt.Errorf("could not extract attachment links from %q: %w", post, err)
		}
		for _, attachment := range linkedAttachments {
			// Every linked attachment must already be in the index built
			// by findAttachments; a miss means a broken link in the note.
			att, ok := p.Attachments[attachment]
			if !ok {
				return fmt.Errorf("Attachment is linked by post %q but doesn't exist in attachments directory %q", post, p.AttachmentsDir)
			}
			err := moveAttachment(post, att, p.L.Named("moveAttachment"))
			if err != nil {
				return fmt.Errorf("error moving attachments: %w", err)
			}
		}
	}
	return nil
}
// moveAttachment records the intent to move a single attachment for the
// given post. NOTE(review): currently a stub — it only logs the pair
// and returns nil; no file is actually moved yet.
func moveAttachment(post, attachment string, l *zap.Logger) error {
	l.Info("moving attachment",
		zap.String("post", post),
		zap.String("attachment", attachment),
	)
	return nil
}
// attachmentLinkPattern matches Obsidian-style embeds of the form
// [[Resources/attachments/<name>]]. Compiled once at package scope so
// repeated calls don't recompile it. The capture is non-greedy: the
// previous greedy `(.*)?` swallowed everything up to the LAST `]]` on a
// line, so two links on one line collapsed into a single bogus match.
var attachmentLinkPattern = regexp.MustCompile(`\[\[Resources\/attachments\/(.*?)\]\]`)

// findAttachmentLinks returns the attachment filenames referenced by
// [[Resources/attachments/...]] links in the given post body, in order
// of appearance.
func findAttachmentLinks(postBody []byte) []string {
	attachments := make([]string, 0)
	for _, att := range attachmentLinkPattern.FindAllSubmatch(postBody, -1) {
		attachments = append(attachments, string(att[1]))
	}

	return attachments
}

// extractAttachments reads the post at the given path and returns the
// attachment filenames it links. On a read failure it returns an empty
// (non-nil) slice alongside the wrapped error.
func extractAttachments(post string) ([]string, error) {
	postBody, err := ioutil.ReadFile(post)
	if err != nil {
		return make([]string, 0), fmt.Errorf("error opening post to scan for attachment links: %w", err)
	}

	return findAttachmentLinks(postBody), nil
}
// FindAttachments is a stub; attachment discovery currently happens in
// the findAttachments WalkDir callback. TODO(review): implement or remove.
func (p *Pipeline) FindAttachments() error {
	return nil
}

// MoveAttachments is a stub. TODO(review): implement or remove.
func (p *Pipeline) MoveAttachments(post string) error {
	return nil
}

// FindPosts is a stub; post discovery currently happens in the
// findPosts WalkDir callback. TODO(review): implement or remove.
func (p *Pipeline) FindPosts() error {
	return nil
}

// SanitizePost is a stub. TODO(review): implement or remove.
func (p *Pipeline) SanitizePost(post string) error {
	return nil
}

// CopyPost is a stub. TODO(review): implement or remove.
func (p *Pipeline) CopyPost(post string) error {
	return nil
}

@ -1,15 +0,0 @@
package obp
func (p *Pipeline) FindPosts() error {
return nil
}
func (p *Pipeline) SanitizePost(post string) error {
return nil
}
func (p *Pipeline) CopyPost(post string) error {
return nil
}

@ -2,34 +2,41 @@ package obp
import ( import (
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"io" "io"
"github.com/santhosh-tekuri/jsonschema/v5" "github.com/santhosh-tekuri/jsonschema/v5"
// allow the jsonschema validator to auto-download http-hosted schemas.
_ "github.com/santhosh-tekuri/jsonschema/v5/httploader" _ "github.com/santhosh-tekuri/jsonschema/v5/httploader"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v3"
) )
var ErrUnsupportedOutputFormat = errors.New("unsupported output format")
// Validate accepts a Markdown file as input via the Reader // Validate accepts a Markdown file as input via the Reader
// and parses the frontmatter present, if any. It then // and parses the frontmatter present, if any. It then
// applies the schema fetched from schemaURL against the // applies the schema fetched from schemaURL against the
// decoded YAML. // decoded YAML.
func Validate(schemaURL string, r io.Reader) error { func Validate(schemaURL string, r io.Reader) error {
var m interface{} var frontmatter interface{}
dec := yaml.NewDecoder(r) dec := yaml.NewDecoder(r)
err := dec.Decode(&m)
err := dec.Decode(&frontmatter)
if err != nil { if err != nil {
return fmt.Errorf("error decoding YAML: %w", err) return fmt.Errorf("error decoding YAML: %w", err)
} }
compiler := jsonschema.NewCompiler() compiler := jsonschema.NewCompiler()
schema, err := compiler.Compile(schemaURL) schema, err := compiler.Compile(schemaURL)
if err != nil { if err != nil {
return fmt.Errorf("error compiling schema: %w", err) return fmt.Errorf("error compiling schema: %w", err)
} }
if err := schema.Validate(m); err != nil {
return err if err != nil {
return fmt.Errorf("frontmatter failed validation: %w", schema.Validate(frontmatter))
} }
return nil return nil
@ -39,6 +46,7 @@ func recurseDetails(detailed jsonschema.Detailed, acc map[string]jsonschema.Deta
if detailed.Error != "" { if detailed.Error != "" {
acc[detailed.AbsoluteKeywordLocation] = detailed acc[detailed.AbsoluteKeywordLocation] = detailed
} }
for _, e := range detailed.Errors { for _, e := range detailed.Errors {
acc = recurseDetails(e, acc) acc = recurseDetails(e, acc)
} }
@ -49,29 +57,30 @@ func recurseDetails(detailed jsonschema.Detailed, acc map[string]jsonschema.Deta
// PrettyDetails takes error output from jsonschema.Validate // PrettyDetails takes error output from jsonschema.Validate
// and pretty-prints it to stdout. // and pretty-prints it to stdout.
// //
// Supported formats are: JSON, Markdown // Supported formats are: JSON, Markdown.
func PrettyDetails(w io.Writer, format string, details jsonschema.Detailed, filename string) error { func PrettyDetails(writer io.Writer, format string, details jsonschema.Detailed, filename string) error {
// acc := make([]jsonschema.Detailed, 0) // acc := make([]jsonschema.Detailed, 0)
acc := make(map[string]jsonschema.Detailed) acc := make(map[string]jsonschema.Detailed)
errors := recurseDetails(details, acc) errors := recurseDetails(details, acc)
switch format { switch format {
case "json": case "json":
enc := json.NewEncoder(w) enc := json.NewEncoder(writer)
err := enc.Encode(details) err := enc.Encode(details)
if err != nil { if err != nil {
return fmt.Errorf("error writing JSON payload to provided writer: %w", err) return fmt.Errorf("error writing JSON payload to provided writer: %w", err)
} }
case "markdown": case "markdown":
fmt.Fprintf(w, "# Validation Errors for %q\n", filename) fmt.Fprintf(writer, "# Validation Errors for %q\n", filename)
fmt.Fprintf(w, "Validation Rule|Failing Property|Error\n") fmt.Fprintf(writer, "Validation Rule|Failing Property|Error\n")
fmt.Fprintf(w, "--|---|---\n") fmt.Fprintf(writer, "--|---|---\n")
for _, e := range errors { for _, e := range errors {
fmt.Fprintf(w, "%s|%s|%s\n", e.KeywordLocation, e.InstanceLocation, e.Error) fmt.Fprintf(writer, "%s|%s|%s\n", e.KeywordLocation, e.InstanceLocation, e.Error)
} }
default: default:
return fmt.Errorf("unknown format") return ErrUnsupportedOutputFormat
} }
return nil return nil

@ -0,0 +1,77 @@
package obp_test
import (
"bytes"
"testing"
"code.ndumas.com/ndumas/obsidian-pipeline"
)
// Test_BasicValidation exercises obp.Validate against the published
// note schema. BUG FIX: the table previously carried an `expected error`
// field that was nil in every case and was compared with `==`, which
// made the test demand a validation FAILURE for all inputs — including
// the valid "GoodSchema" document. Each case now states explicitly
// whether validation should fail.
func Test_BasicValidation(t *testing.T) {
	t.Parallel()

	tt := []struct {
		name    string
		b       *bytes.Buffer
		wantErr bool
	}{
		{
			name: "KeyMissing",
			b: bytes.NewBufferString(`
---
boop: "bop"
---
# Markdown Content
`),
			wantErr: true,
		},
		{
			name: "KeyTypeMismatch",
			b: bytes.NewBufferString(`
---
title: 2
---
# Markdown Content
`),
			wantErr: true,
		},
		{
			name: "GoodSchema",
			b: bytes.NewBufferString(`
---
draft: false
title: "Mapping Aardwolf with Graphviz and Golang"
aliases: ["Mapping Aardwolf with Graphviz"]
series: ["mapping-aardwolf"]
date: "2023-04-06"
author: "Nick Dumas"
cover: ""
keywords: [""]
description: "Maxing out your CPU for fun and profit with dense graphs, or how I'm attempting to follow through on my plan to work on projects with more visual outputs"
showFullContent: false
tags:
- graphviz
- graph
- aardwolf
- golang
---
## Textual Cartography
Aardwolf has a fairly active developer community, people who write and maintain plugins and try to map the game world and its contents.
`),
			wantErr: false,
		},
	}

	for _, tc := range tt {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()
			err := obp.Validate("https://schemas.ndumas.com/obsidian/note.schema.json", tc.b)
			if tc.wantErr && err == nil {
				t.Log("Expected Validate() to fail on input")
				t.Fail()
			}
			if !tc.wantErr && err != nil {
				t.Logf("expected input to pass validation, got: %v", err)
				t.Fail()
			}
		})
	}
}
Loading…
Cancel
Save