Signed-off-by: Naman Sood <mail@nsood.in>
This commit is contained in: parent 18455362d3, commit 1981a291aa
17 changed files with 916 additions and 849 deletions
Deleted file:
@@ -1,74 +0,0 @@
package main

import (
    "net/http"
    "strconv"

    "github.com/aymerick/raymond"
)

// errorCatcher is a wrapper for http.ResponseWriter that
// captures 4xx and 5xx status codes and handles them in
// a custom manner
type errorCatcher struct {
    req          *http.Request
    res          http.ResponseWriter
    errorTpl     *raymond.Template
    notFoundTpl  *raymond.Template
    handledError bool
}

func (ec *errorCatcher) Header() http.Header {
    return ec.res.Header()
}

func (ec *errorCatcher) Write(buf []byte) (int, error) {
    // if we have already sent a response, pretend that this was successful
    if ec.handledError {
        return len(buf), nil
    }
    return ec.res.Write(buf)
}

func (ec *errorCatcher) WriteHeader(statusCode int) {
    if ec.handledError {
        return
    }
    if statusCode == 404 {
        ctx := map[string]string{
            "path": ec.req.URL.Path,
        }
        page, err := ec.notFoundTpl.Exec(ctx)
        // if we don't have a page to write, return before
        // we toggle the flag so we fall back to the original
        // error page
        if err != nil {
            return
        }
        ec.res.Header().Set("Content-Type", "text/html; charset=utf-8")
        ec.res.WriteHeader(statusCode)
        ec.res.Write([]byte(page))
        ec.handledError = true
        return
    }

    if statusCode >= 400 && statusCode < 600 {
        ctx := map[string]string{
            "code": strconv.Itoa(statusCode),
        }
        page, err := ec.errorTpl.Exec(ctx)
        // if we don't have a page to write, return before
        // we toggle the flag so we fall back to the original
        // error page
        if err != nil {
            return
        }
        ec.res.Header().Set("Content-Type", "text/html; charset=utf-8")
        ec.res.WriteHeader(statusCode)
        ec.res.Write([]byte(page))
        ec.handledError = true
        return
    }

    ec.res.WriteHeader(statusCode)
}
Deleted file:
@@ -1,75 +0,0 @@
package main

import (
    "log"
    "os"
    "runtime"
    "strings"

    "github.com/rjeczalik/notify"
)

type listener struct {
    folder string
    update func(string) error
    clean  func(string) error
}

func (l *listener) listen() {
    cwd, err := os.Getwd()
    if err != nil {
        log.Fatal("could not get current working directory for listener!")
    }
    cwd = cwd + "/"

    c := make(chan notify.EventInfo, 1)

    var events []notify.Event

    // inotify events prevent double-firing of
    // certain events in Linux.
    if runtime.GOOS == "linux" {
        events = []notify.Event{
            notify.InCloseWrite,
            notify.InMovedFrom,
            notify.InMovedTo,
            notify.InDelete,
        }
    } else {
        events = []notify.Event{
            notify.Create,
            notify.Remove,
            notify.Rename,
            notify.Write,
        }
    }

    err = notify.Watch(l.folder, c, events...)

    if err != nil {
        log.Fatalf("Could not setup watcher for folder %s: %s", l.folder, err)
    }

    defer notify.Stop(c)

    for {
        ei := <-c
        log.Printf("event: %s", ei.Event())
        switch ei.Event() {
        case notify.InCloseWrite, notify.InMovedTo, notify.Create, notify.Rename, notify.Write:
            filePath := strings.TrimPrefix(ei.Path(), cwd)
            log.Printf("updating file %s", filePath)
            err := l.update(strings.TrimPrefix(filePath, l.folder))
            if err != nil {
                log.Printf("watcher update action on %s failed: %v", filePath, err)
            }
        case notify.InMovedFrom, notify.InDelete, notify.Remove:
            filePath := strings.TrimPrefix(ei.Path(), cwd)
            log.Printf("cleaning file %s", filePath)
            err := l.clean(strings.TrimPrefix(filePath, l.folder))
            if err != nil {
                log.Printf("watcher clean action on %s failed: %v", filePath, err)
            }
        }
    }
}
Deleted file:
@@ -1,191 +0,0 @@
package main

import (
    "bytes"
    "fmt"
    "io"
    "log"
    "os"
    "sort"
    "strings"

    "github.com/aymerick/raymond"
    "github.com/mitchellh/mapstructure"
    "github.com/yuin/goldmark"
    emoji "github.com/yuin/goldmark-emoji"
    highlighting "github.com/yuin/goldmark-highlighting"
    meta "github.com/yuin/goldmark-meta"
    "github.com/yuin/goldmark/extension"
    "github.com/yuin/goldmark/parser"
    "github.com/yuin/goldmark/renderer/html"
)

// Metadata stores the data about a post that needs to be visible
// at the home page.
type Metadata struct {
    Title   string
    Summary string
    Time    int64 // unix timestamp
}

// Post stores the contents of a blog post.
type Post struct {
    Slug     string
    Metadata Metadata
    Contents string
    Image    []byte
}

func newPost(slug string) (*Post, error) {
    data, err := os.ReadFile("posts/" + slug + ".md")
    if err != nil {
        return nil, fmt.Errorf("could not read file: %s", err)
    }

    md := goldmark.New(
        goldmark.WithExtensions(
            extension.Linkify,
            extension.Strikethrough,
            extension.Typographer,
            extension.Footnote,
            meta.Meta,
            highlighting.Highlighting,
            emoji.New(emoji.WithRenderingMethod(emoji.Unicode)),
        ),
        goldmark.WithRendererOptions(
            html.WithUnsafe(),
        ),
    )
    var converted bytes.Buffer
    ctx := parser.NewContext()
    err = md.Convert(data, &converted, parser.WithContext(ctx))
    if err != nil {
        return nil, fmt.Errorf("could not parse markdown: %s", err)
    }
    mdMap, err := meta.TryGet(ctx)
    if err != nil {
        return nil, fmt.Errorf("could not parse metadata: %s", err)
    }
    var metadata Metadata
    err = mapstructure.Decode(mdMap, &metadata)
    if err != nil {
        return nil, fmt.Errorf("could not destructure metadata: %s", err)
    }

    post := &Post{
        Slug:     slug,
        Metadata: metadata,
        Contents: converted.String(),
    }

    url := blogURL + "/" + slug
    var buf bytes.Buffer
    err = createImage(post.Metadata.Title, post.Metadata.Summary, url, &buf)
    if err != nil {
        return nil, fmt.Errorf("could not create post image: %v", err)
    }
    post.Image, err = io.ReadAll(&buf)
    if err != nil {
        return nil, err
    }

    return post, nil
}

func (p *Post) render(tpl *raymond.Template) (string, error) {
    return tpl.Exec(p)
}

func (p *Post) String() string {
    return p.Slug
}

type postList []*Post

func newPostList() (postList, error) {
    files, err := os.ReadDir("posts/")
    if err != nil {
        return nil, err
    }

    pl := make(postList, 0, len(files))
    for _, f := range files {
        filename := f.Name()

        if strings.HasSuffix(filename, ".md") {
            post, err := newPost(strings.TrimSuffix(filename, ".md"))
            if err != nil {
                return nil, fmt.Errorf("could not render %s: %s", filename, err)
            }
            pl = append(pl, post)
            log.Printf("Loaded post %s", filename)
        }
    }
    sort.Sort(pl)

    return pl, nil
}

func insertOrUpdatePost(pl postList, p *Post) postList {
    for i, post := range pl {
        if post.Slug == p.Slug {
            pl[i] = p
            sort.Sort(pl)
            return pl
        }
    }
    pl = append(pl, p)
    sort.Sort(pl)
    return pl
}

func removePost(pl postList, slug string) postList {
    for i, post := range pl {
        if post.Slug == slug {
            pl = append(pl[:i], pl[i+1:]...)
            break
        }
    }
    fmt.Println(pl)
    return pl
}

// Len implements sort.Interface
func (pl postList) Len() int {
    return len(pl)
}

// Less implements sort.Interface
func (pl postList) Less(i, j int) bool {
    return pl[i].Metadata.Time > pl[j].Metadata.Time
}

// Swap implements sort.Interface
func (pl postList) Swap(i, j int) {
    temp := pl[i]
    pl[i] = pl[j]
    pl[j] = temp
}

func newPostListener(update func(func(postList) postList)) *listener {
    ln := &listener{
        folder: "posts/",
        update: func(file string) error {
            post, err := newPost(strings.TrimSuffix(file, ".md"))
            if err != nil {
                return err
            }
            update(func(oldList postList) postList {
                return insertOrUpdatePost(oldList, post)
            })
            return nil
        },
        clean: func(file string) error {
            update(func(oldList postList) postList {
                return removePost(oldList, strings.TrimSuffix(file, ".md"))
            })
            return nil
        },
    }
    return ln
}
@@ -3,18 +3,17 @@ package main
import (
    "log"
    "net/http"
    "prose/server"
)

func main() {
    log.Printf("Hello, world! This is Prose.")

    s, err := newServer()
    err := server.New()

    if err != nil {
        log.Fatal(err)
    }

    http.HandleFunc("/", s.router)

    log.Fatal(http.ListenAndServe(":8080", nil))
}
Deleted file:
@@ -1,316 +0,0 @@
package main

import (
    "bytes"
    "io"
    "log"
    "net/http"
    "strings"
    "sync"
    "time"

    "github.com/aymerick/raymond"
    "github.com/fogleman/gg"
)

const (
    blogTitle   = "Prose"
    blogURL     = "https://prose.nsood.in"
    blogSummary = "Where I infodump in Markdown and nobody can stop me."
)

type server struct {
    staticHandler http.Handler

    mu        sync.RWMutex
    templates map[string]*raymond.Template
    postList
    styles    map[string]string
    homeImage []byte
}

func newServer() (*server, error) {
    s := &server{
        staticHandler: http.FileServer(http.Dir("static/")),
    }

    var imgBuffer bytes.Buffer
    err := createImage(blogTitle, blogSummary, blogURL, &imgBuffer)
    if err != nil {
        return nil, err
    }
    s.homeImage, err = io.ReadAll(&imgBuffer)
    if err != nil {
        return nil, err
    }

    posts, err := newPostList()
    if err != nil {
        return nil, err
    }
    s.mu.Lock()
    s.postList = posts
    s.mu.Unlock()

    tpls, err := loadTemplates([]string{
        "page.html",
        "fullpost.html",
        "summary.html",
        "notfound.html",
        "error.html",
        "rss-channel.xml",
        "rss-item.xml",
    })
    if err != nil {
        return nil, err
    }
    s.mu.Lock()
    s.templates = tpls
    s.mu.Unlock()

    styles, err := newStylesMap()
    if err != nil {
        return nil, err
    }
    s.mu.Lock()
    s.styles = styles
    s.mu.Unlock()

    postsLn := newPostListener(func(updateFn func(postList) postList) {
        s.mu.Lock()
        defer s.mu.Unlock()
        s.postList = updateFn(s.postList)
    })
    go postsLn.listen()

    templatesLn := newTemplateListener(func(updateFn func(map[string]*raymond.Template)) {
        s.mu.Lock()
        defer s.mu.Unlock()
        updateFn(s.templates)
    })
    go templatesLn.listen()

    stylesLn := newStylesListener(func(updateFn func(map[string]string)) {
        s.mu.Lock()
        defer s.mu.Unlock()
        updateFn(s.styles)
    })
    go stylesLn.listen()

    return s, nil
}

func (s *server) logRequest(req *http.Request) {
    log.Printf("%s %s from %s", req.Method, req.URL.Path, req.RemoteAddr)
}

func (s *server) router(res http.ResponseWriter, req *http.Request) {
    s.mu.RLock()
    defer s.mu.RUnlock()
    s.logRequest(req)
    res = &errorCatcher{
        res:          res,
        req:          req,
        errorTpl:     s.templates["error.html"],
        notFoundTpl:  s.templates["notfound.html"],
        handledError: false,
    }
    slug := req.URL.Path[1:]

    if slug == "" {
        s.homePage(res, req)
        return
    }
    if slug == "about.png" {
        s.renderImage(res, req, s.homeImage)
        return
    }
    if slug == "rss.xml" {
        s.renderRSS(res, req)
        return
    }

    for _, p := range s.postList {
        if slug == p.Slug {
            s.postPage(p, res, req)
            return
        } else if slug == p.Slug+"/about.png" {
            s.renderImage(res, req, p.Image)
            return
        }
    }

    if strings.HasPrefix(slug, "css/") {
        filename := strings.TrimPrefix(slug, "css/")
        ok := s.loadStylesheet(res, req, filename)
        if ok {
            return
        }
    }

    s.staticHandler.ServeHTTP(res, req)
}

func (s *server) errorInRequest(res http.ResponseWriter, req *http.Request, err error) {
    res.WriteHeader(http.StatusInternalServerError)
    res.Write([]byte("oh no"))
    log.Printf("ERR %s: %s", req.URL.Path, err)
}

func (s *server) createWebPage(title, subtitle, contents, path string) (string, error) {
    ctx := map[string]interface{}{
        "title":    title,
        "subtitle": subtitle,
        "contents": contents,
        "path":     blogURL + path,
    }
    return s.templates["page.html"].Exec(ctx)
}

func (s *server) postPage(p *Post, res http.ResponseWriter, req *http.Request) {
    res.Header().Add("content-type", "text/html; charset=utf-8")
    contents, err := p.render(s.templates["fullpost.html"])
    if err != nil {
        s.errorInRequest(res, req, err)
    }
    page, err := s.createWebPage(p.Metadata.Title, p.Metadata.Summary, contents, req.URL.Path)
    if err != nil {
        s.errorInRequest(res, req, err)
    }
    res.Write([]byte(page))
}

func (s *server) homePage(res http.ResponseWriter, req *http.Request) {
    res.Header().Add("content-type", "text/html; charset=utf-8")

    var posts string

    for _, p := range s.postList {
        summary, err := p.render(s.templates["summary.html"])
        if err != nil {
            log.Printf("could not render post summary for %s", p.Slug)
        }
        posts = posts + summary
    }

    page, err := s.createWebPage("Home", blogSummary, posts, "")

    if err != nil {
        s.errorInRequest(res, req, err)
    }

    res.Write([]byte(page))
}

func (s *server) renderImage(res http.ResponseWriter, req *http.Request, img []byte) {
    res.Header().Add("content-type", "image/png")
    res.Write(img)
}

func (s *server) renderRSS(res http.ResponseWriter, req *http.Request) {
    res.Header().Add("content-type", "text/xml; charset=utf-8")

    var posts string

    for _, p := range s.postList {
        summary, err := p.render(s.templates["rss-item.xml"])
        if err != nil {
            log.Printf("could not render post summary for %s", p.Slug)
        }
        posts = posts + summary
    }

    var pubDate string
    if len(posts) > 0 {
        pubDate = rssDatetime(s.postList[0].Metadata.Time)
    } else {
        pubDate = rssDatetime(0)
    }

    page, err := s.templates["rss-channel.xml"].Exec(map[string]string{
        "title":       blogTitle,
        "description": blogSummary,
        "link":        blogURL,
        "pubDate":     pubDate,
        "items":       posts,
    })

    if err != nil {
        s.errorInRequest(res, req, err)
    }

    res.Write([]byte(page))
}

func (s *server) loadStylesheet(res http.ResponseWriter, req *http.Request, filename string) (ok bool) {
    contents, ok := s.styles[filename]
    if !ok {
        return false
    }
    res.Header().Add("content-type", "text/css")
    res.Write([]byte(contents))
    return ok
}

func rssDatetime(timestamp int64) string {
    return time.Unix(timestamp, 0).Format("Mon, 02 Jan 2006 15:04:05 MST")
}

func createImage(title, summary, url string, out io.Writer) error {
    imgWidth, imgHeight, imgPaddingX, imgPaddingY := 1200, 600, 50, 100
    accentHeight, spacerHeight := 12.5, 20.0
    titleSize, summarySize, urlSize := 63.0, 42.0, 27.0
    lineHeight := 1.05
    textWidth := float64(imgWidth - 2*imgPaddingX)

    draw := gg.NewContext(imgWidth, imgHeight)

    titleFont, err := gg.LoadFontFace("static/fonts/Nunito-Bold.ttf", titleSize)
    if err != nil {
        return err
    }
    summaryFont, err := gg.LoadFontFace("static/fonts/Nunito-LightItalic.ttf", summarySize)
    if err != nil {
        return err
    }
    urlFont, err := gg.LoadFontFace("static/fonts/JetBrainsMono-ExtraLight.ttf", urlSize)
    if err != nil {
        return err
    }

    draw.SetFontFace(titleFont)
    wrappedTitle := draw.WordWrap(title, textWidth)
    draw.SetFontFace(summaryFont)
    wrappedSummary := draw.WordWrap(summary, textWidth)

    draw.SetHexColor("#fff")
    draw.DrawRectangle(0, 0, float64(imgWidth), float64(imgHeight))
    draw.Fill()
    draw.SetHexColor("#3498db")
    draw.DrawRectangle(0, float64(imgHeight)-accentHeight, float64(imgWidth), accentHeight)
    draw.Fill()

    offset := float64(imgPaddingY)

    draw.SetFontFace(titleFont)
    draw.SetHexColor("#333")
    for _, line := range wrappedTitle {
        draw.DrawString(line, float64(imgPaddingX), offset)
        offset += lineHeight * titleSize
    }
    offset += spacerHeight

    draw.SetFontFace(summaryFont)
    draw.SetHexColor("#999")
    for _, line := range wrappedSummary {
        draw.DrawString(line, float64(imgPaddingX), offset)
        offset += lineHeight * summarySize
    }

    draw.SetHexColor("#333")
    draw.SetFontFace(urlFont)
    urlY := float64(imgHeight - imgPaddingY)
    draw.DrawStringWrapped(url, float64(imgPaddingX), urlY, 0, 0, textWidth, lineHeight, gg.AlignRight)

    return draw.EncodePNG(out)
}
Deleted file:
@@ -1,99 +0,0 @@
package main

import (
    "fmt"
    "io"
    "log"
    "os"
    "strings"

    "github.com/bep/godartsass/v2"
)

var sassTranspiler *godartsass.Transpiler

func newStylesMap() (map[string]string, error) {
    folder, err := os.ReadDir("styles/")
    if err != nil {
        return nil, fmt.Errorf("could not load styles directory: %s", err)
    }

    styles := make(map[string]string)
    for _, s := range folder {
        contents, filename, err := loadStylesheet(s.Name())
        if err != nil {
            return nil, fmt.Errorf("could not generate styles for %s: %v", s.Name(), err)
        }
        styles[filename] = contents
        log.Printf("Loaded stylesheet %s", filename)
    }

    return styles, nil
}

func newStylesListener(updateMap func(func(map[string]string))) *listener {
    ln := &listener{
        folder: "styles/",
        update: func(file string) error {
            contents, filename, err := loadStylesheet(file)
            if err != nil {
                return err
            }
            updateMap(func(styles map[string]string) {
                styles[filename] = contents
            })
            return nil
        },
        clean: func(file string) error {
            updateMap(func(styles map[string]string) {
                delete(styles, file+".css")
            })
            return nil
        },
    }
    return ln
}

func loadStylesheet(filename string) (string, string, error) {
    if strings.HasSuffix(filename, ".scss") {
        return loadSCSS(filename)
    }
    return loadCSS(filename)
}

func loadSCSS(filename string) (string, string, error) {
    in, err := os.Open("styles/" + filename)
    if err != nil {
        return "", "", fmt.Errorf("could not open stylesheet %s: %w", filename, err)
    }
    stylesheet, err := io.ReadAll(in)
    if err != nil {
        return "", "", fmt.Errorf("could not read stylesheet %s: %w", filename, err)
    }
    if sassTranspiler == nil {
        sassTranspiler, err = godartsass.Start(godartsass.Options{})
        if err != nil {
            return "", "", fmt.Errorf("could not start sass transpiler: %w", err)
        }
    }
    res, err := sassTranspiler.Execute(godartsass.Args{
        Source: string(stylesheet),
    })
    if err != nil {
        return "", "", fmt.Errorf("could not generate stylesheet %s: %w", filename, err)
    }
    return res.CSS, strings.TrimSuffix(filename, ".scss") + ".css", nil
}

func loadCSS(filename string) (string, string, error) {
    in, err := os.Open("styles/" + filename)
    if err != nil {
        return "", "", fmt.Errorf("could not open style infile %s: %w", filename, err)
    }
    var buf strings.Builder
    _, err = io.Copy(&buf, in)
    if err != nil {
        return "", "", fmt.Errorf("could not copy stylesheet %s: %s", filename, err)
    }
    return buf.String(), filename, nil
}
Deleted file:
@@ -1,78 +0,0 @@
package main

import (
    "fmt"
    "log"
    "strconv"
    "time"

    "github.com/aymerick/raymond"
)

func loadTemplate(file string) (*raymond.Template, error) {
    tpl, err := raymond.ParseFile("templates/" + file)
    if err != nil {
        return nil, fmt.Errorf("could not parse %s template: %w", file, err)
    }
    tpl.RegisterHelper("datetime", func(timeStr string) string {
        timestamp, err := strconv.ParseInt(timeStr, 10, 64)
        if err != nil {
            log.Printf("Could not parse timestamp '%v', falling back to current time", timeStr)
            timestamp = time.Now().Unix()
        }
        return time.Unix(timestamp, 0).Format("Jan 2 2006, 3:04 PM")
    })
    tpl.RegisterHelper("rssDatetime", func(timeStr string) string {
        timestamp, err := strconv.ParseInt(timeStr, 10, 64)
        if err != nil {
            log.Printf("Could not parse timestamp '%v', falling back to current time", timeStr)
            timestamp = time.Now().Unix()
        }
        return rssDatetime(timestamp)
    })
    tpl.RegisterHelper("getFullUrl", func(slug string) string {
        return blogURL + "/" + slug
    })
    log.Printf("Loaded template: %s", file)
    return tpl, nil
}

// loadTemplates, for each f in files, loads `templates/$f`
// as a handlebars HTML/XML template. If any single template fails to
// load, only an error is returned. Conversely, if there is no error,
// every template name passed is guaranteed to have loaded successfully.
func loadTemplates(files []string) (map[string]*raymond.Template, error) {
    templates := make(map[string]*raymond.Template)
    for _, f := range files {
        tpl, err := loadTemplate(f)
        if err != nil {
            return nil, err
        }
        templates[f] = tpl
    }
    log.Printf("Loaded templates: %s", files)
    return templates, nil
}

func newTemplateListener(update func(func(map[string]*raymond.Template))) *listener {
    return &listener{
        folder: "templates/",
        update: func(file string) error {
            newTpl, err := loadTemplate(file)
            if err != nil {
                return err
            }
            update(func(oldMap map[string]*raymond.Template) {
                oldMap[file] = newTpl
            })
            return nil
        },
        clean: func(file string) error {
            update(func(oldMap map[string]*raymond.Template) {
                delete(oldMap, file)
            })
            log.Printf("Unloaded template: %s", file)
            return nil
        },
    }
}
common/common.go (new file, 77 lines)
@@ -0,0 +1,77 @@
package common

import (
    "io"
    "time"

    "github.com/fogleman/gg"
)

const (
    BlogTitle   = "Prose"
    BlogURL     = "https://prose.nsood.in"
    BlogSummary = "Where I infodump in Markdown and nobody can stop me."
)

func RSSDatetime(timestamp int64) string {
    return time.Unix(timestamp, 0).Format("Mon, 02 Jan 2006 15:04:05 MST")
}

func CreateImage(title, summary, url string, out io.Writer) error {
    imgWidth, imgHeight, imgPaddingX, imgPaddingY := 1200, 600, 50, 100
    accentHeight, spacerHeight := 12.5, 20.0
    titleSize, summarySize, urlSize := 63.0, 42.0, 27.0
    lineHeight := 1.05
    textWidth := float64(imgWidth - 2*imgPaddingX)

    draw := gg.NewContext(imgWidth, imgHeight)

    titleFont, err := gg.LoadFontFace("static/fonts/Nunito-Bold.ttf", titleSize)
    if err != nil {
        return err
    }
    summaryFont, err := gg.LoadFontFace("static/fonts/Nunito-LightItalic.ttf", summarySize)
    if err != nil {
        return err
    }
    urlFont, err := gg.LoadFontFace("static/fonts/JetBrainsMono-ExtraLight.ttf", urlSize)
    if err != nil {
        return err
    }

    draw.SetFontFace(titleFont)
    wrappedTitle := draw.WordWrap(title, textWidth)
    draw.SetFontFace(summaryFont)
    wrappedSummary := draw.WordWrap(summary, textWidth)

    draw.SetHexColor("#fff")
    draw.DrawRectangle(0, 0, float64(imgWidth), float64(imgHeight))
    draw.Fill()
    draw.SetHexColor("#3498db")
    draw.DrawRectangle(0, float64(imgHeight)-accentHeight, float64(imgWidth), accentHeight)
    draw.Fill()

    offset := float64(imgPaddingY)

    draw.SetFontFace(titleFont)
    draw.SetHexColor("#333")
    for _, line := range wrappedTitle {
        draw.DrawString(line, float64(imgPaddingX), offset)
        offset += lineHeight * titleSize
    }
    offset += spacerHeight

    draw.SetFontFace(summaryFont)
    draw.SetHexColor("#999")
    for _, line := range wrappedSummary {
        draw.DrawString(line, float64(imgPaddingX), offset)
        offset += lineHeight * summarySize
    }

    draw.SetHexColor("#333")
    draw.SetFontFace(urlFont)
    urlY := float64(imgHeight - imgPaddingY)
    draw.DrawStringWrapped(url, float64(imgPaddingX), urlY, 0, 0, textWidth, lineHeight, gg.AlignRight)

    return draw.EncodePNG(out)
}
errorcatcher/errorcatcher.go (new file, 84 lines)
@@ -0,0 +1,84 @@
package errorcatcher

import (
    "net/http"
    "prose/watcher"
    "strconv"

    "github.com/aymerick/raymond"
)

// errorCatcher is a wrapper for http.ResponseWriter that
// captures 4xx and 5xx status codes and handles them in
// a custom manner
type errorCatcher struct {
    r         *http.Request
    w         http.ResponseWriter
    templates watcher.AutoMap[string, *raymond.Template]
    handled   bool
}

func New(w http.ResponseWriter, r *http.Request, templates watcher.AutoMap[string, *raymond.Template]) *errorCatcher {
    return &errorCatcher{
        r:         r,
        w:         w,
        templates: templates,
    }
}

func (ec *errorCatcher) Header() http.Header {
    return ec.w.Header()
}

func (ec *errorCatcher) Write(buf []byte) (int, error) {
    // if we have already sent a response, pretend that this was successful
    if ec.handled {
        return len(buf), nil
    }
    return ec.w.Write(buf)
}

func (ec *errorCatcher) WriteHeader(statusCode int) {
    if ec.handled {
        return
    }

    if statusCode == http.StatusNotFound {
        tpl, _ := ec.templates.Get("notfound.html")
        page, err := tpl.Exec(map[string]string{
            "path": ec.r.URL.Path,
        })
        // if we don't have a page to write, return before
        // we toggle the flag so we fall back to the original
        // error page
        if err != nil {
            return
        }
        ec.w.Header().Set("Content-Type", "text/html; charset=utf-8")
        ec.w.WriteHeader(statusCode)
        ec.w.Write([]byte(page))
        ec.handled = true
        return
    }

    if statusCode >= 400 && statusCode < 600 {
        ctx := map[string]string{
            "code": strconv.Itoa(statusCode),
        }
        tpl, _ := ec.templates.Get("error.html")
        page, err := tpl.Exec(ctx)
        // if we don't have a page to write, return before
        // we toggle the flag so we fall back to the original
        // error page
        if err != nil {
            return
        }
        ec.w.Header().Set("Content-Type", "text/html; charset=utf-8")
        ec.w.WriteHeader(statusCode)
        ec.w.Write([]byte(page))
        ec.handled = true
        return
    }

    ec.w.WriteHeader(statusCode)
}
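For illustration only (not part of this commit; the route and handler below are assumed, not taken from the repository): any handler wrapped by errorcatcher.New that writes a 4xx/5xx status gets its body replaced by the rendered notfound.html or error.html template.

package main

import (
    "net/http"

    "prose/errorcatcher"
    "prose/tplmap"
)

func main() {
    templates, err := tplmap.New()
    if err != nil {
        panic(err)
    }
    http.HandleFunc("/missing", func(w http.ResponseWriter, r *http.Request) {
        ec := errorcatcher.New(w, r, templates)
        // http.NotFound calls WriteHeader(404) on the wrapper, which renders
        // templates/notfound.html instead; the plain-text body is swallowed.
        http.NotFound(ec, r)
    })
    http.ListenAndServe(":8080", nil)
}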
go.mod (13 changed lines)
@@ -1,18 +1,27 @@
module prose

go 1.15
go 1.23

require (
    github.com/aymerick/raymond v2.0.2+incompatible
    github.com/bep/godartsass/v2 v2.0.0
    github.com/fogleman/gg v1.3.0
    github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
    github.com/mitchellh/mapstructure v1.4.1
    github.com/rjeczalik/notify v0.9.2
    github.com/yuin/goldmark v1.3.1
    github.com/yuin/goldmark-emoji v1.0.1
    github.com/yuin/goldmark-highlighting v0.0.0-20200307114337-60d527fdb691
    github.com/yuin/goldmark-meta v1.0.0
)

require (
    github.com/alecthomas/chroma v0.7.2-0.20200305040604-4f3623dce67a // indirect
    github.com/cli/safeexec v1.0.1 // indirect
    github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 // indirect
    github.com/dlclark/regexp2 v1.2.0 // indirect
    github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
    golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d // indirect
    golang.org/x/sys v0.0.0-20210228012217-479acdf4ea46 // indirect
    google.golang.org/protobuf v1.30.0 // indirect
    gopkg.in/yaml.v2 v2.3.0 // indirect
)
go.sum (11 changed lines)
@@ -18,12 +18,10 @@ github.com/bep/godartsass/v2 v2.0.0 h1:Ruht+BpBWkpmW+yAM2dkp7RSSeN0VLaTobyW0CiSP
github.com/bep/godartsass/v2 v2.0.0/go.mod h1:AcP8QgC+OwOXEq6im0WgDRYK7scDsmZCEW62o1prQLo=
github.com/cli/safeexec v1.0.1 h1:e/C79PbXF4yYTN/wauC4tviMxEV13BwljGj0N9j+N00=
github.com/cli/safeexec v1.0.1/go.mod h1:Z/D4tTN8Vs5gXYHDCbaM1S/anmEDnJb1iW0+EJ5zx3Q=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
@@ -44,11 +42,8 @@ github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
@@ -60,7 +55,6 @@ github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rjeczalik/notify v0.9.2 h1:MiTWrPj55mNDHEiIX5YUSKefw/+lCQVoAFmD6oQm5w8=
github.com/rjeczalik/notify v0.9.2/go.mod h1:aErll2f0sUX9PXZnVNyeiObbmTlk5jnMoCa4QEjJeqM=
@@ -70,7 +64,6 @@ github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
@@ -97,9 +90,7 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng=
google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
postmap/postmap.go (new file, 213 lines)
@@ -0,0 +1,213 @@
package postmap

import (
    "bytes"
    "fmt"
    "io"
    "iter"
    "log"
    "os"
    "prose/common"
    "prose/watcher"
    "slices"
    "strings"
    "sync"

    "github.com/aymerick/raymond"
    "github.com/mitchellh/mapstructure"
    "github.com/yuin/goldmark"
    emoji "github.com/yuin/goldmark-emoji"
    highlighting "github.com/yuin/goldmark-highlighting"
    meta "github.com/yuin/goldmark-meta"
    "github.com/yuin/goldmark/extension"
    "github.com/yuin/goldmark/parser"
    "github.com/yuin/goldmark/renderer/html"
)

// Metadata stores the data about a post that needs to be visible
// at the home page.
type Metadata struct {
    Title   string
    Summary string
    Time    int64 // unix timestamp
}

// Post stores the contents of a blog post.
type Post struct {
    Slug     string
    Metadata Metadata
    Contents string
    Image    []byte
}

func (p *Post) Render(tpl *raymond.Template) (string, error) {
    return tpl.Exec(p)
}

func (p *Post) String() string {
    return p.Slug
}

func postLess(a, b *Post) int {
    return int(b.Metadata.Time - a.Metadata.Time)
}

// postList stores Posts in reverse order of Post.Metadata.Time.
type postList struct {
    mu    sync.RWMutex
    posts []*Post
    md    goldmark.Markdown
}

// New returns a new postList.
func New() (watcher.OrderedAutoMap[string, *Post], error) {
    files, err := os.ReadDir("posts/")
    if err != nil {
        return nil, err
    }

    pl := &postList{
        posts: make([]*Post, 0, len(files)),
        md: goldmark.New(
            goldmark.WithExtensions(
                extension.Linkify,
                extension.Strikethrough,
                extension.Typographer,
                extension.Footnote,
                meta.Meta,
                highlighting.Highlighting,
                emoji.New(emoji.WithRenderingMethod(emoji.Unicode)),
            ),
            goldmark.WithRendererOptions(
                html.WithUnsafe(),
            ),
        ),
    }
    for _, f := range files {
        filename := f.Name()

        if filename == ".gitignore" {
            continue
        }

        err := pl.fetchLocked(filename)
        if err != nil {
            log.Printf("could not render %q, skipping: %v", filename, err)
        } else {
            log.Printf("loaded post: %q", filename)
        }
    }

    go watcher.Watch("posts/", pl)

    return pl, nil
}

func (pl *postList) newPost(filename string) (*Post, error) {
    slug, ok := strings.CutSuffix(filename, ".md")
    if !ok {
        return nil, fmt.Errorf("unknown file extension in posts directory: %q", filename)
    }
    data, err := os.ReadFile("posts/" + filename)
    if err != nil {
        return nil, fmt.Errorf("could not read file: %w", err)
    }

    var converted bytes.Buffer
    ctx := parser.NewContext()
    err = pl.md.Convert(data, &converted, parser.WithContext(ctx))
    if err != nil {
        return nil, fmt.Errorf("could not parse markdown: %w", err)
    }
    mdMap, err := meta.TryGet(ctx)
    if err != nil {
        return nil, fmt.Errorf("could not parse metadata: %w", err)
    }
    var metadata Metadata
    err = mapstructure.Decode(mdMap, &metadata)
    if err != nil {
        return nil, fmt.Errorf("could not destructure metadata: %w", err)
    }

    post := &Post{
        Slug:     slug,
        Metadata: metadata,
        Contents: converted.String(),
    }

    url := common.BlogURL + "/" + slug
    var buf bytes.Buffer
    err = common.CreateImage(post.Metadata.Title, post.Metadata.Summary, url, &buf)
    if err != nil {
        return nil, fmt.Errorf("could not create post image: %w", err)
    }
    post.Image, err = io.ReadAll(&buf)
    if err != nil {
        return nil, err
    }

    return post, nil
}

func (pl *postList) Get(slug string) (*Post, bool) {
    pl.mu.RLock()
    defer pl.mu.RUnlock()
    for _, p := range pl.posts {
        if p.Slug == slug {
            return p, true
        }
    }
    return nil, false
}

func (pl *postList) Fetch(filename string) error {
    pl.mu.Lock()
    defer pl.mu.Unlock()
    return pl.fetchLocked(filename)
}

func (pl *postList) fetchLocked(filename string) error {
    p, err := pl.newPost(filename)
    if err != nil {
        return err
    }
    defer slices.SortFunc(pl.posts, postLess)
    for i, post := range pl.posts {
        if post.Slug == p.Slug {
            pl.posts[i] = p
            return nil
        }
    }
    pl.posts = append(pl.posts, p)
    return nil
}

func (pl *postList) Delete(filename string) error {
    pl.mu.Lock()
    defer pl.mu.Unlock()
    slug, ok := strings.CutSuffix(filename, ".md")
    if !ok {
        return fmt.Errorf("unknown file extension in posts directory: %q", filename)
    }
    for i, post := range pl.posts {
        if post.Slug == slug {
            pl.posts = append(pl.posts[:i], pl.posts[i+1:]...)
            break
        }
    }
    return nil
}

// All implements watcher.OrderedCompMap[string, *Post], providing Posts in
// order of most recent first.
func (pl *postList) All() iter.Seq2[string, *Post] {
    return func(yield func(string, *Post) bool) {
        pl.mu.RLock()
        defer pl.mu.RUnlock()
        for _, p := range pl.posts {
            if !yield(p.Slug, p) {
                break
            }
        }
    }
}
server/server.go (new file, 201 lines)
@@ -0,0 +1,201 @@
package server

import (
    "bytes"
    "log"
    "net/http"
    "prose/common"
    "prose/errorcatcher"
    "prose/postmap"
    "prose/stylemap"
    "prose/tplmap"
    "prose/watcher"
    "strings"

    "github.com/aymerick/raymond"
)

type server struct {
    staticHandler http.Handler

    templates watcher.AutoMap[string, *raymond.Template]
    posts     watcher.OrderedAutoMap[string, *postmap.Post]
    styles    watcher.AutoMap[string, string]
    homeImage []byte
}

func New() error {
    s := &server{
        staticHandler: http.FileServer(http.Dir("static/")),
    }

    var imgBuffer bytes.Buffer
    err := common.CreateImage(common.BlogTitle, common.BlogSummary, common.BlogURL, &imgBuffer)
    if err != nil {
        return err
    }
    s.homeImage = imgBuffer.Bytes()

    s.posts, err = postmap.New()
    if err != nil {
        return err
    }

    s.templates, err = tplmap.New()
    if err != nil {
        return err
    }

    s.styles, err = stylemap.New()
    if err != nil {
        return err
    }

    handlers := map[string]http.HandlerFunc{
        "GET /{$}":              s.serveGetHome,
        "GET /banner.png":       s.serveGetImage(s.homeImage),
        "GET /rss.xml":          s.serveGetRSS,
        "GET /{slug}":           s.serveGetPost,
        "GET /img/banner/{img}": s.serveGetPostImage,
        "GET /css/{filename}":   s.serveGetStylesheet,
        "GET /":                 s.staticHandler.ServeHTTP,
    }

    for pattern, handler := range handlers {
        http.Handle(pattern, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            w = errorcatcher.New(w, r, s.templates)
            handler(w, r)
        }))
    }

    return nil
}

func (s *server) errorInRequest(res http.ResponseWriter, req *http.Request, err error) {
    res.WriteHeader(http.StatusInternalServerError)
    log.Printf("ERR %s: %s", req.URL.Path, err)
}

func (s *server) createWebPage(title, subtitle, contents, banner string) (string, error) {
    ctx := map[string]interface{}{
        "title":    title,
        "subtitle": subtitle,
        "contents": contents,
        "banner":   banner,
    }
    tpl, _ := s.templates.Get("page.html")
    return tpl.Exec(ctx)
}

func (s *server) serveGetPost(w http.ResponseWriter, r *http.Request) {
    p, ok := s.posts.Get(r.PathValue("slug"))
    if !ok {
        w.WriteHeader(http.StatusNotFound)
        return
    }
    w.Header().Add("content-type", "text/html; charset=utf-8")
    tpl, _ := s.templates.Get("fullpost.html")
    contents, err := p.Render(tpl)
    if err != nil {
        s.errorInRequest(w, r, err)
        return
    }
    page, err := s.createWebPage(p.Metadata.Title, p.Metadata.Summary, contents, "/img/banner/"+p.Slug+".png")
    if err != nil {
        s.errorInRequest(w, r, err)
        return
    }
    w.Write([]byte(page))
}

func (s *server) serveGetHome(w http.ResponseWriter, r *http.Request) {
    w.Header().Add("content-type", "text/html; charset=utf-8")

    var posts string

    summaryTpl, _ := s.templates.Get("summary.html")
    for _, p := range s.posts.All() {
        summary, err := p.Render(summaryTpl)
        if err != nil {
            log.Printf("could not render post summary for %s", p.Slug)
        }
        posts = posts + summary
    }

    page, err := s.createWebPage("Home", common.BlogSummary, posts, "/banner.png")

    if err != nil {
        s.errorInRequest(w, r, err)
        return
    }

    w.Write([]byte(page))
}

func (s *server) serveGetPostImage(w http.ResponseWriter, r *http.Request) {
    slug, ok := strings.CutSuffix(r.PathValue("img"), ".png")
    if !ok {
        w.WriteHeader(http.StatusNotFound)
        return
    }
    post, ok := s.posts.Get(slug)
    if !ok {
        w.WriteHeader(http.StatusNotFound)
        return
    }
    s.serveGetImage(post.Image)(w, r)
}

func (s *server) serveGetImage(img []byte) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        w.Header().Add("content-type", "image/png")
        w.Write(img)
    }
}

func (s *server) serveGetRSS(w http.ResponseWriter, r *http.Request) {
    w.Header().Add("content-type", "text/xml; charset=utf-8")

    var posts, pubDate string

    rssItemTpl, _ := s.templates.Get("rss-item.xml")
    for _, p := range s.posts.All() {
        if pubDate != "" {
            pubDate = common.RSSDatetime(p.Metadata.Time)
        }
        summary, err := p.Render(rssItemTpl)
        if err != nil {
            log.Printf("could not render post summary for %s", p.Slug)
        }
        posts = posts + summary
    }
    if pubDate == "" {
        pubDate = common.RSSDatetime(0)
    }

    rssPageTpl, _ := s.templates.Get("rss-channel.xml")
    page, err := rssPageTpl.Exec(map[string]string{
        "title":       common.BlogTitle,
        "description": common.BlogSummary,
        "link":        common.BlogURL,
        "pubDate":     pubDate,
        "items":       posts,
    })

    if err != nil {
        s.errorInRequest(w, r, err)
        return
    }

    w.Write([]byte(page))
}

func (s *server) serveGetStylesheet(w http.ResponseWriter, r *http.Request) {
    contents, ok := s.styles.Get(r.PathValue("filename"))
    if !ok {
        w.WriteHeader(http.StatusNotFound)
        return
    }
    w.Header().Add("content-type", "text/css")
    w.Write([]byte(contents))
}
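The route table above relies on the method-and-wildcard patterns that net/http's ServeMux gained in Go 1.22 (the module now targets Go 1.23): "GET /{$}" matches only the root path, "GET /{slug}" captures a single path segment, and r.PathValue reads the capture. A minimal standalone sketch of the same mechanism (the /post/{slug} route here is illustrative, not from the commit):

package main

import (
    "fmt"
    "net/http"
)

func main() {
    mux := http.NewServeMux()
    // "{slug}" captures exactly one path segment; "{$}" would match only "/".
    mux.HandleFunc("GET /post/{slug}", func(w http.ResponseWriter, r *http.Request) {
        fmt.Fprintf(w, "slug = %s\n", r.PathValue("slug"))
    })
    http.ListenAndServe(":8080", mux)
}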
stylemap/stylemap.go (new file, 117 lines)
@@ -0,0 +1,117 @@
package stylemap

import (
    "fmt"
    "io"
    "log"
    "os"
    "prose/watcher"
    "strings"
    "sync"

    "github.com/bep/godartsass/v2"
)

type styleMap struct {
    mu          sync.RWMutex
    stylesheets map[string]string
    transpiler  *godartsass.Transpiler
}

// New returns a new styleMap.
func New() (watcher.AutoMap[string, string], error) {
    folder, err := os.ReadDir("styles/")
    if err != nil {
        return nil, fmt.Errorf("could not load styles directory: %w", err)
    }

    sassTranspiler, err := godartsass.Start(godartsass.Options{})
    if err != nil {
        return nil, fmt.Errorf("could not start sass transpiler: %w", err)
    }

    sm := &styleMap{
        stylesheets: make(map[string]string),
        transpiler:  sassTranspiler,
    }

    for _, s := range folder {
        err := sm.fetchLocked(s.Name())
        if err != nil {
            return nil, fmt.Errorf("could not generate styles for %s: %w", s.Name(), err)
        }
        log.Printf("loaded stylesheet %q", s.Name())
    }

    go watcher.Watch("styles/", sm)

    return sm, nil
}

func (sm *styleMap) Get(filename string) (string, bool) {
    sm.mu.RLock()
    defer sm.mu.RUnlock()
    contents, ok := sm.stylesheets[filename]
    return contents, ok
}

func (sm *styleMap) Delete(filename string) error {
    sm.mu.Lock()
    defer sm.mu.Unlock()
    if s, ok := strings.CutSuffix(filename, ".scss"); ok {
        filename = s + ".css"
    }
    delete(sm.stylesheets, filename)
    return nil
}

func (sm *styleMap) Fetch(filename string) (err error) {
    sm.mu.Lock()
    defer sm.mu.Unlock()
    return sm.fetchLocked(filename)
}

func (sm *styleMap) fetchLocked(filename string) (err error) {
    var outfile, contents string
    if strings.HasSuffix(filename, ".scss") {
        outfile, contents, err = sm.loadSCSS(filename)
    } else {
        outfile, contents, err = sm.loadCSS(filename)
    }
    if err != nil {
        return
    }
    sm.stylesheets[outfile] = contents
    return
}

func (sm *styleMap) loadSCSS(filename string) (string, string, error) {
    in, err := os.Open("styles/" + filename)
    if err != nil {
        return "", "", fmt.Errorf("could not open stylesheet %s: %w", filename, err)
    }
    stylesheet, err := io.ReadAll(in)
    if err != nil {
        return "", "", fmt.Errorf("could not read stylesheet %s: %w", filename, err)
    }
    res, err := sm.transpiler.Execute(godartsass.Args{
        Source: string(stylesheet),
    })
    if err != nil {
        return "", "", fmt.Errorf("could not generate stylesheet %s: %w", filename, err)
    }
    return strings.TrimSuffix(filename, ".scss") + ".css", res.CSS, nil
}

func (sm *styleMap) loadCSS(filename string) (string, string, error) {
    in, err := os.Open("styles/" + filename)
    if err != nil {
        return "", "", fmt.Errorf("could not open style infile %s: %w", filename, err)
    }
    var buf strings.Builder
    _, err = io.Copy(&buf, in)
    if err != nil {
        return "", "", fmt.Errorf("could not copy stylesheet %s: %w", filename, err)
    }
    return filename, buf.String(), nil
}
@@ -8,7 +8,7 @@
<meta property="og:type" content="website">
<meta property="og:title" content="{{title}} – Prose">
<meta property="og:description" content="{{subtitle}}">
<meta property="og:image" content="{{path}}/about.png">
<meta property="og:image" content="{{banner}}">
<meta name="twitter:creator" content="@tendstofortytwo">
<meta name="twitter:card" content="summary_large_image">
<meta name="color-scheme" content="dark light">
tplmap/tplmap.go (new file, 90 lines)
@@ -0,0 +1,90 @@
package tplmap

import (
    "fmt"
    "log"
    "os"
    "prose/common"
    "prose/watcher"
    "strconv"
    "sync"
    "time"

    "github.com/aymerick/raymond"
)

type tplMap struct {
    mu        sync.RWMutex
    templates map[string]*raymond.Template
}

func New() (watcher.AutoMap[string, *raymond.Template], error) {
    folder, err := os.ReadDir("templates/")
    if err != nil {
        return nil, fmt.Errorf("could not load templates directory: %w", err)
    }

    tm := &tplMap{
        templates: make(map[string]*raymond.Template),
    }

    for _, s := range folder {
        err := tm.fetchLocked(s.Name())
        if err != nil {
            return nil, fmt.Errorf("could not load template %q: %w", s.Name(), err)
        }
        log.Printf("loaded template %q", s.Name())
    }

    go watcher.Watch("templates/", tm)

    return tm, nil
}

func (tm *tplMap) Get(filename string) (*raymond.Template, bool) {
    tm.mu.RLock()
    defer tm.mu.RUnlock()
    got, ok := tm.templates[filename]
    return got, ok
}

func (tm *tplMap) Delete(filename string) error {
    tm.mu.Lock()
    defer tm.mu.Unlock()
    delete(tm.templates, filename)
    return nil
}

func (tm *tplMap) Fetch(filename string) error {
    tm.mu.Lock()
    defer tm.mu.Unlock()
    return tm.fetchLocked(filename)
}

func (tm *tplMap) fetchLocked(filename string) error {
    tpl, err := raymond.ParseFile("templates/" + filename)
    if err != nil {
        return fmt.Errorf("could not parse template %q: %w", filename, err)
    }
    tpl.RegisterHelper("datetime", func(timeStr string) string {
        timestamp, err := strconv.ParseInt(timeStr, 10, 64)
        if err != nil {
            log.Printf("Could not parse timestamp '%v', falling back to current time", timeStr)
            timestamp = time.Now().Unix()
        }
        return time.Unix(timestamp, 0).Format("Jan 2 2006, 3:04 PM")
    })
    tpl.RegisterHelper("rssDatetime", func(timeStr string) string {
        timestamp, err := strconv.ParseInt(timeStr, 10, 64)
        if err != nil {
            log.Printf("Could not parse timestamp '%v', falling back to current time", timeStr)
            timestamp = time.Now().Unix()
        }
        return common.RSSDatetime(timestamp)
    })
    tpl.RegisterHelper("getFullUrl", func(slug string) string {
        return common.BlogURL + "/" + slug
    })
    tm.templates[filename] = tpl
    return nil
}
watcher/watcher.go (new file, 119 lines)
@@ -0,0 +1,119 @@
package watcher

import (
    "iter"
    "log"
    "os"
    "runtime"
    "strings"

    "github.com/rjeczalik/notify"
)

type WatchEventKind int

const (
    Update WatchEventKind = iota
    Clean
)

// WatchEvent represents a change to a watched folder. It notes which file
// changed and what change happened to it.
type WatchEvent struct {
    File string
    Kind WatchEventKind
}

func updated(f string) *WatchEvent {
    return &WatchEvent{
        File: f,
        Kind: Update,
    }
}

func cleaned(f string) *WatchEvent {
    return &WatchEvent{
        File: f,
        Kind: Clean,
    }
}

// Watcher classifies notify.Events into updates and deletes, and calls the
// respective functions for a file when those events happen to that file.
func Watch(folder string, up Updater[string]) {
    cwd, err := os.Getwd()
    if err != nil {
        log.Fatal("could not get current working directory for listener!")
    }
    cwd = cwd + "/"

    c := make(chan notify.EventInfo, 1)

    var events []notify.Event

    // inotify events prevent double-firing of
    // certain events in Linux.
    if runtime.GOOS == "linux" {
        events = []notify.Event{
            notify.InCloseWrite,
            notify.InMovedFrom,
            notify.InMovedTo,
            notify.InDelete,
        }
    } else {
        events = []notify.Event{
            notify.Create,
            notify.Remove,
            notify.Rename,
            notify.Write,
        }
    }

    err = notify.Watch(folder, c, events...)

    if err != nil {
        log.Fatalf("could not setup watcher for folder %s: %s", folder, err)
    }

    defer notify.Stop(c)
    for {
        ei := <-c
        log.Printf("event: %s", ei.Event())
        switch ei.Event() {
        case notify.InCloseWrite, notify.InMovedTo, notify.Create, notify.Rename, notify.Write:
            filePath := strings.TrimPrefix(ei.Path(), cwd)
            log.Printf("updating file %s", filePath)
            err := up.Fetch(strings.TrimPrefix(filePath, folder))
            if err != nil {
                log.Printf("up.Fetch(%q): %v", filePath, err)
            }
        case notify.InMovedFrom, notify.InDelete, notify.Remove:
            filePath := strings.TrimPrefix(ei.Path(), cwd)
            log.Printf("cleaning file %s", filePath)
            err := up.Delete(strings.TrimPrefix(filePath, folder))
            if err != nil {
                log.Printf("up.Delete(%q): %v", filePath, err)
            }
        }
    }
}

// Updater is a key-value store which can be informed when to recompute values
// for a particular key. Updaters are normally also AutoMaps.
type Updater[K any] interface {
    Fetch(K) error
    Delete(K) error
}

// AutoMap is a key-value store where the values are automatically computed by
// the store itself, based on the key.
type AutoMap[K, V any] interface {
    Get(K) (V, bool)
}

// OrderedAutoMap is an AutoMap that provides an iterator over its
// currently-existing keys in a known order.
type OrderedAutoMap[K, V any] interface {
    AutoMap[K, V]
    All() iter.Seq2[K, V]
}
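A minimal sketch of how a store is expected to plug into this package (the notemap package and notes/ folder below are hypothetical, not part of the commit): implement Fetch and Delete so watcher.Watch can keep the map fresh, and Get to satisfy AutoMap.

package notemap

import (
    "os"
    "prose/watcher"
    "sync"
)

// noteMap mirrors the raw bytes of every file in notes/.
type noteMap struct {
    mu    sync.RWMutex
    notes map[string][]byte
}

func New() (watcher.AutoMap[string, []byte], error) {
    nm := &noteMap{notes: make(map[string][]byte)}
    // nm also satisfies watcher.Updater[string], so the watcher can drive it.
    go watcher.Watch("notes/", nm)
    return nm, nil
}

func (nm *noteMap) Get(name string) ([]byte, bool) {
    nm.mu.RLock()
    defer nm.mu.RUnlock()
    b, ok := nm.notes[name]
    return b, ok
}

func (nm *noteMap) Fetch(name string) error {
    data, err := os.ReadFile("notes/" + name)
    if err != nil {
        return err
    }
    nm.mu.Lock()
    defer nm.mu.Unlock()
    nm.notes[name] = data
    return nil
}

func (nm *noteMap) Delete(name string) error {
    nm.mu.Lock()
    defer nm.mu.Unlock()
    delete(nm.notes, name)
    return nil
}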