
pico services mono repo
git clone https://github.com/picosh/pico.git

commit: 87407ab
parent: 0c80ce2
author: Eric Bower
date:   2025-01-14 10:12:22 -0500 EST
feat(prose): experimental ssg

Instead of having a web server that serves our blogs on prose.sh, we can
generate the sites and publish them to pgs.sh

This has some benefits:
- HTTP caching
- No need for a web server
- No need for an SSH server

Everything should work exactly the same, with one exception: how users
upload their markdown files.

```
# before
rsync *.md prose.sh:/
# after
rsync *.md pgs.sh:/prose
```

Then we have a pipe, `prose-drain`, that listens for newly uploaded
files and regenerates the HTML.
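
The drain consumer itself is not part of this commit. As a rough sketch of
the idea (stdin stands in for a subscription to the pipe; names here are
illustrative, not the real implementation), a listener could read one JSON
success-hook per line and kick off a rebuild for that user:

```
package main

import (
	"bufio"
	"encoding/json"
	"log/slog"
	"os"
)

// Mirrors the SuccesHook payload the upload handlers write to the pipe.
type SuccesHook struct {
	UserID   string `json:"user_id"`
	PostID   string `json:"post_id"`
	Action   string `json:"action"`
	Filename string `json:"filename"`
}

func main() {
	logger := slog.Default()
	// stdin stands in for the prose-drain pipe subscription.
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		var hook SuccesHook
		if err := json.Unmarshal(scanner.Bytes(), &hook); err != nil {
			logger.Error("malformed drain message", "err", err)
			continue
		}
		// A real consumer would look up the user here and regenerate
		// their blog, e.g. by calling into prose.SSG.
		logger.Info("drain event",
			"user_id", hook.UserID,
			"action", hook.Action,
			"filename", hook.Filename)
	}
	if err := scanner.Err(); err != nil {
		logger.Error("drain read failed", "err", err)
	}
}
```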
21 files changed, +1082, -32
M go.mod
M go.sum
A cmd/scripts/prose-ssg/main.go
+29, -0
 1@@ -0,0 +1,29 @@
 2+package main
 3+
 4+import (
 5+	"github.com/picosh/pico/db/postgres"
 6+	"github.com/picosh/pico/prose"
 7+	"github.com/picosh/pico/shared/storage"
 8+)
 9+
10+func bail(err error) {
11+	if err != nil {
12+		panic(err)
13+	}
14+}
15+
16+func main() {
17+	cfg := prose.NewConfigSite()
18+	picoDb := postgres.NewDB(cfg.DbURL, cfg.Logger)
19+	st, err := storage.NewStorageFS(cfg.Logger, cfg.StorageDir)
20+	bail(err)
21+	ssg := &prose.SSG{
22+		Cfg:       cfg,
23+		DB:        picoDb,
24+		Storage:   st,
25+		Logger:    cfg.Logger,
26+		TmplDir:   "./prose/html",
27+		StaticDir: "./prose/public",
28+	}
29+	bail(ssg.Prose())
30+}
M db/db.go
+1, -0
1@@ -381,6 +381,7 @@ type DB interface {
2 	FindPostsByTag(pager *Pager, tag, space string) (*Paginate[*Post], error)
3 	FindPopularTags(space string) ([]string, error)
4 	FindTagsForPost(postID string) ([]string, error)
5+	FindTagsForUser(userID string, space string) ([]string, error)
6 
7 	ReplaceAliasesForPost(aliases []string, postID string) error
8 
M db/postgres/storage.go
+28, -0
 1@@ -219,6 +219,12 @@ const (
 2 	GROUP BY name
 3 	ORDER BY tally DESC
 4 	LIMIT 5`
 5+	sqlSelectTagsForUser = `
 6+	SELECT name
 7+	FROM post_tags
 8+	LEFT JOIN posts ON posts.id = post_id
 9+	WHERE posts.user_id = $1 AND posts.cur_space = $2
10+	GROUP BY name`
11 	sqlSelectTagsForPost     = `SELECT name FROM post_tags WHERE post_id=$1`
12 	sqlSelectFeedItemsByPost = `SELECT id, post_id, guid, data, created_at FROM feed_items WHERE post_id=$1`
13 
14@@ -1300,6 +1306,7 @@ func (me *PsqlDB) insertAliasesForPost(tx *sql.Tx, aliases []string, postID stri
15 	denyList := []string{
16 		"rss",
17 		"rss.xml",
18+		"rss.atom",
19 		"atom.xml",
20 		"feed.xml",
21 		"smol.css",
22@@ -1435,6 +1442,27 @@ func (me *PsqlDB) FindPopularTags(space string) ([]string, error) {
23 	return tags, nil
24 }
25 
26+func (me *PsqlDB) FindTagsForUser(userID string, space string) ([]string, error) {
27+	tags := []string{}
28+	rs, err := me.Db.Query(sqlSelectTagsForUser, userID, space)
29+	if err != nil {
30+		return tags, err
31+	}
32+	for rs.Next() {
33+		name := ""
34+		err := rs.Scan(&name)
35+		if err != nil {
36+			return tags, err
37+		}
38+
39+		tags = append(tags, name)
40+	}
41+	if rs.Err() != nil {
42+		return tags, rs.Err()
43+	}
44+	return tags, nil
45+}
46+
47 func (me *PsqlDB) FindTagsForPost(postID string) ([]string, error) {
48 	tags := make([]string, 0)
49 	rs, err := me.Db.Query(sqlSelectTagsForPost, postID)
M db/stub/stub.go
+4, -0
1@@ -284,3 +284,7 @@ func (me *StubDB) RemoveFeature(userID string, name string) error {
2 func (me *StubDB) AddPicoPlusUser(username, email, paymentType, txId string) error {
3 	return notImpl
4 }
5+
6+func (me *StubDB) FindTagsForUser(userID string, tag string) ([]string, error) {
7+	return []string{}, notImpl
8+}
M feeds/scp_hooks.go
+4, -0
1@@ -81,3 +81,7 @@ func (p *FeedHooks) FileMeta(s ssh.Session, data *filehandlers.PostMetaData) err
2 
3 	return nil
4 }
5+
6+func (p *FeedHooks) FileSuccess(s ssh.Session, data *filehandlers.SuccesHook) error {
7+	return nil
8+}
M filehandlers/imgs/handler.go
+28, -2
 1@@ -2,6 +2,7 @@ package uploadimgs
 2 
 3 import (
 4 	"encoding/binary"
 5+	"encoding/json"
 6 	"fmt"
 7 	"io"
 8 	"net/http"
 9@@ -14,11 +15,13 @@ import (
10 	"github.com/charmbracelet/ssh"
11 	exifremove "github.com/neurosnap/go-exif-remove"
12 	"github.com/picosh/pico/db"
13+	"github.com/picosh/pico/filehandlers"
14 	"github.com/picosh/pico/shared"
15 	"github.com/picosh/pico/shared/storage"
16 	"github.com/picosh/pobj"
17 	sendutils "github.com/picosh/send/utils"
18 	"github.com/picosh/utils"
19+	pipeUtil "github.com/picosh/utils/pipe"
20 )
21 
22 var Space = "imgs"
23@@ -37,13 +40,15 @@ type UploadImgHandler struct {
24 	DBPool  db.DB
25 	Cfg     *shared.ConfigSite
26 	Storage storage.StorageServe
27+	Pipe    *pipeUtil.ReconnectReadWriteCloser
28 }
29 
30-func NewUploadImgHandler(dbpool db.DB, cfg *shared.ConfigSite, storage storage.StorageServe) *UploadImgHandler {
31+func NewUploadImgHandler(dbpool db.DB, cfg *shared.ConfigSite, storage storage.StorageServe, pipeClient *pipeUtil.ReconnectReadWriteCloser) *UploadImgHandler {
32 	return &UploadImgHandler{
33 		DBPool:  dbpool,
34 		Cfg:     cfg,
35 		Storage: storage,
36+		Pipe:    pipeClient,
37 	}
38 }
39 
40@@ -86,6 +91,16 @@ func (h *UploadImgHandler) Read(s ssh.Session, entry *sendutils.FileEntry) (os.F
41 	return fileInfo, reader, nil
42 }
43 
44+func (h *UploadImgHandler) Success(s ssh.Session, data *filehandlers.SuccesHook) error {
45+	out, err := json.Marshal(data)
46+	if err != nil {
47+		return err
48+	}
49+	out = append(out, '\n')
50+	_, err = h.Pipe.Write(out)
51+	return err
52+}
53+
54 func (h *UploadImgHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (string, error) {
55 	logger := h.Cfg.Logger
56 	user, err := h.DBPool.FindUser(s.Permissions().Extensions["user_id"])
57@@ -171,6 +186,12 @@ func (h *UploadImgHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (str
58 		return "", err
59 	}
60 
61+	_ = h.Success(s, &filehandlers.SuccesHook{
62+		UserID:   user.ID,
63+		Action:   "create",
64+		Filename: metadata.Filename,
65+	})
66+
67 	curl := shared.NewCreateURL(h.Cfg)
68 	url := h.Cfg.FullPostURL(
69 		curl,
70@@ -223,12 +244,17 @@ func (h *UploadImgHandler) Delete(s ssh.Session, entry *sendutils.FileEntry) err
71 		return err
72 	}
73 
74+	logger.Info("deleting image")
75 	err = h.Storage.DeleteObject(bucket, filename)
76 	if err != nil {
77 		return err
78 	}
79 
80-	logger.Info("deleting image")
81+	_ = h.Success(s, &filehandlers.SuccesHook{
82+		UserID:   user.ID,
83+		Action:   "delete",
84+		Filename: filename,
85+	})
86 
87 	return nil
88 }
M filehandlers/post_handler.go
+23, -1
 1@@ -27,9 +27,17 @@ type PostMetaData struct {
 2 	Aliases   []string
 3 }
 4 
 5+type SuccesHook struct {
 6+	UserID   string `json:"user_id"`
 7+	PostID   string `json:"post_id"`
 8+	Action   string `json:"action"`
 9+	Filename string `json:"filename"`
10+}
11+
12 type ScpFileHooks interface {
13 	FileValidate(s ssh.Session, data *PostMetaData) (bool, error)
14 	FileMeta(s ssh.Session, data *PostMetaData) error
15+	FileSuccess(s ssh.Session, data *SuccesHook) error
16 }
17 
18 type ScpUploadHandler struct {
19@@ -155,8 +163,9 @@ func (h *ScpUploadHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (str
20 		modTime = time.Unix(entry.Mtime, 0)
21 	}
22 
23-	// if the file is empty we remove it from our database
24+	action := ""
25 	if post == nil {
26+		action = "create"
27 		logger.Info("file not found, adding record")
28 		insertPost := db.Post{
29 			UserID: userID,
30@@ -213,6 +222,7 @@ func (h *ScpUploadHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (str
31 			return h.Cfg.FullPostURL(curl, user.Name, metadata.Slug), nil
32 		}
33 
34+		action = "update"
35 		logger.Info("file found, updating record")
36 
37 		updatePost := db.Post{
38@@ -257,6 +267,12 @@ func (h *ScpUploadHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (str
39 		}
40 	}
41 
42+	_ = h.Hooks.FileSuccess(s, &SuccesHook{
43+		UserID:   user.ID,
44+		PostID:   post.ID,
45+		Action:   action,
46+		Filename: metadata.Filename,
47+	})
48 	curl := shared.NewCreateURL(h.Cfg)
49 	return h.Cfg.FullPostURL(curl, user.Name, metadata.Slug), nil
50 }
51@@ -291,5 +307,11 @@ func (h *ScpUploadHandler) Delete(s ssh.Session, entry *sendutils.FileEntry) err
52 		logger.Error("post could not remove", "err", err.Error())
53 		return fmt.Errorf("error for %s: %v", filename, err)
54 	}
55+	_ = h.Hooks.FileSuccess(s, &SuccesHook{
56+		UserID:   user.ID,
57+		PostID:   post.ID,
58+		Action:   "delete",
59+		Filename: filename,
60+	})
61 	return nil
62 }
M go.mod
+1, -1
1@@ -48,7 +48,7 @@ require (
2 	github.com/muesli/reflow v0.3.0
3 	github.com/muesli/termenv v0.15.3-0.20240912151726-82936c5ea257
4 	github.com/neurosnap/go-exif-remove v0.0.0-20221010134343-50d1e3c35577
5-	github.com/picosh/pobj v0.0.0-20241016194248-c39198b2ff23
6+	github.com/picosh/pobj v0.0.0-20250115045405-73c816ed76c2
7 	github.com/picosh/pubsub v0.0.0-20241114191831-ec8f16c0eb88
8 	github.com/picosh/send v0.0.0-20241218031305-056b1fe8ff80
9 	github.com/picosh/tunkit v0.0.0-20240905223921-532404cef9d9
M go.sum
+2, -2
 1@@ -711,8 +711,8 @@ github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c h1:dAMKvw0MlJT1Gsh
 2 github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
 3 github.com/picosh/go-rsync-receiver v0.0.0-20240709135253-1daf4b12a9fc h1:bvcsoOvaNHPquFnRkdraEo7+8t6bW7nWEhlALnwZPdI=
 4 github.com/picosh/go-rsync-receiver v0.0.0-20240709135253-1daf4b12a9fc/go.mod h1:i0iR3W4GSm1PuvVxB9OH32E5jP+CYkVb2NQSe0JCtlo=
 5-github.com/picosh/pobj v0.0.0-20241016194248-c39198b2ff23 h1:NEJ5a4UXeF0/X7xmYNzXcwLQID9DwgazlqkMMC5zZ3M=
 6-github.com/picosh/pobj v0.0.0-20241016194248-c39198b2ff23/go.mod h1:cF+eAl4G1vU+WOD8cYCKaxokHo6MWmbR8J4/SJnvESg=
 7+github.com/picosh/pobj v0.0.0-20250115045405-73c816ed76c2 h1:fOz+o8pymr93p5OeJkehxkunWeFyVranWBsOmEE0OkI=
 8+github.com/picosh/pobj v0.0.0-20250115045405-73c816ed76c2/go.mod h1:cF+eAl4G1vU+WOD8cYCKaxokHo6MWmbR8J4/SJnvESg=
 9 github.com/picosh/pubsub v0.0.0-20241114191831-ec8f16c0eb88 h1:hdxE6rquHHw1/eeqS1b+ojLaxGtN8zOiTUclPwaVbPg=
10 github.com/picosh/pubsub v0.0.0-20241114191831-ec8f16c0eb88/go.mod h1:+9hDKIDHQCvGFigCVlIl589BwpT9R4boKhUVc/OgRU4=
11 github.com/picosh/send v0.0.0-20241218031305-056b1fe8ff80 h1:m0x9UOipmz0HCMNuhpzOgxRgOHefgNebmpcTwu0CwxU=
M pastes/scp_hooks.go
+4, -0
1@@ -97,3 +97,7 @@ func (p *FileHooks) FileMeta(s ssh.Session, data *filehandlers.PostMetaData) err
2 
3 	return nil
4 }
5+
6+func (p *FileHooks) FileSuccess(s ssh.Session, data *filehandlers.SuccesHook) error {
7+	return nil
8+}
M pgs/redirect_test.go
+15, -0
 1@@ -29,6 +29,20 @@ func TestParseRedirectText(t *testing.T) {
 2 		},
 3 	}
 4 
 5+	rss := RedirectFixture{
 6+		name:  "rss",
 7+		input: "/rss /rss.atom 200",
 8+		expect: []*RedirectRule{
 9+			{
10+				From:       "/rss",
11+				To:         "/rss.atom",
12+				Status:     200,
13+				Query:      empty,
14+				Conditions: empty,
15+			},
16+		},
17+	}
18+
19 	withStatus := RedirectFixture{
20 		name:  "with-status",
21 		input: "/wow     /index.html     301",
22@@ -80,6 +94,7 @@ func TestParseRedirectText(t *testing.T) {
23 
24 	fixtures := []RedirectFixture{
25 		spa,
26+		rss,
27 		withStatus,
28 		noStatus,
29 		absoluteUriNoProto,
M prose/api.go
+1, -0
1@@ -98,6 +98,7 @@ type HeaderTxt struct {
2 	ImageCard  string
3 	Favicon    template.URL
4 	WithStyles bool
5+	Domain     string
6 }
7 
8 type ReadmeTxt struct {
M prose/config.go
+1, -1
1@@ -10,7 +10,7 @@ func NewConfigSite() *shared.ConfigSite {
2 	domain := utils.GetEnv("PROSE_DOMAIN", "prose.sh")
3 	port := utils.GetEnv("PROSE_WEB_PORT", "3000")
4 	protocol := utils.GetEnv("PROSE_PROTOCOL", "https")
5-	storageDir := utils.GetEnv("IMGS_STORAGE_DIR", ".storage")
6+	storageDir := utils.GetEnv("PROSE_STORAGE_DIR", ".storage")
7 	minioURL := utils.GetEnv("MINIO_URL", "")
8 	minioUser := utils.GetEnv("MINIO_ROOT_USER", "")
9 	minioPass := utils.GetEnv("MINIO_ROOT_PASSWORD", "")
M prose/html/marketing-footer.partial.tmpl
+1, -1
1@@ -3,7 +3,7 @@
2     <hr />
3     <p class="font-italic">Built and maintained by <a href="https://pico.sh">pico.sh</a>.</p>
4     <div>
5-        <a href="/rss">rss</a>
6+        <a href="/rss.atom">rss</a>
7     </div>
8 </footer>
9 {{end}}
M prose/html/read.page.tmpl
+1, -16
 1@@ -39,6 +39,7 @@
 2     </div>
 3     <hr class="mt-2" />
 4 </header>
 5+
 6 <main>
 7     {{range .Posts}}
 8     <article class="my">
 9@@ -53,22 +54,6 @@
10         </div>
11     </article>
12     {{end}}
13-
14-    <div class="mt">
15-        {{if .PrevPage}}<a href="{{.PrevPage}}" class="link-alt">prev</a>{{else}}<span class="text-grey">prev</span>{{end}}
16-        {{if .NextPage}}<a href="{{.NextPage}}" class="link-alt">next</a>{{else}}<span class="text-grey">next</span>{{end}}
17-    </div>
18-
19-    <div class="flex items-center">
20-        <div class="post-date"></div>
21-        <div class="flex-1">
22-        {{range .Tags}}
23-        <code class="pill"><a href="/read?tag={{.}}">#{{.}}</a></code>
24-        {{end}}
25-        </div>
26-    </div>
27-    {{if .HasFilter}}<a href="/read">clear filter</a>{{end}}
28-
29 </main>
30 {{template "marketing-footer" .}}
31 {{end}}
D prose/public/apple-touch-icon.png
+0, -0
M prose/scp_hooks.go
+15, -2
 1@@ -1,6 +1,7 @@
 2 package prose
 3 
 4 import (
 5+	"encoding/json"
 6 	"fmt"
 7 	"strings"
 8 
 9@@ -11,11 +12,13 @@ import (
10 	"github.com/picosh/pico/filehandlers"
11 	"github.com/picosh/pico/shared"
12 	"github.com/picosh/utils"
13+	pipeUtil "github.com/picosh/utils/pipe"
14 )
15 
16 type MarkdownHooks struct {
17-	Cfg *shared.ConfigSite
18-	Db  db.DB
19+	Cfg  *shared.ConfigSite
20+	Db   db.DB
21+	Pipe *pipeUtil.ReconnectReadWriteCloser
22 }
23 
24 func (p *MarkdownHooks) FileValidate(s ssh.Session, data *filehandlers.PostMetaData) (bool, error) {
25@@ -72,3 +75,13 @@ func (p *MarkdownHooks) FileMeta(s ssh.Session, data *filehandlers.PostMetaData)
26 
27 	return nil
28 }
29+
30+func (p *MarkdownHooks) FileSuccess(s ssh.Session, data *filehandlers.SuccesHook) error {
31+	out, err := json.Marshal(data)
32+	if err != nil {
33+		return err
34+	}
35+	out = append(out, '\n')
36+	_, err = p.Pipe.Write(out)
37+	return err
38+}
A prose/ssg.go
+889, -0
  1@@ -0,0 +1,889 @@
  2+package prose
  3+
  4+import (
  5+	"bytes"
  6+	"fmt"
  7+	"html/template"
  8+	"io"
  9+	"log/slog"
 10+	"os"
 11+	"path/filepath"
 12+	"strings"
 13+	"time"
 14+
 15+	"slices"
 16+
 17+	"github.com/gorilla/feeds"
 18+	"github.com/picosh/pico/db"
 19+	"github.com/picosh/pico/shared"
 20+	"github.com/picosh/pico/shared/storage"
 21+	sst "github.com/picosh/pobj/storage"
 22+	sendUtils "github.com/picosh/send/utils"
 23+	"github.com/picosh/utils"
 24+)
 25+
 26+type SSG struct {
 27+	Logger    *slog.Logger
 28+	DB        db.DB
 29+	Cfg       *shared.ConfigSite
 30+	Storage   storage.StorageServe
 31+	TmplDir   string
 32+	StaticDir string
 33+}
 34+
 35+var Space = "prose"
 36+
 37+func getPostTitle(post *db.Post) string {
 38+	if post.Description == "" {
 39+		return post.Title
 40+	}
 41+
 42+	return fmt.Sprintf("%s: %s", post.Title, post.Description)
 43+}
 44+
 45+func getBlogName(username string) string {
 46+	return fmt.Sprintf("%s's blog", username)
 47+}
 48+
 49+func getBlogDomain(username, domain string) string {
 50+	return fmt.Sprintf("%s.%s", username, domain)
 51+}
 52+
 53+func (ssg *SSG) tmpl(fpath string) string {
 54+	return filepath.Join(ssg.TmplDir, fpath)
 55+}
 56+
 57+func (ssg *SSG) blogPage(w io.Writer, user *db.User, tag string) error {
 58+	pager := &db.Pager{Num: 250, Page: 0}
 59+	var err error
 60+	var posts []*db.Post
 61+	var p *db.Paginate[*db.Post]
 62+	if tag == "" {
 63+		p, err = ssg.DB.FindPostsForUser(pager, user.ID, Space)
 64+	} else {
 65+		p, err = ssg.DB.FindUserPostsByTag(pager, tag, user.ID, Space)
 66+	}
 67+	posts = p.Data
 68+
 69+	if err != nil {
 70+		return err
 71+	}
 72+
 73+	files := []string{
 74+		ssg.tmpl("blog.page.tmpl"),
 75+		ssg.tmpl("blog-default.partial.tmpl"),
 76+		ssg.tmpl("blog-aside.partial.tmpl"),
 77+		ssg.tmpl("footer.partial.tmpl"),
 78+		ssg.tmpl("marketing-footer.partial.tmpl"),
 79+		ssg.tmpl("base.layout.tmpl"),
 80+	}
 81+	ts, err := template.ParseFiles(files...)
 82+	if err != nil {
 83+		return err
 84+	}
 85+
 86+	headerTxt := &HeaderTxt{
 87+		Title:      getBlogName(user.Name),
 88+		Bio:        "",
 89+		Layout:     "default",
 90+		ImageCard:  "summary",
 91+		WithStyles: true,
 92+		Domain:     getBlogDomain(user.Name, ssg.Cfg.Domain),
 93+	}
 94+	readmeTxt := &ReadmeTxt{}
 95+
 96+	readme, err := ssg.DB.FindPostWithFilename("_readme.md", user.ID, Space)
 97+	if err == nil {
 98+		parsedText, err := shared.ParseText(readme.Text)
 99+		if err != nil {
100+			return err
101+		}
102+		headerTxt.Bio = parsedText.Description
103+		headerTxt.Layout = parsedText.Layout
104+		headerTxt.Image = template.URL(parsedText.Image)
105+		headerTxt.ImageCard = parsedText.ImageCard
106+		headerTxt.WithStyles = parsedText.WithStyles
107+		headerTxt.Favicon = template.URL(parsedText.Favicon)
108+		if parsedText.Title != "" {
109+			headerTxt.Title = parsedText.Title
110+		}
111+		if parsedText.Domain != "" {
112+			headerTxt.Domain = parsedText.Domain
113+		}
114+
115+		headerTxt.Nav = []shared.Link{}
116+		for _, nav := range parsedText.Nav {
117+			finURL := nav.URL
118+			headerTxt.Nav = append(headerTxt.Nav, shared.Link{
119+				URL:  finURL,
120+				Text: nav.Text,
121+			})
122+		}
123+
124+		readmeTxt.Contents = template.HTML(parsedText.Html)
125+		if len(readmeTxt.Contents) > 0 {
126+			readmeTxt.HasText = true
127+		}
128+	}
129+
130+	hasCSS := false
131+	_, err = ssg.DB.FindPostWithFilename("_styles.css", user.ID, Space)
132+	if err == nil {
133+		hasCSS = true
134+	}
135+
136+	postCollection := make([]PostItemData, 0, len(posts))
137+	for _, post := range posts {
138+		p := PostItemData{
139+			URL: template.URL(
140+				fmt.Sprintf("/%s.html", post.Slug),
141+			),
142+			BlogURL:        template.URL("/"),
143+			Title:          utils.FilenameToTitle(post.Filename, post.Title),
144+			PublishAt:      post.PublishAt.Format(time.DateOnly),
145+			PublishAtISO:   post.PublishAt.Format(time.RFC3339),
146+			UpdatedTimeAgo: utils.TimeAgo(post.UpdatedAt),
147+			UpdatedAtISO:   post.UpdatedAt.Format(time.RFC3339),
148+		}
149+		postCollection = append(postCollection, p)
150+	}
151+
152+	rssIdx := "/rss.atom"
153+	data := BlogPageData{
154+		Site:       *ssg.Cfg.GetSiteData(),
155+		PageTitle:  headerTxt.Title,
156+		URL:        template.URL(fmt.Sprintf("%s://%s", ssg.Cfg.Protocol, headerTxt.Domain)),
157+		RSSURL:     template.URL(rssIdx),
158+		Readme:     readmeTxt,
159+		Header:     headerTxt,
160+		Username:   user.Name,
161+		Posts:      postCollection,
162+		HasCSS:     hasCSS,
163+		CssURL:     template.URL("/_styles.css"),
164+		HasFilter:  tag != "",
165+		WithStyles: headerTxt.WithStyles,
166+	}
167+
168+	return ts.Execute(w, data)
169+}
170+
171+func (ssg *SSG) rssBlogPage(w io.Writer, user *db.User, tag string) error {
172+	var err error
173+	pager := &db.Pager{Num: 10, Page: 0}
174+	var posts []*db.Post
175+	var p *db.Paginate[*db.Post]
176+	if tag == "" {
177+		p, err = ssg.DB.FindPostsForUser(pager, user.ID, Space)
178+	} else {
179+		p, err = ssg.DB.FindUserPostsByTag(pager, tag, user.ID, Space)
180+	}
181+
182+	if err != nil {
183+		return err
184+	}
185+
186+	posts = p.Data
187+
188+	ts, err := template.ParseFiles(ssg.tmpl("rss.page.tmpl"))
189+	if err != nil {
190+		return err
191+	}
192+
193+	headerTxt := &HeaderTxt{
194+		Title:  getBlogName(user.Name),
195+		Domain: getBlogDomain(user.Name, ssg.Cfg.Domain),
196+	}
197+
198+	readme, err := ssg.DB.FindPostWithFilename("_readme.md", user.ID, Space)
199+	if err == nil {
200+		parsedText, err := shared.ParseText(readme.Text)
201+		if err != nil {
202+			return err
203+		}
204+		if parsedText.Title != "" {
205+			headerTxt.Title = parsedText.Title
206+		}
207+
208+		if parsedText.Description != "" {
209+			headerTxt.Bio = parsedText.Description
210+		}
211+
212+		if parsedText.Domain != "" {
213+			headerTxt.Domain = parsedText.Domain
214+		}
215+	}
216+
217+	blogUrl := fmt.Sprintf("%s://%s", ssg.Cfg.Protocol, headerTxt.Domain)
218+
219+	feed := &feeds.Feed{
220+		Id:          blogUrl,
221+		Title:       headerTxt.Title,
222+		Link:        &feeds.Link{Href: blogUrl},
223+		Description: headerTxt.Bio,
224+		Author:      &feeds.Author{Name: user.Name},
225+		Created:     *user.CreatedAt,
226+	}
227+
228+	var feedItems []*feeds.Item
229+	for _, post := range posts {
230+		if slices.Contains(ssg.Cfg.HiddenPosts, post.Filename) {
231+			continue
232+		}
233+		parsed, err := shared.ParseText(post.Text)
234+		if err != nil {
235+			return err
236+		}
237+
238+		footer, err := ssg.DB.FindPostWithFilename("_footer.md", user.ID, Space)
239+		var footerHTML string
240+		if err == nil {
241+			footerParsed, err := shared.ParseText(footer.Text)
242+			if err != nil {
243+				return err
244+			}
245+			footerHTML = footerParsed.Html
246+		}
247+
248+		var tpl bytes.Buffer
249+		data := &PostPageData{
250+			Contents: template.HTML(parsed.Html + footerHTML),
251+		}
252+		if err := ts.Execute(&tpl, data); err != nil {
253+			continue
254+		}
255+
256+		realUrl := fmt.Sprintf("%s://%s/%s", ssg.Cfg.Protocol, headerTxt.Domain, post.Slug)
257+		feedId := realUrl
258+
259+		item := &feeds.Item{
260+			Id:          feedId,
261+			Title:       utils.FilenameToTitle(post.Filename, post.Title),
262+			Link:        &feeds.Link{Href: realUrl},
263+			Content:     tpl.String(),
264+			Updated:     *post.UpdatedAt,
265+			Created:     *post.CreatedAt,
266+			Description: post.Description,
267+		}
268+
269+		if post.Description != "" {
270+			item.Description = post.Description
271+		}
272+
273+		feedItems = append(feedItems, item)
274+	}
275+	feed.Items = feedItems
276+
277+	rss, err := feed.ToAtom()
278+	if err != nil {
279+		return err
280+	}
281+
282+	_, err = w.Write([]byte(rss))
283+	return err
284+}
285+
286+func (ssg *SSG) postPage(w io.Writer, user *db.User, post *db.Post) ([]string, error) {
287+	blogName := getBlogName(user.Name)
288+	favicon := ""
289+	ogImage := ""
290+	ogImageCard := ""
291+	hasCSS := false
292+	withStyles := true
293+	domain := getBlogDomain(user.Name, ssg.Cfg.Domain)
294+	var data PostPageData
295+	aliases := []string{}
296+
297+	css, err := ssg.DB.FindPostWithFilename("_styles.css", user.ID, Space)
298+	if err == nil {
299+		if len(css.Text) > 0 {
300+			hasCSS = true
301+		}
302+	}
303+
304+	footer, err := ssg.DB.FindPostWithFilename("_footer.md", user.ID, Space)
305+	var footerHTML template.HTML
306+	if err == nil {
307+		footerParsed, err := shared.ParseText(footer.Text)
308+		if err != nil {
309+			return aliases, err
310+		}
311+		footerHTML = template.HTML(footerParsed.Html)
312+	}
313+
314+	// we need the blog name from the readme unfortunately
315+	readme, err := ssg.DB.FindPostWithFilename("_readme.md", user.ID, Space)
316+	if err == nil {
317+		readmeParsed, err := shared.ParseText(readme.Text)
318+		if err != nil {
319+			return aliases, err
320+		}
321+		if readmeParsed.MetaData.Title != "" {
322+			blogName = readmeParsed.MetaData.Title
323+		}
324+		if readmeParsed.MetaData.Domain != "" {
325+			domain = readmeParsed.MetaData.Domain
326+		}
327+		withStyles = readmeParsed.WithStyles
328+		ogImage = readmeParsed.Image
329+		ogImageCard = readmeParsed.ImageCard
330+		favicon = readmeParsed.Favicon
331+	}
332+
333+	diff := ""
334+	parsedText, err := shared.ParseText(post.Text)
335+	if err != nil {
336+		return aliases, err
337+	}
338+
339+	if parsedText.Image != "" {
340+		ogImage = parsedText.Image
341+	}
342+
343+	if parsedText.ImageCard != "" {
344+		ogImageCard = parsedText.ImageCard
345+	}
346+
347+	aliases = parsedText.Aliases
348+
349+	unlisted := false
350+	if post.Hidden || post.PublishAt.After(time.Now()) {
351+		unlisted = true
352+	}
353+
354+	data = PostPageData{
355+		Site:      *ssg.Cfg.GetSiteData(),
356+		PageTitle: getPostTitle(post),
357+		URL: template.URL(
358+			fmt.Sprintf("%s://%s/%s", ssg.Cfg.Protocol, domain, post.Slug),
359+		),
360+		BlogURL:      "/",
361+		Description:  post.Description,
362+		Title:        utils.FilenameToTitle(post.Filename, post.Title),
363+		Slug:         post.Slug,
364+		PublishAt:    post.PublishAt.Format(time.DateOnly),
365+		PublishAtISO: post.PublishAt.Format(time.RFC3339),
366+		Username:     user.Name,
367+		BlogName:     blogName,
368+		Contents:     template.HTML(parsedText.Html),
369+		HasCSS:       hasCSS,
370+		CssURL:       template.URL("/_styles.css"),
371+		Tags:         parsedText.Tags,
372+		Image:        template.URL(ogImage),
373+		ImageCard:    ogImageCard,
374+		Favicon:      template.URL(favicon),
375+		Footer:       footerHTML,
376+		Unlisted:     unlisted,
377+		Diff:         template.HTML(diff),
378+		WithStyles:   withStyles,
379+	}
380+
381+	files := []string{
382+		ssg.tmpl("post.page.tmpl"),
383+		ssg.tmpl("footer.partial.tmpl"),
384+		ssg.tmpl("marketing-footer.partial.tmpl"),
385+		ssg.tmpl("base.layout.tmpl"),
386+	}
387+	ts, err := template.ParseFiles(files...)
388+	if err != nil {
389+		return aliases, err
390+	}
391+
392+	return aliases, ts.Execute(w, data)
393+}
394+
395+func (ssg *SSG) discoverPage(w io.Writer) error {
396+	pager, err := ssg.DB.FindAllPosts(&db.Pager{Num: 50, Page: 0}, Space)
397+	if err != nil {
398+		return err
399+	}
400+
401+	data := ReadPageData{
402+		Site: *ssg.Cfg.GetSiteData(),
403+	}
404+
405+	for _, post := range pager.Data {
406+		item := PostItemData{
407+			URL: template.URL(
408+				fmt.Sprintf(
409+					"%s://%s/%s",
410+					ssg.Cfg.Protocol,
411+					getBlogDomain(post.Username, ssg.Cfg.Domain),
412+					post.Slug,
413+				),
414+			),
415+			BlogURL:        template.URL(getBlogDomain(post.Username, ssg.Cfg.Domain)),
416+			Title:          utils.FilenameToTitle(post.Filename, post.Title),
417+			Description:    post.Description,
418+			Username:       post.Username,
419+			PublishAt:      post.PublishAt.Format(time.DateOnly),
420+			PublishAtISO:   post.PublishAt.Format(time.RFC3339),
421+			UpdatedTimeAgo: utils.TimeAgo(post.UpdatedAt),
422+			UpdatedAtISO:   post.UpdatedAt.Format(time.RFC3339),
423+		}
424+		data.Posts = append(data.Posts, item)
425+	}
426+
427+	files := []string{
428+		ssg.tmpl("read.page.tmpl"),
429+		ssg.tmpl("footer.partial.tmpl"),
430+		ssg.tmpl("marketing-footer.partial.tmpl"),
431+		ssg.tmpl("base.layout.tmpl"),
432+	}
433+	ts, err := template.ParseFiles(files...)
434+	if err != nil {
435+		return err
436+	}
437+
438+	return ts.Execute(w, data)
439+}
440+
441+func (ssg *SSG) discoverRssPage(w io.Writer) error {
442+	pager, err := ssg.DB.FindAllPosts(&db.Pager{Num: 25, Page: 0}, Space)
443+	if err != nil {
444+		return err
445+	}
446+
447+	files := []string{
448+		ssg.tmpl("rss.page.tmpl"),
449+	}
450+	ts, err := template.ParseFiles(files...)
451+	if err != nil {
452+		return err
453+	}
454+
455+	feed := &feeds.Feed{
456+		Title: fmt.Sprintf("%s discovery feed", ssg.Cfg.Domain),
457+		Link: &feeds.Link{
458+			Href: fmt.Sprintf("%s://%s", ssg.Cfg.Protocol, ssg.Cfg.Domain),
459+		},
460+		Description: fmt.Sprintf("%s latest posts", ssg.Cfg.Domain),
461+		Author:      &feeds.Author{Name: ssg.Cfg.Domain},
462+		Created:     time.Now(),
463+	}
464+
465+	var feedItems []*feeds.Item
466+	for _, post := range pager.Data {
467+		parsed, err := shared.ParseText(post.Text)
468+		if err != nil {
469+			return err
470+		}
471+
472+		var tpl bytes.Buffer
473+		data := &PostPageData{
474+			Contents: template.HTML(parsed.Html),
475+		}
476+		if err := ts.Execute(&tpl, data); err != nil {
477+			continue
478+		}
479+
480+		realUrl := fmt.Sprintf(
481+			"%s://%s/%s",
482+			ssg.Cfg.Protocol,
483+			getBlogDomain(post.Username, ssg.Cfg.Domain),
484+			post.Slug,
485+		)
486+
487+		item := &feeds.Item{
488+			Id:          realUrl,
489+			Title:       post.Title,
490+			Link:        &feeds.Link{Href: realUrl},
491+			Content:     tpl.String(),
492+			Created:     *post.PublishAt,
493+			Updated:     *post.UpdatedAt,
494+			Description: post.Description,
495+			Author:      &feeds.Author{Name: post.Username},
496+		}
497+
498+		if post.Description != "" {
499+			item.Description = post.Description
500+		}
501+
502+		feedItems = append(feedItems, item)
503+	}
504+	feed.Items = feedItems
505+
506+	rss, err := feed.ToAtom()
507+	if err != nil {
508+		return err
509+	}
510+
511+	_, err = w.Write([]byte(rss))
512+	return err
513+}
514+
515+func (ssg *SSG) upload(bucket sst.Bucket, fpath string, rdr io.Reader) error {
516+	toSite := filepath.Join("prose-blog", fpath)
517+	ssg.Logger.Info("uploading object", "bucket", bucket.Name, "object", toSite)
518+	buf := &bytes.Buffer{}
519+	size, err := io.Copy(buf, rdr)
520+	if err != nil {
521+		return err
522+	}
523+
524+	_, _, err = ssg.Storage.PutObject(bucket, toSite, buf, &sendUtils.FileEntry{
525+		Mtime: time.Now().Unix(),
526+		Size:  size,
527+	})
528+	return err
529+}
530+
531+func (ssg *SSG) notFoundPage(w io.Writer, user *db.User) error {
532+	ogImage := ""
533+	ogImageCard := ""
534+	favicon := ""
535+	contents := template.HTML("Oops!  we can't seem to find this post.")
536+	title := "Post not found"
537+	desc := "Post not found"
538+	hasCSS := false
539+
540+	css, err := ssg.DB.FindPostWithFilename("_styles.css", user.ID, Space)
541+	if err == nil {
542+		if len(css.Text) > 0 {
543+			hasCSS = true
544+		}
545+	}
546+
547+	footer, err := ssg.DB.FindPostWithFilename("_footer.md", user.ID, Space)
548+	var footerHTML template.HTML
549+	if err == nil {
550+		footerParsed, err := shared.ParseText(footer.Text)
551+		if err != nil {
552+			return err
553+		}
554+		footerHTML = template.HTML(footerParsed.Html)
555+	}
556+
557+	// we need the blog name from the readme unfortunately
558+	readme, err := ssg.DB.FindPostWithFilename("_readme.md", user.ID, Space)
559+	if err == nil {
560+		readmeParsed, err := shared.ParseText(readme.Text)
561+		if err != nil {
562+			return err
563+		}
564+		ogImage = readmeParsed.Image
565+		ogImageCard = readmeParsed.ImageCard
566+		favicon = readmeParsed.Favicon
567+	}
568+
569+	notFound, err := ssg.DB.FindPostWithFilename("_404.md", user.ID, Space)
570+	if err == nil {
571+		notFoundParsed, err := shared.ParseText(notFound.Text)
572+		if err != nil {
573+			ssg.Logger.Error("could not parse markdown", "err", err.Error())
574+			return err
575+		}
576+		if notFoundParsed.MetaData.Title != "" {
577+			title = notFoundParsed.MetaData.Title
578+		}
579+		if notFoundParsed.MetaData.Description != "" {
580+			desc = notFoundParsed.MetaData.Description
581+		}
582+		ogImage = notFoundParsed.Image
583+		ogImageCard = notFoundParsed.ImageCard
584+		favicon = notFoundParsed.Favicon
585+		contents = template.HTML(notFoundParsed.Html)
586+	}
587+
588+	data := PostPageData{
589+		Site:         *ssg.Cfg.GetSiteData(),
590+		BlogURL:      "/",
591+		PageTitle:    title,
592+		Description:  desc,
593+		Title:        title,
594+		PublishAt:    time.Now().Format(time.DateOnly),
595+		PublishAtISO: time.Now().Format(time.RFC3339),
596+		Username:     user.Name,
597+		BlogName:     getBlogName(user.Name),
598+		HasCSS:       hasCSS,
599+		CssURL:       template.URL("/_styles.css"),
600+		Image:        template.URL(ogImage),
601+		ImageCard:    ogImageCard,
602+		Favicon:      template.URL(favicon),
603+		Footer:       footerHTML,
604+		Contents:     contents,
605+		Unlisted:     true,
606+	}
607+	files := []string{
608+		ssg.tmpl("post.page.tmpl"),
609+		ssg.tmpl("footer.partial.tmpl"),
610+		ssg.tmpl("marketing-footer.partial.tmpl"),
611+		ssg.tmpl("base.layout.tmpl"),
612+	}
613+	ts, err := template.ParseFiles(files...)
614+	if err != nil {
615+		return err
616+	}
617+	return ts.Execute(w, data)
618+}
619+
620+func (ssg *SSG) images(user *db.User, bucket sst.Bucket) error {
621+	imgBucket, err := ssg.Storage.GetBucket(shared.GetImgsBucketName(user.ID))
622+	if err != nil {
623+		ssg.Logger.Info("user does not have an images dir, skipping")
624+		return nil
625+	}
626+	imgs, err := ssg.Storage.ListObjects(imgBucket, "/", false)
627+	if err != nil {
628+		return err
629+	}
630+
631+	for _, inf := range imgs {
632+		rdr, _, err := ssg.Storage.GetObject(imgBucket, inf.Name())
633+		if err != nil {
634+			return err
635+		}
636+		err = ssg.upload(bucket, inf.Name(), rdr)
637+		if err != nil {
638+			return err
639+		}
640+	}
641+
642+	return nil
643+}
644+
645+func (ssg *SSG) static(bucket sst.Bucket) error {
646+	files, err := os.ReadDir(ssg.StaticDir)
647+	if err != nil {
648+		return err
649+	}
650+	for _, file := range files {
651+		if file.IsDir() {
652+			continue
653+		}
654+		fpath := filepath.Join(ssg.StaticDir, file.Name())
655+		fp, err := os.Open(fpath)
656+		if err != nil {
657+			return err
658+		}
659+		err = ssg.upload(bucket, file.Name(), fp)
660+		if err != nil {
661+			return err
662+		}
663+	}
664+
665+	return nil
666+}
667+
668+func (ssg *SSG) Prose() error {
669+	ssg.Logger.Info("generating discover page")
670+	rdr, wtr := io.Pipe()
671+	go func() {
672+		err := ssg.discoverPage(wtr)
673+		wtr.Close()
674+		if err != nil {
675+			ssg.Logger.Error("discover page", "err", err)
676+		}
677+	}()
678+
679+	user, err := ssg.DB.FindUserForName("pico")
680+	if err != nil {
681+		return err
682+	}
683+
684+	bucketName := shared.GetAssetBucketName(user.ID)
685+	bucket, err := ssg.Storage.UpsertBucket(bucketName)
686+	if err != nil {
687+		return err
688+	}
689+
690+	redirectsFile := "/rss /rss.atom 200\n"
691+	ssg.Logger.Info("generating _redirects file", "text", redirectsFile)
692+	// create redirects file
693+	redirects := strings.NewReader(redirectsFile)
694+	err = ssg.upload(bucket, "_redirects", redirects)
695+	if err != nil {
696+		return err
697+	}
698+
699+	err = ssg.upload(bucket, "index.html", rdr)
700+	if err != nil {
701+		return err
702+	}
703+
704+	ssg.Logger.Info("generating discover rss page")
705+	rdr, wtr = io.Pipe()
706+	go func() {
707+		err = ssg.discoverRssPage(wtr)
708+		wtr.Close()
709+		if err != nil {
710+			ssg.Logger.Error("discover rss page", "err", err)
711+		}
712+	}()
713+
714+	err = ssg.upload(bucket, "rss.atom", rdr)
715+	if err != nil {
716+		return err
717+	}
718+
719+	ssg.Logger.Info("copying static folder for root", "dir", ssg.StaticDir)
720+	err = ssg.static(bucket)
721+	if err != nil {
722+		return err
723+	}
724+
725+	users, err := ssg.DB.FindUsers()
726+	if err != nil {
727+		return err
728+	}
729+
730+	for _, user := range users {
731+		if user.Name != "erock" {
732+			continue
733+		}
734+
735+		bucket, err := ssg.Storage.UpsertBucket(shared.GetAssetBucketName(user.ID))
736+		if err != nil {
737+			return err
738+		}
739+
740+		err = ssg.ProseBlog(user, bucket)
741+		if err != nil {
742+			log := shared.LoggerWithUser(ssg.Logger, user)
743+			log.Error("could not generate blog for user", "err", err)
744+		}
745+	}
746+
747+	return nil
748+}
749+
750+func (ssg *SSG) ProseBlog(user *db.User, bucket sst.Bucket) error {
751+	// programmatically generate redirects file based on aliases
752+	// and other routes that were in prose that need to be available
753+	redirectsFile := "/rss /rss.atom 301\n"
754+	logger := shared.LoggerWithUser(ssg.Logger, user)
755+
756+	data, err := ssg.DB.FindPostsForUser(&db.Pager{Num: 1000, Page: 0}, user.ID, Space)
757+	if err != nil {
758+		return err
759+	}
760+
761+	// don't generate a site with 0 posts
762+	if data.Total == 0 {
763+		return nil
764+	}
765+
766+	for _, post := range data.Data {
767+		if post.Slug == "" {
768+			logger.Warn("post slug empty, skipping")
769+			continue
770+		}
771+
772+		logger.Info("generating post", "slug", post.Slug)
773+		fpath := fmt.Sprintf("%s.html", post.Slug)
774+
775+		// create post file
776+		rdr, wtr := io.Pipe()
777+		go func() {
778+			aliases, err := ssg.postPage(wtr, user, post)
779+			wtr.Close()
780+			if err != nil {
781+				ssg.Logger.Error("post page", "err", err)
782+			}
783+			// add aliases to redirects file
784+			for _, alias := range aliases {
785+				redirectsFile += fmt.Sprintf("%s %s 200\n", alias, "/"+fpath)
786+			}
787+		}()
788+
789+		err = ssg.upload(bucket, fpath, rdr)
790+		if err != nil {
791+			return err
792+		}
793+
794+		// create raw post file
795+		fpath = post.Slug + ".md"
796+		mdRdr := strings.NewReader(post.Text)
797+		err = ssg.upload(bucket, fpath, mdRdr)
798+		if err != nil {
799+			return err
800+		}
801+	}
802+
803+	// create 404 page
804+	logger.Info("generating 404 page")
805+	rdr, wtr := io.Pipe()
806+	go func() {
807+		err = ssg.notFoundPage(wtr, user)
808+		wtr.Close()
809+		if err != nil {
810+			ssg.Logger.Error("not found page", "err", err)
811+		}
812+	}()
813+
814+	err = ssg.upload(bucket, "404.html", rdr)
815+	if err != nil {
816+		return err
817+	}
818+
819+	tags, err := ssg.DB.FindTagsForUser(user.ID, Space)
820+	tags = append(tags, "")
821+
822+	// create index files
823+	for _, tag := range tags {
824+		logger.Info("generating blog index page", "tag", tag)
825+		rdr, wtr := io.Pipe()
826+		go func() {
827+			err = ssg.blogPage(wtr, user, tag)
828+			wtr.Close()
829+			if err != nil {
830+				ssg.Logger.Error("blog page", "err", err)
831+			}
832+		}()
833+
834+		fpath := "index.html"
835+		if tag != "" {
836+			fpath = fmt.Sprintf("index-%s.html", tag)
837+		}
838+		err = ssg.upload(bucket, fpath, rdr)
839+		if err != nil {
840+			return err
841+		}
842+	}
843+
844+	logger.Info("generating blog rss page", "tag", "")
845+	rdr, wtr = io.Pipe()
846+	go func() {
847+		err = ssg.rssBlogPage(wtr, user, "")
848+		wtr.Close()
849+		if err != nil {
850+			ssg.Logger.Error("blog rss page", "err", err)
851+		}
852+	}()
853+
854+	fpath := "rss.atom"
855+	err = ssg.upload(bucket, fpath, rdr)
856+	if err != nil {
857+		return err
858+	}
859+
860+	logger.Info("generating _redirects file", "text", redirectsFile)
861+	// create redirects file
862+	redirects := strings.NewReader(redirectsFile)
863+	err = ssg.upload(bucket, "_redirects", redirects)
864+	if err != nil {
865+		return err
866+	}
867+
868+	post, _ := ssg.DB.FindPostWithFilename("_styles.css", user.ID, Space)
869+	if post != nil {
870+		stylerdr := strings.NewReader(post.Text)
871+		err = ssg.upload(bucket, "_styles.css", stylerdr)
872+		if err != nil {
873+			return err
874+		}
875+	}
876+
877+	logger.Info("copying static folder", "dir", ssg.StaticDir)
878+	err = ssg.static(bucket)
879+	if err != nil {
880+		return err
881+	}
882+
883+	logger.Info("copying images")
884+	err = ssg.images(user, bucket)
885+	if err != nil {
886+		return err
887+	}
888+
889+	return nil
890+}
M prose/ssh.go
+27, -5
 1@@ -3,6 +3,7 @@ package prose
 2 import (
 3 	"context"
 4 	"fmt"
 5+	"log/slog"
 6 	"os"
 7 	"os/signal"
 8 	"syscall"
 9@@ -25,6 +26,7 @@ import (
10 	"github.com/picosh/send/protocols/sftp"
11 	"github.com/picosh/send/proxy"
12 	"github.com/picosh/utils"
13+	pipeUtil "github.com/picosh/utils/pipe"
14 )
15 
16 func createRouter(handler *filehandlers.FileHandlerRouter, cliHandler *CliHandler) proxy.Router {
17@@ -53,6 +55,20 @@ func withProxy(handler *filehandlers.FileHandlerRouter, cliHandler *CliHandler,
18 	}
19 }
20 
21+func createPubProseDrain(ctx context.Context, logger *slog.Logger) *pipeUtil.ReconnectReadWriteCloser {
22+	info := shared.NewPicoPipeClient()
23+	send := pipeUtil.NewReconnectReadWriteCloser(
24+		ctx,
25+		logger,
26+		info,
27+		"pub to prose-drain",
28+		"pub prose-drain -b=false",
29+		100,
30+		-1,
31+	)
32+	return send
33+}
34+
35 func StartSshServer() {
36 	host := utils.GetEnv("PROSE_HOST", "0.0.0.0")
37 	port := utils.GetEnv("PROSE_SSH_PORT", "2222")
38@@ -61,9 +77,15 @@ func StartSshServer() {
39 	logger := cfg.Logger
40 	dbh := postgres.NewDB(cfg.DbURL, cfg.Logger)
41 	defer dbh.Close()
42+
43+	ctx := context.Background()
44+	defer ctx.Done()
45+	pipeClient := createPubProseDrain(ctx, logger)
46+
47 	hooks := &MarkdownHooks{
48-		Cfg: cfg,
49-		Db:  dbh,
50+		Cfg:  cfg,
51+		Db:   dbh,
52+		Pipe: pipeClient,
53 	}
54 
55 	var st storage.StorageServe
56@@ -75,14 +97,14 @@ func StartSshServer() {
57 	}
58 
59 	if err != nil {
60-		logger.Error(err.Error())
61+		logger.Error("storage", "err", err.Error())
62 		return
63 	}
64 
65 	fileMap := map[string]filehandlers.ReadWriteHandler{
66 		".md":      filehandlers.NewScpPostHandler(dbh, cfg, hooks, st),
67 		".css":     filehandlers.NewScpPostHandler(dbh, cfg, hooks, st),
68-		"fallback": uploadimgs.NewUploadImgHandler(dbh, cfg, st),
69+		"fallback": uploadimgs.NewUploadImgHandler(dbh, cfg, st, pipeClient),
70 	}
71 	handler := filehandlers.NewFileHandlerRouter(cfg, dbh, fileMap)
72 	handler.Spaces = []string{cfg.Space, "imgs"}
73@@ -104,7 +126,7 @@ func StartSshServer() {
74 		),
75 	)
76 	if err != nil {
77-		logger.Error(err.Error())
78+		logger.Error("wish server", "err", err.Error())
79 		return
80 	}
81 
M shared/mdparser.go
+7, -0
 1@@ -41,6 +41,7 @@ type MetaData struct {
 2 	Favicon     string
 3 	Hidden      bool
 4 	WithStyles  bool
 5+	Domain      string
 6 }
 7 
 8 type ParsedText struct {
 9@@ -275,6 +276,12 @@ func ParseText(text string) (*ParsedText, error) {
10 	}
11 	parsed.MetaData.Description = description
12 
13+	domain, err := toString(metaData["domain"])
14+	if err != nil {
15+		return &parsed, fmt.Errorf("front-matter field (%s): %w", "domain", err)
16+	}
17+	parsed.MetaData.Domain = domain
18+
19 	layout, err := toString(metaData["layout"])
20 	if err != nil {
21 		return &parsed, fmt.Errorf("front-matter field (%s): %w", "layout", err)
M shared/storage/fs.go
+1, -1
1@@ -17,7 +17,7 @@ type StorageFS struct {
2 }
3 
4 func NewStorageFS(logger *slog.Logger, dir string) (*StorageFS, error) {
5-	st, err := sst.NewStorageFS(dir)
6+	st, err := sst.NewStorageFS(logger, dir)
7 	if err != nil {
8 		return nil, err
9 	}