commit: e6c6e2f
parent: b4b879b
author: Eric Bower
date: 2025-01-18 08:09:08 -0500 EST
revert: ssg
14 files changed, +13, -1356
+0, -148
1@@ -1,148 +0,0 @@
2-package main
3-
4-import (
5- "bufio"
6- "context"
7- "encoding/json"
8- "sync"
9- "time"
10-
11- "github.com/picosh/pico/db/postgres"
12- fileshared "github.com/picosh/pico/filehandlers/shared"
13- "github.com/picosh/pico/prose"
14- "github.com/picosh/pico/shared"
15- "github.com/picosh/pico/shared/storage"
16-)
17-
18-func bail(err error) {
19- if err != nil {
20- panic(err)
21- }
22-}
23-
24-type RenderEvent struct {
25- UserID string
26- Service string
27-}
28-
29-// run queue on an interval to merge file uploads from same user.
30-func render(ssg *prose.SSG, ch chan RenderEvent) {
31- var pendingFlushes sync.Map
32- tick := time.Tick(10 * time.Second)
33- for {
34- select {
35- case event := <-ch:
36- ssg.Logger.Info("received request to generate blog", "userId", event.UserID)
37- pendingFlushes.Store(event.UserID, event.Service)
38- case <-tick:
39- ssg.Logger.Info("flushing ssg requests")
40- go func() {
41- pendingFlushes.Range(func(key, value any) bool {
42- pendingFlushes.Delete(key)
43- event := value.(RenderEvent)
44- user, err := ssg.DB.FindUser(event.UserID)
45- if err != nil {
46- ssg.Logger.Error("cannot find user", "err", err)
47- return true
48- }
49-
50- bucket, err := ssg.Storage.GetBucket(shared.GetAssetBucketName(user.ID))
51- if err != nil {
52- ssg.Logger.Error("cannot find bucket", "err", err)
53- return true
54- }
55-
56- err = ssg.ProseBlog(user, bucket, event.Service)
57- if err != nil {
58- ssg.Logger.Error("cannot generate blog", "err", err)
59- }
60- return true
61- })
62- }()
63- }
64- }
65-}
66-
67-func main() {
68- cfg := prose.NewConfigSite()
69- logger := cfg.Logger
70- picoDb := postgres.NewDB(cfg.DbURL, logger)
71- st, err := storage.NewStorageMinio(logger, cfg.MinioURL, cfg.MinioUser, cfg.MinioPass)
72- bail(err)
73-
74- ssg := &prose.SSG{
75- Cfg: cfg,
76- DB: picoDb,
77- Storage: st,
78- Logger: cfg.Logger,
79- TmplDir: "./prose/html",
80- StaticDir: "./prose/public",
81- }
82-
83- ctx := context.Background()
84- drain := fileshared.CreateSubUploadDrain(ctx, cfg.Logger)
85-
86- ch := make(chan RenderEvent)
87- go render(ssg, ch)
88-
89- for {
90- scanner := bufio.NewScanner(drain)
91- for scanner.Scan() {
92- var data fileshared.FileUploaded
93-
94- err := json.Unmarshal(scanner.Bytes(), &data)
95- if err != nil {
96- logger.Error("json unmarshal", "err", err)
97- continue
98- }
99-
100- // we don't care about any other pgs sites so ignore them
101- if data.Service == "pgs" && data.ProjectName != "prose" {
102- continue
103- }
104-
105- logger = logger.With(
106- "userId", data.UserID,
107- "filename", data.Filename,
108- "action", data.Action,
109- "project", data.ProjectName,
110- "service", data.Service,
111- )
112-
113- bucket, err := ssg.Storage.GetBucket(shared.GetAssetBucketName(data.UserID))
114- if err != nil {
115- ssg.Logger.Error("cannot find bucket", "err", err)
116- continue
117- }
118- user, err := ssg.DB.FindUser(data.UserID)
119- if err != nil {
120- logger.Error("cannot find user", "err", err)
121- continue
122- }
123-
124- if data.Action == "delete" {
125- err = st.DeleteObject(bucket, data.Filename)
126- if err != nil {
127- logger.Error("cannot delete object", "err", err)
128- }
129- post, err := ssg.DB.FindPostWithFilename(data.Filename, data.UserID, "prose")
130- if err != nil {
131- logger.Error("cannot find post", "err", err)
132- } else {
133- err = ssg.DB.RemovePosts([]string{post.ID})
134- if err != nil {
135- logger.Error("cannot delete post", "err", err)
136- }
137- }
138- ch <- RenderEvent{data.UserID, data.Service}
139- } else if data.Action == "create" || data.Action == "update" {
140- _, err := ssg.UpsertPost(user.ID, user.Name, bucket, data.Filename)
141- if err != nil {
142- logger.Error("cannot upsert post", "err", err)
143- continue
144- }
145- ch <- RenderEvent{data.UserID, data.Service}
146- }
147- }
148- }
149-}
+2, -16
1@@ -8,7 +8,6 @@ import (
2
3 "github.com/picosh/pico/db/postgres"
4 "github.com/picosh/pico/shared"
5- "github.com/picosh/pico/shared/storage"
6 )
7
8 func createStaticRoutes() []shared.Route {
9@@ -78,18 +77,6 @@ func StartApiServer() {
10 defer db.Close()
11 logger := cfg.Logger
12
13- var st storage.StorageServe
14- var err error
15- if cfg.MinioURL == "" {
16- st, err = storage.NewStorageFS(cfg.Logger, cfg.StorageDir)
17- } else {
18- st, err = storage.NewStorageMinio(cfg.Logger, cfg.MinioURL, cfg.MinioUser, cfg.MinioPass)
19- }
20-
21- if err != nil {
22- logger.Error(err.Error())
23- }
24-
25 // cron daily digest
26 fetcher := NewFetcher(db, cfg)
27 go fetcher.Loop()
28@@ -103,9 +90,8 @@ func StartApiServer() {
29 mainRoutes := createMainRoutes(staticRoutes)
30
31 apiConfig := &shared.ApiConfig{
32- Cfg: cfg,
33- Dbpool: db,
34- Storage: st,
35+ Cfg: cfg,
36+ Dbpool: db,
37 }
38 handler := shared.CreateServe(mainRoutes, []shared.Route{}, apiConfig)
39 router := http.HandlerFunc(handler)
+0, -5
1@@ -11,7 +11,6 @@ import (
2 "github.com/charmbracelet/ssh"
3 "github.com/picosh/pico/db"
4 "github.com/picosh/pico/filehandlers"
5- fileshared "github.com/picosh/pico/filehandlers/shared"
6 "github.com/picosh/pico/shared"
7 "github.com/picosh/utils"
8 )
9@@ -82,7 +81,3 @@ func (p *FeedHooks) FileMeta(s ssh.Session, data *filehandlers.PostMetaData) err
10
11 return nil
12 }
13-
14-func (p *FeedHooks) FileSuccess(s ssh.Session, data *fileshared.FileUploaded) error {
15- return nil
16-}
+1, -15
1@@ -14,7 +14,6 @@ import (
2 "github.com/picosh/pico/db/postgres"
3 "github.com/picosh/pico/filehandlers"
4 "github.com/picosh/pico/shared"
5- "github.com/picosh/pico/shared/storage"
6 wsh "github.com/picosh/pico/wish"
7 "github.com/picosh/send/auth"
8 "github.com/picosh/send/list"
9@@ -65,21 +64,8 @@ func StartSshServer() {
10 Db: dbh,
11 }
12
13- var st storage.StorageServe
14- var err error
15- if cfg.MinioURL == "" {
16- st, err = storage.NewStorageFS(cfg.Logger, cfg.StorageDir)
17- } else {
18- st, err = storage.NewStorageMinio(cfg.Logger, cfg.MinioURL, cfg.MinioUser, cfg.MinioPass)
19- }
20-
21- if err != nil {
22- logger.Error(err.Error())
23- return
24- }
25-
26 fileMap := map[string]filehandlers.ReadWriteHandler{
27- "fallback": filehandlers.NewScpPostHandler(dbh, cfg, hooks, st),
28+ "fallback": filehandlers.NewScpPostHandler(dbh, cfg, hooks),
29 }
30 handler := filehandlers.NewFileHandlerRouter(cfg, dbh, fileMap)
31
+1, -30
1@@ -2,7 +2,6 @@ package uploadimgs
2
3 import (
4 "encoding/binary"
5- "encoding/json"
6 "fmt"
7 "io"
8 "net/http"
9@@ -15,13 +14,11 @@ import (
10 "github.com/charmbracelet/ssh"
11 exifremove "github.com/neurosnap/go-exif-remove"
12 "github.com/picosh/pico/db"
13- fileshared "github.com/picosh/pico/filehandlers/shared"
14 "github.com/picosh/pico/shared"
15 "github.com/picosh/pico/shared/storage"
16 "github.com/picosh/pobj"
17 sendutils "github.com/picosh/send/utils"
18 "github.com/picosh/utils"
19- pipeUtil "github.com/picosh/utils/pipe"
20 )
21
22 var Space = "imgs"
23@@ -40,15 +37,13 @@ type UploadImgHandler struct {
24 DBPool db.DB
25 Cfg *shared.ConfigSite
26 Storage storage.StorageServe
27- Pipe *pipeUtil.ReconnectReadWriteCloser
28 }
29
30-func NewUploadImgHandler(dbpool db.DB, cfg *shared.ConfigSite, storage storage.StorageServe, pipeClient *pipeUtil.ReconnectReadWriteCloser) *UploadImgHandler {
31+func NewUploadImgHandler(dbpool db.DB, cfg *shared.ConfigSite, storage storage.StorageServe) *UploadImgHandler {
32 return &UploadImgHandler{
33 DBPool: dbpool,
34 Cfg: cfg,
35 Storage: storage,
36- Pipe: pipeClient,
37 }
38 }
39
40@@ -91,16 +86,6 @@ func (h *UploadImgHandler) Read(s ssh.Session, entry *sendutils.FileEntry) (os.F
41 return fileInfo, reader, nil
42 }
43
44-func (h *UploadImgHandler) Success(s ssh.Session, data *fileshared.FileUploaded) error {
45- out, err := json.Marshal(data)
46- if err != nil {
47- return err
48- }
49- out = append(out, '\n')
50- _, err = h.Pipe.Write(out)
51- return err
52-}
53-
54 func (h *UploadImgHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (string, error) {
55 logger := h.Cfg.Logger
56 user, err := h.DBPool.FindUser(s.Permissions().Extensions["user_id"])
57@@ -186,13 +171,6 @@ func (h *UploadImgHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (str
58 return "", err
59 }
60
61- _ = h.Success(s, &fileshared.FileUploaded{
62- UserID: user.ID,
63- Action: "create",
64- Filename: metadata.Filename,
65- Service: "prose",
66- })
67-
68 curl := shared.NewCreateURL(h.Cfg)
69 url := h.Cfg.FullPostURL(
70 curl,
71@@ -251,12 +229,5 @@ func (h *UploadImgHandler) Delete(s ssh.Session, entry *sendutils.FileEntry) err
72 return err
73 }
74
75- _ = h.Success(s, &fileshared.FileUploaded{
76- UserID: user.ID,
77- Action: "delete",
78- Filename: filename,
79- Service: "prose",
80- })
81-
82 return nil
83 }
+1, -19
1@@ -12,9 +12,7 @@ import (
2
3 "github.com/charmbracelet/ssh"
4 "github.com/picosh/pico/db"
5- fileshared "github.com/picosh/pico/filehandlers/shared"
6 "github.com/picosh/pico/shared"
7- "github.com/picosh/pico/shared/storage"
8 sendutils "github.com/picosh/send/utils"
9 "github.com/picosh/utils"
10 )
11@@ -31,7 +29,6 @@ type PostMetaData struct {
12 type ScpFileHooks interface {
13 FileValidate(s ssh.Session, data *PostMetaData) (bool, error)
14 FileMeta(s ssh.Session, data *PostMetaData) error
15- FileSuccess(s ssh.Session, data *fileshared.FileUploaded) error
16 }
17
18 type ScpUploadHandler struct {
19@@ -40,7 +37,7 @@ type ScpUploadHandler struct {
20 Hooks ScpFileHooks
21 }
22
23-func NewScpPostHandler(dbpool db.DB, cfg *shared.ConfigSite, hooks ScpFileHooks, st storage.StorageServe) *ScpUploadHandler {
24+func NewScpPostHandler(dbpool db.DB, cfg *shared.ConfigSite, hooks ScpFileHooks) *ScpUploadHandler {
25 return &ScpUploadHandler{
26 DBPool: dbpool,
27 Cfg: cfg,
28@@ -157,9 +154,7 @@ func (h *ScpUploadHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (str
29 modTime = time.Unix(entry.Mtime, 0)
30 }
31
32- action := ""
33 if post == nil {
34- action = "create"
35 logger.Info("file not found, adding record")
36 insertPost := db.Post{
37 UserID: userID,
38@@ -216,7 +211,6 @@ func (h *ScpUploadHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (str
39 return h.Cfg.FullPostURL(curl, user.Name, metadata.Slug), nil
40 }
41
42- action = "update"
43 logger.Info("file found, updating record")
44
45 updatePost := db.Post{
46@@ -261,12 +255,6 @@ func (h *ScpUploadHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (str
47 }
48 }
49
50- _ = h.Hooks.FileSuccess(s, &fileshared.FileUploaded{
51- UserID: user.ID,
52- Action: action,
53- Filename: metadata.Filename,
54- Service: h.Cfg.Space,
55- })
56 curl := shared.NewCreateURL(h.Cfg)
57 return h.Cfg.FullPostURL(curl, user.Name, metadata.Slug), nil
58 }
59@@ -301,11 +289,5 @@ func (h *ScpUploadHandler) Delete(s ssh.Session, entry *sendutils.FileEntry) err
60 logger.Error("post could not remove", "err", err.Error())
61 return fmt.Errorf("error for %s: %v", filename, err)
62 }
63- _ = h.Hooks.FileSuccess(s, &fileshared.FileUploaded{
64- UserID: user.ID,
65- Action: "delete",
66- Filename: filename,
67- Service: h.Cfg.Space,
68- })
69 return nil
70 }
1@@ -1,57 +0,0 @@
2-package fileshared
3-
4-import (
5- "context"
6- "encoding/json"
7- "log/slog"
8-
9- "github.com/picosh/pico/shared"
10- pipeUtil "github.com/picosh/utils/pipe"
11-)
12-
13-type FileUploaded struct {
14- UserID string `json:"user_id"`
15- Action string `json:"action"`
16- Filename string `json:"filename"`
17- Service string `json:"service"`
18- ProjectName string `json:"project_name"`
19-}
20-
21-func CreatePubUploadDrain(ctx context.Context, logger *slog.Logger) *pipeUtil.ReconnectReadWriteCloser {
22- info := shared.NewPicoPipeClient()
23- send := pipeUtil.NewReconnectReadWriteCloser(
24- ctx,
25- logger,
26- info,
27- "pub to upload-drain",
28- "pub upload-drain -b=false",
29- 100,
30- -1,
31- )
32- return send
33-}
34-
35-func WriteUploadDrain(drain *pipeUtil.ReconnectReadWriteCloser, upload *FileUploaded) error {
36- jso, err := json.Marshal(upload)
37- if err != nil {
38- return err
39- }
40-
41- jso = append(jso, '\n')
42- _, err = drain.Write(jso)
43- return err
44-}
45-
46-func CreateSubUploadDrain(ctx context.Context, logger *slog.Logger) *pipeUtil.ReconnectReadWriteCloser {
47- info := shared.NewPicoPipeClient()
48- send := pipeUtil.NewReconnectReadWriteCloser(
49- ctx,
50- logger,
51- info,
52- "sub to upload-drain",
53- "sub upload-drain -k",
54- 100,
55- -1,
56- )
57- return send
58-}
+2, -17
1@@ -11,7 +11,6 @@ import (
2 "github.com/picosh/pico/db"
3 "github.com/picosh/pico/db/postgres"
4 "github.com/picosh/pico/shared"
5- "github.com/picosh/pico/shared/storage"
6 "github.com/picosh/utils"
7 )
8
9@@ -370,19 +369,6 @@ func StartApiServer() {
10 defer db.Close()
11 logger := cfg.Logger
12
13- var st storage.StorageServe
14- var err error
15- if cfg.MinioURL == "" {
16- st, err = storage.NewStorageFS(cfg.Logger, cfg.StorageDir)
17- } else {
18- st, err = storage.NewStorageMinio(cfg.Logger, cfg.MinioURL, cfg.MinioUser, cfg.MinioPass)
19- }
20-
21- if err != nil {
22- logger.Error("could not create storage adapter", "err", err.Error())
23- return
24- }
25-
26 go CronDeleteExpiredPosts(cfg, db)
27
28 staticRoutes := createStaticRoutes()
29@@ -395,9 +381,8 @@ func StartApiServer() {
30 subdomainRoutes := createSubdomainRoutes(staticRoutes)
31
32 apiConfig := &shared.ApiConfig{
33- Cfg: cfg,
34- Dbpool: db,
35- Storage: st,
36+ Cfg: cfg,
37+ Dbpool: db,
38 }
39 handler := shared.CreateServe(mainRoutes, subdomainRoutes, apiConfig)
40 router := http.HandlerFunc(handler)
+0, -5
1@@ -10,7 +10,6 @@ import (
2 "github.com/charmbracelet/ssh"
3 "github.com/picosh/pico/db"
4 "github.com/picosh/pico/filehandlers"
5- fileshared "github.com/picosh/pico/filehandlers/shared"
6 "github.com/picosh/pico/shared"
7 "github.com/picosh/utils"
8 )
9@@ -98,7 +97,3 @@ func (p *FileHooks) FileMeta(s ssh.Session, data *filehandlers.PostMetaData) err
10
11 return nil
12 }
13-
14-func (p *FileHooks) FileSuccess(s ssh.Session, data *fileshared.FileUploaded) error {
15- return nil
16-}
+1, -15
1@@ -14,7 +14,6 @@ import (
2 "github.com/picosh/pico/db/postgres"
3 "github.com/picosh/pico/filehandlers"
4 "github.com/picosh/pico/shared"
5- "github.com/picosh/pico/shared/storage"
6 wsh "github.com/picosh/pico/wish"
7 "github.com/picosh/send/auth"
8 "github.com/picosh/send/list"
9@@ -64,21 +63,8 @@ func StartSshServer() {
10 Db: dbh,
11 }
12
13- var st storage.StorageServe
14- var err error
15- if cfg.MinioURL == "" {
16- st, err = storage.NewStorageFS(cfg.Logger, cfg.StorageDir)
17- } else {
18- st, err = storage.NewStorageMinio(cfg.Logger, cfg.MinioURL, cfg.MinioUser, cfg.MinioPass)
19- }
20-
21- if err != nil {
22- logger.Error(err.Error())
23- return
24- }
25-
26 fileMap := map[string]filehandlers.ReadWriteHandler{
27- "fallback": filehandlers.NewScpPostHandler(dbh, cfg, hooks, st),
28+ "fallback": filehandlers.NewScpPostHandler(dbh, cfg, hooks),
29 }
30 handler := filehandlers.NewFileHandlerRouter(cfg, dbh, fileMap)
31 sshAuth := shared.NewSshAuthHandler(dbh, logger, cfg)
+0, -28
1@@ -18,13 +18,11 @@ import (
2 "github.com/charmbracelet/ssh"
3 "github.com/charmbracelet/wish"
4 "github.com/picosh/pico/db"
5- fileshared "github.com/picosh/pico/filehandlers/shared"
6 "github.com/picosh/pico/shared"
7 "github.com/picosh/pobj"
8 sst "github.com/picosh/pobj/storage"
9 sendutils "github.com/picosh/send/utils"
10 "github.com/picosh/utils"
11- pipeutils "github.com/picosh/utils/pipe"
12 ignore "github.com/sabhiram/go-gitignore"
13 )
14
15@@ -105,7 +103,6 @@ type UploadAssetHandler struct {
16 Cfg *shared.ConfigSite
17 Storage sst.ObjectStorage
18 CacheClearingQueue chan string
19- UploadDrain *pipeutils.ReconnectReadWriteCloser
20 }
21
22 func NewUploadAssetHandler(dbpool db.DB, cfg *shared.ConfigSite, storage sst.ObjectStorage, ctx context.Context) *UploadAssetHandler {
23@@ -113,13 +110,11 @@ func NewUploadAssetHandler(dbpool db.DB, cfg *shared.ConfigSite, storage sst.Obj
24 ch := make(chan string, 100)
25 go runCacheQueue(cfg, ctx, ch)
26 // publish all file uploads to a pipe topic
27- drain := fileshared.CreatePubUploadDrain(ctx, cfg.Logger)
28 return &UploadAssetHandler{
29 DBPool: dbpool,
30 Cfg: cfg,
31 Storage: storage,
32 CacheClearingQueue: ch,
33- UploadDrain: drain,
34 }
35 }
36
37@@ -418,21 +413,6 @@ func (h *UploadAssetHandler) Write(s ssh.Session, entry *sendutils.FileEntry) (s
38 surrogate := getSurrogateKey(user.Name, projectName)
39 h.CacheClearingQueue <- surrogate
40
41- action := ""
42- if curFileSize == 0 {
43- action = "create"
44- } else {
45- action = "updated"
46- }
47- upload := &fileshared.FileUploaded{
48- UserID: user.ID,
49- Action: action,
50- Filename: assetFilename,
51- Service: h.Cfg.Space,
52- ProjectName: projectName,
53- }
54- err = fileshared.WriteUploadDrain(h.UploadDrain, upload)
55-
56 return str, err
57 }
58
59@@ -507,14 +487,6 @@ func (h *UploadAssetHandler) Delete(s ssh.Session, entry *sendutils.FileEntry) e
60 return err
61 }
62
63- upload := &fileshared.FileUploaded{
64- UserID: user.ID,
65- Action: "delete",
66- Filename: assetFilepath,
67- Service: h.Cfg.Space,
68- ProjectName: projectName,
69- }
70- err = fileshared.WriteUploadDrain(h.UploadDrain, upload)
71 return err
72 }
73
+0, -5
1@@ -9,7 +9,6 @@ import (
2 "github.com/charmbracelet/ssh"
3 "github.com/picosh/pico/db"
4 "github.com/picosh/pico/filehandlers"
5- fileshared "github.com/picosh/pico/filehandlers/shared"
6 "github.com/picosh/pico/shared"
7 "github.com/picosh/utils"
8 pipeUtil "github.com/picosh/utils/pipe"
9@@ -75,7 +74,3 @@ func (p *MarkdownHooks) FileMeta(s ssh.Session, data *filehandlers.PostMetaData)
10
11 return nil
12 }
13-
14-func (p *MarkdownHooks) FileSuccess(s ssh.Session, data *fileshared.FileUploaded) error {
15- return fileshared.WriteUploadDrain(p.Pipe, data)
16-}
+0, -985
1@@ -1,985 +0,0 @@
2-package prose
3-
4-import (
5- "bytes"
6- "fmt"
7- "html/template"
8- "io"
9- "log/slog"
10- "os"
11- "path/filepath"
12- "strings"
13- "time"
14-
15- "slices"
16-
17- "github.com/gorilla/feeds"
18- "github.com/picosh/pico/db"
19- "github.com/picosh/pico/shared"
20- "github.com/picosh/pico/shared/storage"
21- sst "github.com/picosh/pobj/storage"
22- sendUtils "github.com/picosh/send/utils"
23- "github.com/picosh/utils"
24-)
25-
26-type SSG struct {
27- Logger *slog.Logger
28- DB db.DB
29- Cfg *shared.ConfigSite
30- Storage storage.StorageServe
31- TmplDir string
32- StaticDir string
33-}
34-
35-var Space = "prose"
36-
37-func getPostTitle(post *db.Post) string {
38- if post.Description == "" {
39- return post.Title
40- }
41-
42- return fmt.Sprintf("%s: %s", post.Title, post.Description)
43-}
44-
45-func getBlogName(username string) string {
46- return fmt.Sprintf("%s's blog", username)
47-}
48-
49-func getBlogDomain(username, domain string) string {
50- return fmt.Sprintf("%s.%s", username, domain)
51-}
52-
53-func (ssg *SSG) tmpl(fpath string) string {
54- return filepath.Join(ssg.TmplDir, fpath)
55-}
56-
57-func (ssg *SSG) blogPage(w io.Writer, user *db.User, blog *UserBlogData, tag string) error {
58- files := []string{
59- ssg.tmpl("blog.page.tmpl"),
60- ssg.tmpl("blog-default.partial.tmpl"),
61- ssg.tmpl("blog-aside.partial.tmpl"),
62- ssg.tmpl("footer.partial.tmpl"),
63- ssg.tmpl("marketing-footer.partial.tmpl"),
64- ssg.tmpl("base.layout.tmpl"),
65- }
66- ts, err := template.ParseFiles(files...)
67- if err != nil {
68- return err
69- }
70-
71- headerTxt := &HeaderTxt{
72- Title: getBlogName(user.Name),
73- Bio: "",
74- Layout: "default",
75- ImageCard: "summary",
76- WithStyles: true,
77- Domain: getBlogDomain(user.Name, ssg.Cfg.Domain),
78- }
79- readmeTxt := &ReadmeTxt{}
80- readme := blog.Readme
81- if readme != nil {
82- parsedText, err := shared.ParseText(readme.Text)
83- if err != nil {
84- return err
85- }
86- headerTxt.Bio = parsedText.Description
87- headerTxt.Layout = parsedText.Layout
88- headerTxt.Image = template.URL(parsedText.Image)
89- headerTxt.ImageCard = parsedText.ImageCard
90- headerTxt.WithStyles = parsedText.WithStyles
91- headerTxt.Favicon = template.URL(parsedText.Favicon)
92- if parsedText.Title != "" {
93- headerTxt.Title = parsedText.Title
94- }
95- if parsedText.Domain != "" {
96- headerTxt.Domain = parsedText.Domain
97- }
98-
99- headerTxt.Nav = []shared.Link{}
100- for _, nav := range parsedText.Nav {
101- finURL := nav.URL
102- headerTxt.Nav = append(headerTxt.Nav, shared.Link{
103- URL: finURL,
104- Text: nav.Text,
105- })
106- }
107-
108- readmeTxt.Contents = template.HTML(parsedText.Html)
109- if len(readmeTxt.Contents) > 0 {
110- readmeTxt.HasText = true
111- }
112- }
113-
114- hasCSS := blog.CSS != nil
115- postCollection := []PostItemData{}
116- for _, post := range blog.Posts {
117- if tag != "" {
118- parsed, err := shared.ParseText(post.Text)
119- if err != nil {
120- blog.Logger.Error("post parse text", "err", err)
121- continue
122- }
123- if !slices.Contains(parsed.Tags, tag) {
124- continue
125- }
126- }
127-
128- p := PostItemData{
129- URL: template.URL(
130- fmt.Sprintf("/%s", post.Slug),
131- ),
132- BlogURL: template.URL("/"),
133- Title: utils.FilenameToTitle(post.Filename, post.Title),
134- PublishAt: post.PublishAt.Format(time.DateOnly),
135- PublishAtISO: post.PublishAt.Format(time.RFC3339),
136- UpdatedTimeAgo: utils.TimeAgo(post.UpdatedAt),
137- UpdatedAtISO: post.UpdatedAt.Format(time.RFC3339),
138- }
139- postCollection = append(postCollection, p)
140- }
141-
142- rssIdx := "/rss.atom"
143- data := BlogPageData{
144- Site: *ssg.Cfg.GetSiteData(),
145- PageTitle: headerTxt.Title,
146- URL: template.URL(fmt.Sprintf("%s://%s", ssg.Cfg.Protocol, headerTxt.Domain)),
147- RSSURL: template.URL(rssIdx),
148- Readme: readmeTxt,
149- Header: headerTxt,
150- Username: user.Name,
151- Posts: postCollection,
152- HasCSS: hasCSS,
153- CssURL: template.URL("/_styles.css"),
154- HasFilter: tag != "",
155- WithStyles: headerTxt.WithStyles,
156- }
157-
158- return ts.Execute(w, data)
159-}
160-
161-func (ssg *SSG) rssBlogPage(w io.Writer, user *db.User, blog *UserBlogData) error {
162- ts, err := template.ParseFiles(ssg.tmpl("rss.page.tmpl"))
163- if err != nil {
164- return err
165- }
166-
167- headerTxt := &HeaderTxt{
168- Title: getBlogName(user.Name),
169- Domain: getBlogDomain(user.Name, ssg.Cfg.Domain),
170- }
171-
172- readme := blog.Readme
173- if readme != nil {
174- parsedText, err := shared.ParseText(readme.Text)
175- if err != nil {
176- return err
177- }
178- if parsedText.Title != "" {
179- headerTxt.Title = parsedText.Title
180- }
181-
182- if parsedText.Description != "" {
183- headerTxt.Bio = parsedText.Description
184- }
185-
186- if parsedText.Domain != "" {
187- headerTxt.Domain = parsedText.Domain
188- }
189- }
190-
191- blogUrl := fmt.Sprintf("%s://%s", ssg.Cfg.Protocol, headerTxt.Domain)
192-
193- feed := &feeds.Feed{
194- Id: blogUrl,
195- Title: headerTxt.Title,
196- Link: &feeds.Link{Href: blogUrl},
197- Description: headerTxt.Bio,
198- Author: &feeds.Author{Name: user.Name},
199- Created: *user.CreatedAt,
200- }
201-
202- var feedItems []*feeds.Item
203- for _, post := range blog.Posts {
204- if slices.Contains(ssg.Cfg.HiddenPosts, post.Filename) {
205- continue
206- }
207- parsed, err := shared.ParseText(post.Text)
208- if err != nil {
209- return err
210- }
211-
212- footer := blog.Footer
213- var footerHTML string
214- if footer != nil {
215- footerParsed, err := shared.ParseText(footer.Text)
216- if err != nil {
217- return err
218- }
219- footerHTML = footerParsed.Html
220- }
221-
222- var tpl bytes.Buffer
223- data := &PostPageData{
224- Contents: template.HTML(parsed.Html + footerHTML),
225- }
226- if err := ts.Execute(&tpl, data); err != nil {
227- continue
228- }
229-
230- realUrl := fmt.Sprintf("%s://%s/%s", ssg.Cfg.Protocol, headerTxt.Domain, post.Slug)
231- feedId := realUrl
232-
233- item := &feeds.Item{
234- Id: feedId,
235- Title: utils.FilenameToTitle(post.Filename, post.Title),
236- Link: &feeds.Link{Href: realUrl},
237- Content: tpl.String(),
238- Updated: *post.UpdatedAt,
239- Created: *post.PublishAt,
240- Description: post.Description,
241- }
242-
243- if post.Description != "" {
244- item.Description = post.Description
245- }
246-
247- feedItems = append(feedItems, item)
248- }
249- feed.Items = feedItems
250-
251- rss, err := feed.ToAtom()
252- if err != nil {
253- return err
254- }
255-
256- _, err = w.Write([]byte(rss))
257- return err
258-}
259-
260-func (ssg *SSG) writePostPage(w io.Writer, user *db.User, post *db.Post, blog *UserBlogData) (*shared.ParsedText, error) {
261- blogName := getBlogName(user.Name)
262- favicon := ""
263- ogImage := ""
264- ogImageCard := ""
265- withStyles := true
266- domain := getBlogDomain(user.Name, ssg.Cfg.Domain)
267- var data PostPageData
268-
269- footer := blog.Footer
270- var footerHTML template.HTML
271- if footer != nil {
272- footerParsed, err := shared.ParseText(footer.Text)
273- if err != nil {
274- return nil, err
275- }
276- footerHTML = template.HTML(footerParsed.Html)
277- }
278-
279- // we need the blog name from the readme unfortunately
280- readme := blog.Readme
281- if readme != nil {
282- readmeParsed, err := shared.ParseText(readme.Text)
283- if err != nil {
284- return nil, err
285- }
286- if readmeParsed.MetaData.Title != "" {
287- blogName = readmeParsed.MetaData.Title
288- }
289- if readmeParsed.MetaData.Domain != "" {
290- domain = readmeParsed.MetaData.Domain
291- }
292- withStyles = readmeParsed.WithStyles
293- ogImage = readmeParsed.Image
294- ogImageCard = readmeParsed.ImageCard
295- favicon = readmeParsed.Favicon
296- }
297-
298- diff := ""
299- parsedText, err := shared.ParseText(post.Text)
300- if err != nil {
301- return nil, err
302- }
303-
304- if parsedText.Image != "" {
305- ogImage = parsedText.Image
306- }
307-
308- if parsedText.ImageCard != "" {
309- ogImageCard = parsedText.ImageCard
310- }
311-
312- unlisted := false
313- if post.Hidden || post.PublishAt.After(time.Now()) {
314- unlisted = true
315- }
316-
317- data = PostPageData{
318- Site: *ssg.Cfg.GetSiteData(),
319- PageTitle: getPostTitle(post),
320- URL: template.URL(
321- fmt.Sprintf("%s://%s/%s", ssg.Cfg.Protocol, domain, post.Slug),
322- ),
323- BlogURL: "/",
324- Description: post.Description,
325- Title: utils.FilenameToTitle(post.Filename, post.Title),
326- Slug: post.Slug,
327- PublishAt: post.PublishAt.Format(time.DateOnly),
328- PublishAtISO: post.PublishAt.Format(time.RFC3339),
329- Username: user.Name,
330- BlogName: blogName,
331- Contents: template.HTML(parsedText.Html),
332- HasCSS: blog.CSS != nil,
333- CssURL: template.URL("/_styles.css"),
334- Tags: parsedText.Tags,
335- Image: template.URL(ogImage),
336- ImageCard: ogImageCard,
337- Favicon: template.URL(favicon),
338- Footer: footerHTML,
339- Unlisted: unlisted,
340- Diff: template.HTML(diff),
341- WithStyles: withStyles,
342- }
343-
344- files := []string{
345- ssg.tmpl("post.page.tmpl"),
346- ssg.tmpl("footer.partial.tmpl"),
347- ssg.tmpl("marketing-footer.partial.tmpl"),
348- ssg.tmpl("base.layout.tmpl"),
349- }
350- ts, err := template.ParseFiles(files...)
351- if err != nil {
352- return nil, err
353- }
354-
355- return parsedText, ts.Execute(w, data)
356-}
357-
358-func (ssg *SSG) discoverPage(w io.Writer) error {
359- pager, err := ssg.DB.FindAllPosts(&db.Pager{Num: 50, Page: 0}, Space)
360- if err != nil {
361- return err
362- }
363-
364- data := ReadPageData{
365- Site: *ssg.Cfg.GetSiteData(),
366- }
367-
368- for _, post := range pager.Data {
369- item := PostItemData{
370- URL: template.URL(
371- fmt.Sprintf(
372- "%s://%s/%s",
373- ssg.Cfg.Protocol,
374- getBlogDomain(post.Username, ssg.Cfg.Domain),
375- post.Slug,
376- ),
377- ),
378- BlogURL: template.URL(getBlogDomain(post.Username, ssg.Cfg.Domain)),
379- Title: utils.FilenameToTitle(post.Filename, post.Title),
380- Description: post.Description,
381- Username: post.Username,
382- PublishAt: post.PublishAt.Format(time.DateOnly),
383- PublishAtISO: post.PublishAt.Format(time.RFC3339),
384- UpdatedTimeAgo: utils.TimeAgo(post.UpdatedAt),
385- UpdatedAtISO: post.UpdatedAt.Format(time.RFC3339),
386- }
387- data.Posts = append(data.Posts, item)
388- }
389-
390- files := []string{
391- ssg.tmpl("read.page.tmpl"),
392- ssg.tmpl("footer.partial.tmpl"),
393- ssg.tmpl("marketing-footer.partial.tmpl"),
394- ssg.tmpl("base.layout.tmpl"),
395- }
396- ts, err := template.ParseFiles(files...)
397- if err != nil {
398- return err
399- }
400-
401- return ts.Execute(w, data)
402-}
403-
404-func (ssg *SSG) discoverRssPage(w io.Writer) error {
405- pager, err := ssg.DB.FindAllPosts(&db.Pager{Num: 25, Page: 0}, Space)
406- if err != nil {
407- return err
408- }
409-
410- files := []string{
411- ssg.tmpl("rss.page.tmpl"),
412- }
413- ts, err := template.ParseFiles(files...)
414- if err != nil {
415- return err
416- }
417-
418- feed := &feeds.Feed{
419- Title: fmt.Sprintf("%s discovery feed", ssg.Cfg.Domain),
420- Link: &feeds.Link{
421- Href: fmt.Sprintf("%s://%s", ssg.Cfg.Protocol, ssg.Cfg.Domain),
422- },
423- Description: fmt.Sprintf("%s latest posts", ssg.Cfg.Domain),
424- Author: &feeds.Author{Name: ssg.Cfg.Domain},
425- Created: time.Now(),
426- }
427-
428- var feedItems []*feeds.Item
429- for _, post := range pager.Data {
430- parsed, err := shared.ParseText(post.Text)
431- if err != nil {
432- return err
433- }
434-
435- var tpl bytes.Buffer
436- data := &PostPageData{
437- Contents: template.HTML(parsed.Html),
438- }
439- if err := ts.Execute(&tpl, data); err != nil {
440- continue
441- }
442-
443- realUrl := fmt.Sprintf(
444- "%s://%s/%s",
445- ssg.Cfg.Protocol,
446- getBlogDomain(post.Username, ssg.Cfg.Domain),
447- post.Slug,
448- )
449-
450- item := &feeds.Item{
451- Id: realUrl,
452- Title: post.Title,
453- Link: &feeds.Link{Href: realUrl},
454- Content: tpl.String(),
455- Created: *post.PublishAt,
456- Updated: *post.UpdatedAt,
457- Description: post.Description,
458- Author: &feeds.Author{Name: post.Username},
459- }
460-
461- if post.Description != "" {
462- item.Description = post.Description
463- }
464-
465- feedItems = append(feedItems, item)
466- }
467- feed.Items = feedItems
468-
469- rss, err := feed.ToAtom()
470- if err != nil {
471- return err
472- }
473-
474- _, err = w.Write([]byte(rss))
475- return err
476-}
477-
478-func (ssg *SSG) upload(logger *slog.Logger, bucket sst.Bucket, fpath string, rdr io.Reader) error {
479- toSite := filepath.Join("prose", fpath)
480- logger.Info("uploading object", "bucket", bucket.Name, "object", toSite)
481- buf := &bytes.Buffer{}
482- size, err := io.Copy(buf, rdr)
483- if err != nil {
484- return err
485- }
486-
487- _, _, err = ssg.Storage.PutObject(bucket, toSite, buf, &sendUtils.FileEntry{
488- Mtime: time.Now().Unix(),
489- Size: size,
490- })
491- return err
492-}
493-
494-func (ssg *SSG) notFoundPage(w io.Writer, user *db.User, blog *UserBlogData) error {
495- ogImage := ""
496- ogImageCard := ""
497- favicon := ""
498- contents := template.HTML("Oops! we can't seem to find this post.")
499- title := "Post not found"
500- desc := "Post not found"
501- hasCSS := blog.CSS != nil
502-
503- footer := blog.Footer
504- var footerHTML template.HTML
505- if footer != nil {
506- footerParsed, err := shared.ParseText(footer.Text)
507- if err != nil {
508- return err
509- }
510- footerHTML = template.HTML(footerParsed.Html)
511- }
512-
513- // we need the blog name from the readme unfortunately
514- readme := blog.Readme
515- if readme != nil {
516- readmeParsed, err := shared.ParseText(readme.Text)
517- if err != nil {
518- return err
519- }
520- ogImage = readmeParsed.Image
521- ogImageCard = readmeParsed.ImageCard
522- favicon = readmeParsed.Favicon
523- }
524-
525- notFound := blog.NotFound
526- if notFound != nil {
527- notFoundParsed, err := shared.ParseText(notFound.Text)
528- if err != nil {
529- blog.Logger.Error("could not parse markdown", "err", err.Error())
530- return err
531- }
532- if notFoundParsed.MetaData.Title != "" {
533- title = notFoundParsed.MetaData.Title
534- }
535- if notFoundParsed.MetaData.Description != "" {
536- desc = notFoundParsed.MetaData.Description
537- }
538- ogImage = notFoundParsed.Image
539- ogImageCard = notFoundParsed.ImageCard
540- favicon = notFoundParsed.Favicon
541- contents = template.HTML(notFoundParsed.Html)
542- }
543-
544- data := PostPageData{
545- Site: *ssg.Cfg.GetSiteData(),
546- BlogURL: "/",
547- PageTitle: title,
548- Description: desc,
549- Title: title,
550- PublishAt: time.Now().Format(time.DateOnly),
551- PublishAtISO: time.Now().Format(time.RFC3339),
552- Username: user.Name,
553- BlogName: getBlogName(user.Name),
554- HasCSS: hasCSS,
555- CssURL: template.URL("/_styles.css"),
556- Image: template.URL(ogImage),
557- ImageCard: ogImageCard,
558- Favicon: template.URL(favicon),
559- Footer: footerHTML,
560- Contents: contents,
561- Unlisted: true,
562- }
563- files := []string{
564- ssg.tmpl("post.page.tmpl"),
565- ssg.tmpl("footer.partial.tmpl"),
566- ssg.tmpl("marketing-footer.partial.tmpl"),
567- ssg.tmpl("base.layout.tmpl"),
568- }
569- ts, err := template.ParseFiles(files...)
570- if err != nil {
571- return err
572- }
573- return ts.Execute(w, data)
574-}
575-
576-func (ssg *SSG) images(user *db.User, blog *UserBlogData) error {
577- imgBucket, err := ssg.Storage.GetBucket(shared.GetImgsBucketName(user.ID))
578- if err != nil {
579- blog.Logger.Info("user does not have an images dir, skipping")
580- return nil
581- }
582- imgs, err := ssg.Storage.ListObjects(imgBucket, "/", false)
583- if err != nil {
584- return err
585- }
586-
587- for _, inf := range imgs {
588- rdr, _, err := ssg.Storage.GetObject(imgBucket, inf.Name())
589- if err != nil {
590- return err
591- }
592- err = ssg.upload(blog.Logger, blog.Bucket, inf.Name(), rdr)
593- if err != nil {
594- return err
595- }
596- }
597-
598- return nil
599-}
600-
601-func (ssg *SSG) static(logger *slog.Logger, bucket sst.Bucket) error {
602- files, err := os.ReadDir(ssg.StaticDir)
603- if err != nil {
604- return err
605- }
606- for _, file := range files {
607- if file.IsDir() {
608- continue
609- }
610- fpath := filepath.Join(ssg.StaticDir, file.Name())
611- fp, err := os.Open(fpath)
612- if err != nil {
613- return err
614- }
615- err = ssg.upload(logger, bucket, file.Name(), fp)
616- if err != nil {
617- return err
618- }
619- }
620-
621- return nil
622-}
623-
624-func (ssg *SSG) Prose() error {
625- ssg.Logger.Info("generating discover page")
626- rdr, wtr := io.Pipe()
627- go func() {
628- err := ssg.discoverPage(wtr)
629- wtr.Close()
630- if err != nil {
631- ssg.Logger.Error("discover page", "err", err)
632- }
633- }()
634-
635- user, err := ssg.DB.FindUserForName("pico")
636- if err != nil {
637- return err
638- }
639-
640- bucketName := shared.GetAssetBucketName(user.ID)
641- bucket, err := ssg.Storage.UpsertBucket(bucketName)
642- if err != nil {
643- return err
644- }
645-
646- redirectsFile := "/rss /rss.atom 200\n"
647- ssg.Logger.Info("generating _redirects file", "text", redirectsFile)
648- // create redirects file
649- redirects := strings.NewReader(redirectsFile)
650- err = ssg.upload(ssg.Logger, bucket, "_redirects", redirects)
651- if err != nil {
652- return err
653- }
654-
655- err = ssg.upload(ssg.Logger, bucket, "index.html", rdr)
656- if err != nil {
657- return err
658- }
659-
660- ssg.Logger.Info("generating discover rss page")
661- rdr, wtr = io.Pipe()
662- go func() {
663- err = ssg.discoverRssPage(wtr)
664- wtr.Close()
665- if err != nil {
666- ssg.Logger.Error("discover rss page", "err", err)
667- }
668- }()
669-
670- err = ssg.upload(ssg.Logger, bucket, "rss.atom", rdr)
671- if err != nil {
672- return err
673- }
674-
675- ssg.Logger.Info("copying static folder for root", "dir", ssg.StaticDir)
676- err = ssg.static(ssg.Logger, bucket)
677- if err != nil {
678- return err
679- }
680-
681- users, err := ssg.DB.FindUsers()
682- if err != nil {
683- return err
684- }
685-
686- for _, user := range users {
687- bucket, err := ssg.Storage.UpsertBucket(shared.GetAssetBucketName(user.ID))
688- if err != nil {
689- return err
690- }
691-
692- err = ssg.ProseBlog(user, bucket, "prose")
693- if err != nil {
694- log := shared.LoggerWithUser(ssg.Logger, user)
695- log.Error("could not generate blog for user", "err", err)
696- }
697- }
698-
699- return nil
700-}
701-
702-func (ssg *SSG) PostPage(user *db.User, blog *UserBlogData, post *db.Post) (pt *shared.ParsedText, err error) {
703- // create post file
704- rdr, wtr := io.Pipe()
705- var parsed *shared.ParsedText
706- go func() {
707- parsed, err = ssg.writePostPage(wtr, user, post, blog)
708- wtr.Close()
709- if err != nil {
710- blog.Logger.Error("post page", "err", err)
711- }
712- }()
713-
714- fname := post.Slug + ".html"
715- err = ssg.upload(blog.Logger, blog.Bucket, fname, rdr)
716- if err != nil {
717- return parsed, err
718- }
719- return parsed, nil
720-}
721-
722-func (ssg *SSG) NotFoundPage(logger *slog.Logger, user *db.User, blog *UserBlogData) error {
723- // create 404 page
724- logger.Info("generating 404 page")
725- rdr, wtr := io.Pipe()
726- go func() {
727- err := ssg.notFoundPage(wtr, user, blog)
728- wtr.Close()
729- if err != nil {
730- blog.Logger.Error("not found page", "err", err)
731- }
732- }()
733-
734- err := ssg.upload(blog.Logger, blog.Bucket, "404.html", rdr)
735- if err != nil {
736- return err
737- }
738-
739- return nil
740-}
741-
742-func (ssg *SSG) UpsertPost(userID, username string, bucket sst.Bucket, filename string) (*db.Post, error) {
743- slug := utils.SanitizeFileExt(filename)
744- updatedAt := time.Now()
745- fp := filepath.Join("prose/", filename)
746- logger := ssg.Logger.With("filename", fp)
747- rdr, _, err := ssg.Storage.GetObject(bucket, fp)
748- if err != nil {
749- logger.Error("get object", "err", err)
750- return nil, err
751- }
752- txtb, err := io.ReadAll(rdr)
753- if err != nil {
754- logger.Error("reader to string", "err", err)
755- return nil, err
756- }
757- txt := string(txtb)
758- parsed, err := shared.ParseText(txt)
759- if err != nil {
760- logger.Error("parse text", "err", err)
761- return nil, err
762- }
763-
764- post := &db.Post{
765- IsVirtual: true,
766- Slug: slug,
767- Filename: filename,
768- FileSize: len(txt),
769- Text: txt,
770- PublishAt: parsed.PublishAt,
771- UpdatedAt: &updatedAt,
772- Hidden: parsed.Hidden,
773- Description: parsed.Description,
774- Title: utils.FilenameToTitle(filename, parsed.Title),
775- Username: username,
776- }
777-
778- origPost, _ := ssg.DB.FindPostWithSlug(slug, userID, "prose")
779- if origPost != nil {
780- post.PublishAt = origPost.PublishAt
781- return ssg.DB.UpdatePost(post)
782- }
783- return ssg.DB.InsertPost(post)
784-}
785-
786-func (ssg *SSG) findPostByName(userID, username string, bucket sst.Bucket, filename string, modTime time.Time) (*db.Post, error) {
787- return ssg.DB.FindPostWithFilename(filename, userID, Space)
788-}
789-
790-func (ssg *SSG) findPosts(blog *UserBlogData, service string) ([]*db.Post, bool, error) {
791- blog.Logger.Info("finding posts")
792- data, err := ssg.DB.FindPostsForUser(&db.Pager{Num: 1000, Page: 0}, blog.User.ID, Space)
793- if err != nil {
794- return nil, false, err
795- }
796- return data.Data, false, nil
797-}
798-
799-type UserBlogData struct {
800- Bucket sst.Bucket
801- User *db.User
802- Posts []*db.Post
803- Readme *db.Post
804- Footer *db.Post
805- CSS *db.Post
806- NotFound *db.Post
807- Logger *slog.Logger
808-}
809-
810-func (ssg *SSG) ProseBlog(user *db.User, bucket sst.Bucket, service string) error {
811- // programmatically generate redirects file based on aliases
812- // and other routes that were in prose that need to be available
813- redirectsFile := "/rss /rss.atom 301\n"
814- logger := shared.LoggerWithUser(ssg.Logger, user)
815- logger.Info("generating blog for user")
816-
817- _, err := ssg.DB.FindProjectByName(user.ID, "prose")
818- if err != nil {
819- _, err := ssg.DB.InsertProject(user.ID, "prose", "prose")
820- if err != nil {
821- return err
822- }
823- return ssg.ProseBlog(user, bucket, service)
824- }
825-
826- blog := &UserBlogData{
827- User: user,
828- Bucket: bucket,
829- Logger: logger,
830- }
831-
832- posts, isVirtual, err := ssg.findPosts(blog, service)
833- if err != nil {
834- // no posts found, bail on generating an empty blog
835- // TODO: gen the index anyway?
836- return nil
837- }
838-
839- blog.Posts = posts
840-
841- css, _ := ssg.findPostByName(user.ID, user.Name, bucket, "_styles.css", time.Time{})
842- if css != nil && !css.IsVirtual {
843- stylerdr := strings.NewReader(css.Text)
844- err = ssg.upload(blog.Logger, bucket, "_styles.css", stylerdr)
845- if err != nil {
846- return err
847- }
848- }
849- blog.CSS = css
850-
851- readme, _ := ssg.findPostByName(user.ID, user.Name, bucket, "_readme.md", time.Time{})
852- if readme != nil && !readme.IsVirtual {
853- rdr := strings.NewReader(readme.Text)
854- err = ssg.upload(blog.Logger, bucket, "_readme.md", rdr)
855- if err != nil {
856- return err
857- }
858- }
859- blog.Readme = readme
860-
861- footer, _ := ssg.findPostByName(user.ID, user.Name, bucket, "_footer.md", time.Time{})
862- if readme != nil && !readme.IsVirtual {
863- rdr := strings.NewReader(footer.Text)
864- err = ssg.upload(blog.Logger, bucket, "_footer.md", rdr)
865- if err != nil {
866- return err
867- }
868- }
869- blog.Footer = footer
870-
871- notFound, _ := ssg.findPostByName(user.ID, user.Name, bucket, "_404.md", time.Time{})
872- if notFound != nil && !notFound.IsVirtual {
873- rdr := strings.NewReader(notFound.Text)
874- err = ssg.upload(blog.Logger, bucket, "_404.md", rdr)
875- if err != nil {
876- return err
877- }
878- }
879- blog.NotFound = notFound
880-
881- tagMap := map[string]string{}
882- for _, post := range posts {
883- if post.Slug == "" {
884- logger.Warn("post slug empty, skipping")
885- continue
886- }
887-
888- logger.Info("generating post", "slug", post.Slug)
889-
890- parsed, err := ssg.PostPage(user, blog, post)
891- if err != nil {
892- return err
893- }
894- // add aliases to redirects file
895- for _, alias := range parsed.Aliases {
896- redirectsFile += fmt.Sprintf("%s %s 301\n", alias, "/"+post.Slug)
897- }
898- for _, tag := range parsed.Tags {
899- tagMap[tag] = tag
900- }
901-
902- // create raw post file
903- // only generate md file if we dont already have it in our pgs site
904- if !post.IsVirtual {
905- fpath := post.Slug + ".md"
906- mdRdr := strings.NewReader(post.Text)
907- err = ssg.upload(blog.Logger, bucket, fpath, mdRdr)
908- if err != nil {
909- return err
910- }
911- }
912- }
913-
914- err = ssg.NotFoundPage(logger, user, blog)
915- if err != nil {
916- return err
917- }
918-
919- tags := []string{""}
920- for k := range tagMap {
921- tags = append(tags, k)
922- }
923-
924- // create index files
925- for _, tag := range tags {
926- logger.Info("generating blog index page", "tag", tag)
927- rdr, wtr := io.Pipe()
928- go func() {
929- err = ssg.blogPage(wtr, user, blog, tag)
930- wtr.Close()
931- if err != nil {
932- blog.Logger.Error("blog page", "err", err)
933- }
934- }()
935-
936- fpath := "index.html"
937- if tag != "" {
938- fpath = fmt.Sprintf("index-%s.html", tag)
939- }
940- err = ssg.upload(blog.Logger, bucket, fpath, rdr)
941- if err != nil {
942- return err
943- }
944- }
945-
946- logger.Info("generating blog rss page", "tag", "")
947- rdr, wtr := io.Pipe()
948- go func() {
949- err = ssg.rssBlogPage(wtr, user, blog)
950- wtr.Close()
951- if err != nil {
952- blog.Logger.Error("blog rss page", "err", err)
953- }
954- }()
955-
956- fpath := "rss.atom"
957- err = ssg.upload(blog.Logger, bucket, fpath, rdr)
958- if err != nil {
959- return err
960- }
961-
962- logger.Info("generating _redirects file", "text", redirectsFile)
963- // create redirects file
964- redirects := strings.NewReader(redirectsFile)
965- err = ssg.upload(blog.Logger, bucket, "_redirects", redirects)
966- if err != nil {
967- return err
968- }
969-
970- logger.Info("copying static folder", "dir", ssg.StaticDir)
971- err = ssg.static(blog.Logger, bucket)
972- if err != nil {
973- return err
974- }
975-
976- if !isVirtual {
977- logger.Info("copying images")
978- err = ssg.images(user, blog)
979- if err != nil {
980- return err
981- }
982- }
983-
984- logger.Info("success!")
985- return nil
986-}
+5, -11
1@@ -14,7 +14,6 @@ import (
2 "github.com/picosh/pico/db/postgres"
3 "github.com/picosh/pico/filehandlers"
4 uploadimgs "github.com/picosh/pico/filehandlers/imgs"
5- fileshared "github.com/picosh/pico/filehandlers/shared"
6 "github.com/picosh/pico/shared"
7 "github.com/picosh/pico/shared/storage"
8 wsh "github.com/picosh/pico/wish"
9@@ -63,14 +62,9 @@ func StartSshServer() {
10 dbh := postgres.NewDB(cfg.DbURL, cfg.Logger)
11 defer dbh.Close()
12
13- ctx := context.Background()
14- defer ctx.Done()
15- pipeClient := fileshared.CreatePubUploadDrain(ctx, logger)
16-
17 hooks := &MarkdownHooks{
18- Cfg: cfg,
19- Db: dbh,
20- Pipe: pipeClient,
21+ Cfg: cfg,
22+ Db: dbh,
23 }
24
25 var st storage.StorageServe
26@@ -87,9 +81,9 @@ func StartSshServer() {
27 }
28
29 fileMap := map[string]filehandlers.ReadWriteHandler{
30- ".md": filehandlers.NewScpPostHandler(dbh, cfg, hooks, st),
31- ".css": filehandlers.NewScpPostHandler(dbh, cfg, hooks, st),
32- "fallback": uploadimgs.NewUploadImgHandler(dbh, cfg, st, pipeClient),
33+ ".md": filehandlers.NewScpPostHandler(dbh, cfg, hooks),
34+ ".css": filehandlers.NewScpPostHandler(dbh, cfg, hooks),
35+ "fallback": uploadimgs.NewUploadImgHandler(dbh, cfg, st),
36 }
37 handler := filehandlers.NewFileHandlerRouter(cfg, dbh, fileMap)
38 handler.Spaces = []string{cfg.Space, "imgs"}