From 24364a26b0055c4d9336d5148f11dc7cd481d781 Mon Sep 17 00:00:00 2001 From: Alessio Date: Sun, 31 Dec 2023 15:56:12 -0600 Subject: [PATCH] REFACTOR: rework the rendering helpers - rendering helpers moved to their own file (separate from response helpers) - create a unified render helper instead of "buffered_render_basic_X" and "buffered_render_tweet_X" - this helper takes 2 data objects: one with global data (tweet trove, logged in user, etc) and one page-specific - this lets us remove the disgusting interface type - modify the User List template to use UserIDs indexing into a global data object instead of a list of Users --- internal/webserver/handler_follow_unfollow.go | 4 +- internal/webserver/handler_lists.go | 21 +- internal/webserver/handler_login.go | 4 +- internal/webserver/handler_messages.go | 23 +- internal/webserver/handler_search.go | 32 +-- internal/webserver/handler_timeline.go | 6 +- internal/webserver/handler_tweet_detail.go | 26 +-- internal/webserver/handler_user_feed.go | 54 ++--- internal/webserver/renderer_helpers.go | 212 ++++++++++++++++++ internal/webserver/response_helpers.go | 212 ------------------ internal/webserver/tpl/list.tpl | 2 +- internal/webserver/tpl/search.tpl | 2 +- .../list.tpl | 5 +- 13 files changed, 278 insertions(+), 325 deletions(-) create mode 100644 internal/webserver/renderer_helpers.go rename internal/webserver/tpl/{includes => tweet_page_includes}/list.tpl (55%) diff --git a/internal/webserver/handler_follow_unfollow.go b/internal/webserver/handler_follow_unfollow.go index 9b3775a..977e3b8 100644 --- a/internal/webserver/handler_follow_unfollow.go +++ b/internal/webserver/handler_follow_unfollow.go @@ -28,7 +28,7 @@ func (app *Application) UserFollow(w http.ResponseWriter, r *http.Request) { app.Profile.SetUserFollowed(&user, true) - app.buffered_render_basic_htmx(w, "following-button", user) + app.buffered_render_htmx(w, "following-button", PageGlobalData{}, user) } func (app *Application) UserUnfollow(w 
http.ResponseWriter, r *http.Request) { @@ -51,5 +51,5 @@ func (app *Application) UserUnfollow(w http.ResponseWriter, r *http.Request) { } app.Profile.SetUserFollowed(&user, false) - app.buffered_render_basic_htmx(w, "following-button", user) + app.buffered_render_htmx(w, "following-button", PageGlobalData{}, user) } diff --git a/internal/webserver/handler_lists.go b/internal/webserver/handler_lists.go index 76ef564..99947ef 100644 --- a/internal/webserver/handler_lists.go +++ b/internal/webserver/handler_lists.go @@ -6,6 +6,23 @@ import ( "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" ) +type ListData struct { + Title string + UserIDs []scraper.UserID +} + +func NewListData(users []scraper.User) (ListData, scraper.TweetTrove) { + trove := scraper.NewTweetTrove() + data := ListData{ + UserIDs: []scraper.UserID{}, + } + for _, u := range users { + trove.Users[u.ID] = u + data.UserIDs = append(data.UserIDs, u.ID) + } + return data, trove +} + func (app *Application) Lists(w http.ResponseWriter, r *http.Request) { app.traceLog.Printf("'Lists' handler (path: %q)", r.URL.Path) @@ -18,5 +35,7 @@ func (app *Application) Lists(w http.ResponseWriter, r *http.Request) { where is_followed = 1`) panic_if(err) - app.buffered_render_basic_page(w, "tpl/list.tpl", ListData{Title: "Offline Follows", Users: users}) + data, trove := NewListData(users) + data.Title = "Offline Follows" + app.buffered_render_page(w, "tpl/list.tpl", PageGlobalData{TweetTrove: trove}, data) } diff --git a/internal/webserver/handler_login.go b/internal/webserver/handler_login.go index d2dd31a..4b8e2fb 100644 --- a/internal/webserver/handler_login.go +++ b/internal/webserver/handler_login.go @@ -57,7 +57,7 @@ func (app *Application) Login(w http.ResponseWriter, r *http.Request) { LoginForm: form, ExistingSessions: app.Profile.ListSessions(), } - app.buffered_render_basic_page(w, "tpl/login.tpl", &data) + app.buffered_render_page(w, "tpl/login.tpl", PageGlobalData{}, &data) } func (app 
*Application) ChangeSession(w http.ResponseWriter, r *http.Request) { @@ -76,5 +76,5 @@ func (app *Application) ChangeSession(w http.ResponseWriter, r *http.Request) { app.error_400_with_message(w, fmt.Sprintf("User not in database: %s", form.AccountName)) return } - app.buffered_render_basic_htmx(w, "nav-sidebar", nil) + app.buffered_render_htmx(w, "nav-sidebar", PageGlobalData{}, nil) } diff --git a/internal/webserver/handler_messages.go b/internal/webserver/handler_messages.go index 3612ea7..929aee6 100644 --- a/internal/webserver/handler_messages.go +++ b/internal/webserver/handler_messages.go @@ -16,22 +16,6 @@ type MessageData struct { LatestPollingTimestamp int } -func (t MessageData) Tweet(id scraper.TweetID) scraper.Tweet { - return t.Tweets[id] -} -func (t MessageData) User(id scraper.UserID) scraper.User { - return t.Users[id] -} -func (t MessageData) Retweet(id scraper.TweetID) scraper.Retweet { - return t.Retweets[id] -} -func (t MessageData) Space(id scraper.SpaceID) scraper.Space { - return t.Spaces[id] -} -func (t MessageData) FocusedTweetID() scraper.TweetID { - return scraper.TweetID(0) -} - func (app *Application) Messages(w http.ResponseWriter, r *http.Request) { app.traceLog.Printf("'Messages' handler (path: %q)", r.URL.Path) @@ -49,6 +33,7 @@ func (app *Application) Messages(w http.ResponseWriter, r *http.Request) { } chat_view_data := MessageData{DMChatView: app.Profile.GetChatRoomsPreview(app.ActiveUser.ID)} // Get message list previews + global_data := PageGlobalData{TweetTrove: chat_view_data.DMChatView.TweetTrove} if room_id != "" { // First send a message, if applicable @@ -61,7 +46,7 @@ func (app *Application) Messages(w http.ResponseWriter, r *http.Request) { panic_if(json.Unmarshal(body, &message_data)) trove := scraper.SendDMMessage(room_id, message_data.Text, 0) app.Profile.SaveDMTrove(trove, false) - app.buffered_render_tweet_htmx(w, "dm-composer", chat_view_data) // Wipe the chat box + app.buffered_render_htmx(w, "dm-composer", 
global_data, chat_view_data) // Wipe the chat box go app.Profile.SaveDMTrove(trove, true) } chat_view_data.ActiveRoomID = room_id @@ -80,10 +65,10 @@ func (app *Application) Messages(w http.ResponseWriter, r *http.Request) { } if r.URL.Query().Has("poll") || len(parts) == 2 && parts[1] == "send" { - app.buffered_render_tweet_htmx(w, "messages-with-poller", chat_view_data) + app.buffered_render_htmx(w, "messages-with-poller", global_data, chat_view_data) return } } - app.buffered_render_tweet_page(w, "tpl/messages.tpl", chat_view_data) + app.buffered_render_page(w, "tpl/messages.tpl", global_data, chat_view_data) } diff --git a/internal/webserver/handler_search.go b/internal/webserver/handler_search.go index 09ce9d0..941534c 100644 --- a/internal/webserver/handler_search.go +++ b/internal/webserver/handler_search.go @@ -18,40 +18,28 @@ type SearchPageData struct { SortOrder persistence.SortOrder SortOrderOptions []string IsUsersSearch bool - UsersList []scraper.User + UserIDs []scraper.UserID // TODO: fill out the search text in the search bar as well (needs modifying the base template) } func NewSearchPageData() SearchPageData { - ret := SearchPageData{SortOrderOptions: []string{}} + ret := SearchPageData{SortOrderOptions: []string{}, Feed: persistence.NewFeed()} for i := 0; i < 4; i++ { // Don't include "Liked At" option which is #4 ret.SortOrderOptions = append(ret.SortOrderOptions, persistence.SortOrder(i).String()) } return ret } -func (t SearchPageData) Tweet(id scraper.TweetID) scraper.Tweet { - return t.Tweets[id] -} -func (t SearchPageData) User(id scraper.UserID) scraper.User { - return t.Users[id] -} -func (t SearchPageData) Retweet(id scraper.TweetID) scraper.Retweet { - return t.Retweets[id] -} -func (t SearchPageData) Space(id scraper.SpaceID) scraper.Space { - return t.Spaces[id] -} -func (t SearchPageData) FocusedTweetID() scraper.TweetID { - return scraper.TweetID(0) -} - func (app *Application) SearchUsers(w http.ResponseWriter, r *http.Request) { 
ret := NewSearchPageData() ret.IsUsersSearch = true ret.SearchText = strings.Trim(r.URL.Path, "/") - ret.UsersList = app.Profile.SearchUsers(ret.SearchText) - app.buffered_render_tweet_page(w, "tpl/search.tpl", ret) + ret.UserIDs = []scraper.UserID{} + for _, u := range app.Profile.SearchUsers(ret.SearchText) { + ret.TweetTrove.Users[u.ID] = u + ret.UserIDs = append(ret.UserIDs, u.ID) + } + app.buffered_render_page(w, "tpl/search.tpl", PageGlobalData{TweetTrove: ret.Feed.TweetTrove, SearchText: ret.SearchText}, ret) } func (app *Application) Search(w http.ResponseWriter, r *http.Request) { @@ -138,8 +126,8 @@ func (app *Application) Search(w http.ResponseWriter, r *http.Request) { if r.Header.Get("HX-Request") == "true" && c.CursorPosition == persistence.CURSOR_MIDDLE { // It's a Show More request - app.buffered_render_tweet_htmx(w, "timeline", data) + app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: data.Feed.TweetTrove, SearchText: search_text}, data) } else { - app.buffered_render_tweet_page(w, "tpl/search.tpl", data) + app.buffered_render_page(w, "tpl/search.tpl", PageGlobalData{TweetTrove: data.Feed.TweetTrove, SearchText: search_text}, data) } } diff --git a/internal/webserver/handler_timeline.go b/internal/webserver/handler_timeline.go index ca56b51..d20e234 100644 --- a/internal/webserver/handler_timeline.go +++ b/internal/webserver/handler_timeline.go @@ -26,12 +26,10 @@ func (app *Application) Timeline(w http.ResponseWriter, r *http.Request) { } } - data := UserProfileData{Feed: feed} // TODO: wrong struct - if r.Header.Get("HX-Request") == "true" && c.CursorPosition == persistence.CURSOR_MIDDLE { // It's a Show More request - app.buffered_render_tweet_htmx(w, "timeline", data) + app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, feed) } else { - app.buffered_render_tweet_page(w, "tpl/offline_timeline.tpl", data) + app.buffered_render_page(w, "tpl/offline_timeline.tpl", PageGlobalData{TweetTrove: 
feed.TweetTrove}, feed) } } diff --git a/internal/webserver/handler_tweet_detail.go b/internal/webserver/handler_tweet_detail.go index 833ff43..391f930 100644 --- a/internal/webserver/handler_tweet_detail.go +++ b/internal/webserver/handler_tweet_detail.go @@ -24,21 +24,6 @@ func NewTweetDetailData() TweetDetailData { TweetDetailView: persistence.NewTweetDetailView(), } } -func (t TweetDetailData) Tweet(id scraper.TweetID) scraper.Tweet { - return t.Tweets[id] -} -func (t TweetDetailData) User(id scraper.UserID) scraper.User { - return t.Users[id] -} -func (t TweetDetailData) Retweet(id scraper.TweetID) scraper.Retweet { - return t.Retweets[id] -} -func (t TweetDetailData) Space(id scraper.SpaceID) scraper.Space { - return t.Spaces[id] -} -func (t TweetDetailData) FocusedTweetID() scraper.TweetID { - return t.MainTweetID -} func (app *Application) ensure_tweet(id scraper.TweetID, is_forced bool, is_conversation_required bool) (scraper.Tweet, error) { is_available := false @@ -95,7 +80,7 @@ func (app *Application) LikeTweet(w http.ResponseWriter, r *http.Request) { panic_if(err) tweet.IsLikedByCurrentUser = true - app.buffered_render_basic_htmx(w, "likes-count", tweet) + app.buffered_render_htmx(w, "likes-count", PageGlobalData{}, tweet) } func (app *Application) UnlikeTweet(w http.ResponseWriter, r *http.Request) { tweet := get_tweet_from_context(r.Context()) @@ -109,7 +94,7 @@ func (app *Application) UnlikeTweet(w http.ResponseWriter, r *http.Request) { panic_if(err) tweet.IsLikedByCurrentUser = false - app.buffered_render_basic_htmx(w, "likes-count", tweet) + app.buffered_render_htmx(w, "likes-count", PageGlobalData{}, tweet) } func (app *Application) TweetDetail(w http.ResponseWriter, r *http.Request) { @@ -144,13 +129,12 @@ func (app *Application) TweetDetail(w http.ResponseWriter, r *http.Request) { return } - trove, err := app.Profile.GetTweetDetail(data.MainTweetID, app.ActiveUser.ID) + twt_detail, err := app.Profile.GetTweetDetail(data.MainTweetID, 
app.ActiveUser.ID) panic_if(err) // ErrNotInDB should be impossible, since we already fetched the single tweet successfully - data.TweetDetailView = trove - // fmt.Println(to_json(data)) + data.TweetDetailView = twt_detail - app.buffered_render_tweet_page(w, "tpl/tweet_detail.tpl", data) + app.buffered_render_page(w, "tpl/tweet_detail.tpl", PageGlobalData{TweetTrove: twt_detail.TweetTrove}, data) } type key string diff --git a/internal/webserver/handler_user_feed.go b/internal/webserver/handler_user_feed.go index ad5ff2b..1b1e3f0 100644 --- a/internal/webserver/handler_user_feed.go +++ b/internal/webserver/handler_user_feed.go @@ -10,28 +10,6 @@ import ( "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" ) -type UserProfileData struct { - persistence.Feed - scraper.UserID - FeedType string -} - -func (t UserProfileData) Tweet(id scraper.TweetID) scraper.Tweet { - return t.Tweets[id] -} -func (t UserProfileData) User(id scraper.UserID) scraper.User { - return t.Users[id] -} -func (t UserProfileData) Retweet(id scraper.TweetID) scraper.Retweet { - return t.Retweets[id] -} -func (t UserProfileData) Space(id scraper.SpaceID) scraper.Space { - return t.Spaces[id] -} -func (t UserProfileData) FocusedTweetID() scraper.TweetID { - return scraper.TweetID(0) -} - func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) { app.traceLog.Printf("'UserFeed' handler (path: %q)", r.URL.Path) @@ -114,7 +92,12 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) { } feed.Users[user.ID] = user - data := UserProfileData{Feed: feed, UserID: user.ID} + data := struct { + persistence.Feed + scraper.UserID + FeedType string + }{Feed: feed, UserID: user.ID} + if len(parts) == 2 { data.FeedType = parts[1] } else { @@ -123,26 +106,21 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) { if r.Header.Get("HX-Request") == "true" && c.CursorPosition == persistence.CURSOR_MIDDLE { // It's a Show More request - 
app.buffered_render_tweet_htmx(w, "timeline", data) + app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, data) } else { - app.buffered_render_tweet_page(w, "tpl/user_feed.tpl", data) + app.buffered_render_page(w, "tpl/user_feed.tpl", PageGlobalData{TweetTrove: feed.TweetTrove}, data) } } -type ListData struct { - Title string - Users []scraper.User - } - func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, user scraper.User) { - app.buffered_render_basic_page(w, "tpl/list.tpl", ListData{ - Title: fmt.Sprintf("Followed by @%s", user.Handle), - Users: app.Profile.GetFollowees(user.ID), - }) + data, trove := NewListData(app.Profile.GetFollowees(user.ID)) + trove.Users[user.ID] = user // Not loaded otherwise; needed for the profile image in the login button on the sidebar + data.Title = fmt.Sprintf("Followed by @%s", user.Handle) + app.buffered_render_page(w, "tpl/list.tpl", PageGlobalData{TweetTrove: trove}, data) } func (app *Application) UserFollowers(w http.ResponseWriter, r *http.Request, user scraper.User) { - app.buffered_render_basic_page(w, "tpl/list.tpl", ListData{ - Title: fmt.Sprintf("Followers of @%s", user.Handle), - Users: app.Profile.GetFollowers(user.ID), - }) + data, trove := NewListData(app.Profile.GetFollowers(user.ID)) + trove.Users[user.ID] = user + data.Title = fmt.Sprintf("@%s's followers", user.Handle) + app.buffered_render_page(w, "tpl/list.tpl", PageGlobalData{TweetTrove: trove}, data) } diff --git a/internal/webserver/renderer_helpers.go b/internal/webserver/renderer_helpers.go new file mode 100644 index 0000000..c46e4de --- /dev/null +++ b/internal/webserver/renderer_helpers.go @@ -0,0 +1,212 @@ +package webserver + +import ( + "bytes" + "fmt" + "html/template" + "io" + "io/fs" + "net/http" + "net/url" + "path" + "path/filepath" + "regexp" + "runtime" + + "github.com/Masterminds/sprig/v3" + + "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" + 
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" +) + +var this_dir string + +func init() { + _, this_file, _, _ := runtime.Caller(0) // `this_file` is absolute path to this source file + this_dir = path.Dir(this_file) +} + +func get_filepath(s string) string { + if use_embedded == "true" { + return s + } + return path.Join(this_dir, s) +} + +func glob(path string) []string { + var ret []string + var err error + if use_embedded == "true" { + ret, err = fs.Glob(embedded_files, get_filepath(path)) + } else { + ret, err = filepath.Glob(get_filepath(path)) + } + panic_if(err) + return ret +} + +// Config object for buffered rendering +type renderer struct { + Funcs template.FuncMap + Filenames []string + TplName string + Data interface{} +} + +// Render the given template using a bytes.Buffer. This avoids the possibility of failing partway +// through the rendering, and sending an incomplete response with "Bad Request" or "Server Error" at the end. +func (r renderer) BufferedRender(w io.Writer) { + var tpl *template.Template + var err error + + funcs := sprig.FuncMap() + for i := range r.Funcs { + funcs[i] = r.Funcs[i] + } + if use_embedded == "true" { + tpl, err = template.New("").Funcs(funcs).ParseFS(embedded_files, r.Filenames...) + } else { + tpl, err = template.New("").Funcs(funcs).ParseFiles(r.Filenames...) + } + panic_if(err) + + buf := new(bytes.Buffer) + err = tpl.ExecuteTemplate(buf, r.TplName, r.Data) + panic_if(err) + + _, err = buf.WriteTo(w) + panic_if(err) +} + +// Render the "base" template, creating a full HTML page corresponding to the given template file, +// with all available partials. +func (app *Application) buffered_render_page(w http.ResponseWriter, tpl_file string, global_data PageGlobalData, tpl_data interface{}) { + partials := append(glob("tpl/includes/*.tpl"), glob("tpl/tweet_page_includes/*.tpl")...) 
+ + r := renderer{ + Funcs: template.FuncMap{ + "tweet": global_data.Tweet, + "user": global_data.User, + "retweet": global_data.Retweet, + "space": global_data.Space, + "active_user": app.get_active_user, + "focused_tweet_id": global_data.GetFocusedTweetID, + "search_text": global_data.GetSearchText, + "get_entities": get_entities, + "get_tombstone_text": get_tombstone_text, + "cursor_to_query_params": cursor_to_query_params, + }, + Filenames: append(partials, get_filepath(tpl_file)), + TplName: "base", + Data: tpl_data, + } + r.BufferedRender(w) +} + +// Render a particular template (HTMX response, i.e., not a full page) +func (app *Application) buffered_render_htmx(w http.ResponseWriter, tpl_name string, global_data PageGlobalData, tpl_data interface{}) { + partials := append(glob("tpl/includes/*.tpl"), glob("tpl/tweet_page_includes/*.tpl")...) + + r := renderer{ + Funcs: template.FuncMap{ + "tweet": global_data.Tweet, + "user": global_data.User, + "retweet": global_data.Retweet, + "space": global_data.Space, + "active_user": app.get_active_user, + "focused_tweet_id": global_data.GetFocusedTweetID, + "search_text": global_data.GetSearchText, + "get_entities": get_entities, + "get_tombstone_text": get_tombstone_text, + "cursor_to_query_params": cursor_to_query_params, + }, + Filenames: partials, + TplName: tpl_name, + Data: tpl_data, + } + r.BufferedRender(w) +} + +func (app *Application) get_active_user() scraper.User { + return app.ActiveUser +} + +func cursor_to_query_params(c persistence.Cursor) string { + result := url.Values{} + result.Set("cursor", fmt.Sprint(c.CursorValue)) + result.Set("sort-order", c.SortOrder.String()) + return result.Encode() +} + +type EntityType int + +const ( + ENTITY_TYPE_TEXT EntityType = iota + ENTITY_TYPE_MENTION + ENTITY_TYPE_HASHTAG +) + +type Entity struct { + EntityType + Contents string +} + +func get_entities(text string) []Entity { + ret := []Entity{} + start := 0 + for _, idxs := range 
regexp.MustCompile(`(\W|^)[@#]\w+`).FindAllStringIndex(text, -1) { + // Handle leading whitespace. Only match start-of-string or leading whitespace to avoid matching, e.g., emails + if text[idxs[0]] == ' ' || text[idxs[0]] == '\n' { + idxs[0] += 1 + } + if start != idxs[0] { + ret = append(ret, Entity{ENTITY_TYPE_TEXT, text[start:idxs[0]]}) + } + piece := text[idxs[0]+1 : idxs[1]] // Chop off the "#" or "@" + if text[idxs[0]] == '@' { + ret = append(ret, Entity{ENTITY_TYPE_MENTION, piece}) + } else { + ret = append(ret, Entity{ENTITY_TYPE_HASHTAG, piece}) + } + start = idxs[1] + } + if start < len(text) { + ret = append(ret, Entity{ENTITY_TYPE_TEXT, text[start:]}) + } + + return ret +} + +func get_tombstone_text(t scraper.Tweet) string { + if t.TombstoneText != "" { + return t.TombstoneText + } + return t.TombstoneType +} + +// TODO: this name sucks +type PageGlobalData struct { + scraper.TweetTrove + SearchText string + FocusedTweetID scraper.TweetID +} + +func (d PageGlobalData) Tweet(id scraper.TweetID) scraper.Tweet { + return d.Tweets[id] +} +func (d PageGlobalData) User(id scraper.UserID) scraper.User { + return d.Users[id] +} +func (d PageGlobalData) Retweet(id scraper.TweetID) scraper.Retweet { + return d.Retweets[id] +} +func (d PageGlobalData) Space(id scraper.SpaceID) scraper.Space { + return d.Spaces[id] +} +func (d PageGlobalData) GetFocusedTweetID() scraper.TweetID { + return d.FocusedTweetID +} +func (d PageGlobalData) GetSearchText() string { + fmt.Println(d.SearchText) + return d.SearchText +} diff --git a/internal/webserver/response_helpers.go b/internal/webserver/response_helpers.go index 17eec8d..a5c08e4 100644 --- a/internal/webserver/response_helpers.go +++ b/internal/webserver/response_helpers.go @@ -1,23 +1,9 @@ package webserver import ( - "bytes" "fmt" - "html/template" - "io" - "io/fs" "net/http" - "net/url" - "path" - "path/filepath" - "regexp" - "runtime" "runtime/debug" - - "github.com/Masterminds/sprig/v3" - - 
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" - "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" ) func panic_if(err error) { @@ -26,32 +12,6 @@ func panic_if(err error) { } } -var this_dir string - -func init() { - _, this_file, _, _ := runtime.Caller(0) // `this_file` is absolute path to this source file - this_dir = path.Dir(this_file) -} - -func get_filepath(s string) string { - if use_embedded == "true" { - return s - } - return path.Join(this_dir, s) -} - -func glob(path string) []string { - var ret []string - var err error - if use_embedded == "true" { - ret, err = fs.Glob(embedded_files, get_filepath(path)) - } else { - ret, err = filepath.Glob(get_filepath(path)) - } - panic_if(err) - return ret -} - // func (app *Application) error_400(w http.ResponseWriter) { // http.Error(w, "Bad Request", 400) // } @@ -76,175 +36,3 @@ func (app *Application) error_500(w http.ResponseWriter, err error) { } http.Error(w, "Server error :(", 500) } - -type TweetCollection interface { - Tweet(id scraper.TweetID) scraper.Tweet - User(id scraper.UserID) scraper.User - Retweet(id scraper.TweetID) scraper.Retweet - Space(id scraper.SpaceID) scraper.Space - FocusedTweetID() scraper.TweetID -} - -// Creates a template from the given template file using all the available partials. -// Calls `app.buffered_render` to render the created template. -func (app *Application) buffered_render_tweet_page(w http.ResponseWriter, tpl_file string, data TweetCollection) { - partials := append(glob("tpl/includes/*.tpl"), glob("tpl/tweet_page_includes/*.tpl")...) 
- - r := renderer{ - Funcs: func_map(template.FuncMap{ - "tweet": data.Tweet, - "user": data.User, - "retweet": data.Retweet, - "space": data.Space, - "active_user": app.get_active_user, - "focused_tweet_id": data.FocusedTweetID, - "get_entities": get_entities, - "get_tombstone_text": get_tombstone_text, - "cursor_to_query_params": cursor_to_query_params, - }), - Filenames: append(partials, get_filepath(tpl_file)), - TplName: "base", - Data: data, - } - r.BufferedRender(w) -} - -// Creates a template from the given template file using all the available partials. -// Calls `app.buffered_render` to render the created template. -func (app *Application) buffered_render_basic_page(w http.ResponseWriter, tpl_file string, data interface{}) { - partials := glob("tpl/includes/*.tpl") - - r := renderer{ - Funcs: func_map(template.FuncMap{"active_user": app.get_active_user}), - Filenames: append(partials, get_filepath(tpl_file)), - TplName: "base", - Data: data, - } - r.BufferedRender(w) -} - -func (app *Application) buffered_render_tweet_htmx(w http.ResponseWriter, tpl_name string, data TweetCollection) { - partials := append(glob("tpl/includes/*.tpl"), glob("tpl/tweet_page_includes/*.tpl")...) 
- - r := renderer{ - Funcs: func_map(template.FuncMap{ - "tweet": data.Tweet, - "user": data.User, - "retweet": data.Retweet, - "space": data.Space, - "active_user": app.get_active_user, - "focused_tweet_id": data.FocusedTweetID, - "get_entities": get_entities, - "get_tombstone_text": get_tombstone_text, - "cursor_to_query_params": cursor_to_query_params, - }), - Filenames: partials, - TplName: tpl_name, - Data: data, - } - r.BufferedRender(w) -} - -func (app *Application) buffered_render_basic_htmx(w http.ResponseWriter, tpl_name string, data interface{}) { - partials := glob("tpl/includes/*.tpl") - - r := renderer{ - Funcs: func_map(template.FuncMap{"active_user": app.get_active_user}), - Filenames: partials, - TplName: tpl_name, - Data: data, - } - r.BufferedRender(w) -} - -func (app *Application) get_active_user() scraper.User { - return app.ActiveUser -} - -type EntityType int - -const ( - ENTITY_TYPE_TEXT EntityType = iota - ENTITY_TYPE_MENTION - ENTITY_TYPE_HASHTAG -) - -type Entity struct { - EntityType - Contents string -} - -func get_entities(text string) []Entity { - ret := []Entity{} - start := 0 - for _, idxs := range regexp.MustCompile(`(\s|^)[@#]\w+`).FindAllStringIndex(text, -1) { - // Handle leading whitespace. 
Only match start-of-string or leading whitespace to avoid matching, e.g., emails - if text[idxs[0]] == ' ' || text[idxs[0]] == '\n' { - idxs[0] += 1 - } - if start != idxs[0] { - ret = append(ret, Entity{ENTITY_TYPE_TEXT, text[start:idxs[0]]}) - } - piece := text[idxs[0]+1 : idxs[1]] // Chop off the "#" or "@" - if text[idxs[0]] == '@' { - ret = append(ret, Entity{ENTITY_TYPE_MENTION, piece}) - } else { - ret = append(ret, Entity{ENTITY_TYPE_HASHTAG, piece}) - } - start = idxs[1] - } - if start < len(text) { - ret = append(ret, Entity{ENTITY_TYPE_TEXT, text[start:]}) - } - - return ret -} - -func get_tombstone_text(t scraper.Tweet) string { - if t.TombstoneText != "" { - return t.TombstoneText - } - return t.TombstoneType -} - -func func_map(extras template.FuncMap) template.FuncMap { - ret := sprig.FuncMap() - for i := range extras { - ret[i] = extras[i] - } - return ret -} - -type renderer struct { - Funcs template.FuncMap - Filenames []string - TplName string - Data interface{} -} - -// Render the given template using a bytes.Buffer. This avoids the possibility of failing partway -// through the rendering, and sending an imcomplete response with "Bad Request" or "Server Error" at the end. -func (r renderer) BufferedRender(w io.Writer) { - var tpl *template.Template - var err error - if use_embedded == "true" { - tpl, err = template.New("").Funcs(r.Funcs).ParseFS(embedded_files, r.Filenames...) - } else { - tpl, err = template.New("").Funcs(r.Funcs).ParseFiles(r.Filenames...) 
- } - panic_if(err) - - buf := new(bytes.Buffer) - err = tpl.ExecuteTemplate(buf, r.TplName, r.Data) - panic_if(err) - - _, err = buf.WriteTo(w) - panic_if(err) -} - -func cursor_to_query_params(c persistence.Cursor) string { - result := url.Values{} - result.Set("cursor", fmt.Sprint(c.CursorValue)) - result.Set("sort-order", c.SortOrder.String()) - return result.Encode() -} diff --git a/internal/webserver/tpl/list.tpl b/internal/webserver/tpl/list.tpl index adca996..d981180 100644 --- a/internal/webserver/tpl/list.tpl +++ b/internal/webserver/tpl/list.tpl @@ -1,5 +1,5 @@ {{define "title"}}{{.Title}}{{end}} {{define "main"}} - {{template "list" .Users}} + {{template "list" .UserIDs}} {{end}} diff --git a/internal/webserver/tpl/search.tpl b/internal/webserver/tpl/search.tpl index 60f2642..86abc06 100644 --- a/internal/webserver/tpl/search.tpl +++ b/internal/webserver/tpl/search.tpl @@ -20,7 +20,7 @@ {{if .IsUsersSearch}} - {{template "list" .UsersList}} + {{template "list" .UserIDs}} {{else}}
order: diff --git a/internal/webserver/tpl/includes/list.tpl b/internal/webserver/tpl/tweet_page_includes/list.tpl similarity index 55% rename from internal/webserver/tpl/includes/list.tpl rename to internal/webserver/tpl/tweet_page_includes/list.tpl index 0b60b2e..a3ab6c1 100644 --- a/internal/webserver/tpl/includes/list.tpl +++ b/internal/webserver/tpl/tweet_page_includes/list.tpl @@ -1,9 +1,10 @@ {{define "list"}}
{{range .}} + {{$user := (user .)}}
- {{template "author-info" .}} -

{{.Bio}}

+ {{template "author-info" $user}} +

{{$user.Bio}}

{{end}}