REFACTOR: rework the rendering helpers

- rendering helpers moved to their own file (separate from response helpers)
- create a unified render helper instead of "buffered_render_basic_X" and "buffered_render_tweet_X"
	- this helper takes 2 data objects: one with global data (tweet trove, logged in user, etc) and one page-specific
	- this lets us remove the disgusting interface type
- modify the User List template to use UserIDs indexing into a global data object instead of a list of Users
This commit is contained in:
Alessio 2023-12-31 15:56:12 -06:00
parent 474b30cdc1
commit 24364a26b0
13 changed files with 278 additions and 325 deletions

View File

@ -28,7 +28,7 @@ func (app *Application) UserFollow(w http.ResponseWriter, r *http.Request) {
app.Profile.SetUserFollowed(&user, true)
app.buffered_render_basic_htmx(w, "following-button", user)
app.buffered_render_htmx(w, "following-button", PageGlobalData{}, user)
}
func (app *Application) UserUnfollow(w http.ResponseWriter, r *http.Request) {
@ -51,5 +51,5 @@ func (app *Application) UserUnfollow(w http.ResponseWriter, r *http.Request) {
}
app.Profile.SetUserFollowed(&user, false)
app.buffered_render_basic_htmx(w, "following-button", user)
app.buffered_render_htmx(w, "following-button", PageGlobalData{}, user)
}

View File

@ -6,6 +6,23 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)
type ListData struct {
Title string
UserIDs []scraper.UserID
}
func NewListData(users []scraper.User) (ListData, scraper.TweetTrove) {
trove := scraper.NewTweetTrove()
data := ListData{
UserIDs: []scraper.UserID{},
}
for _, u := range users {
trove.Users[u.ID] = u
data.UserIDs = append(data.UserIDs, u.ID)
}
return data, trove
}
func (app *Application) Lists(w http.ResponseWriter, r *http.Request) {
app.traceLog.Printf("'Lists' handler (path: %q)", r.URL.Path)
@ -18,5 +35,7 @@ func (app *Application) Lists(w http.ResponseWriter, r *http.Request) {
where is_followed = 1`)
panic_if(err)
app.buffered_render_basic_page(w, "tpl/list.tpl", ListData{Title: "Offline Follows", Users: users})
data, trove := NewListData(users)
data.Title = "Offline Follows"
app.buffered_render_page(w, "tpl/list.tpl", PageGlobalData{TweetTrove: trove}, data)
}

View File

@ -57,7 +57,7 @@ func (app *Application) Login(w http.ResponseWriter, r *http.Request) {
LoginForm: form,
ExistingSessions: app.Profile.ListSessions(),
}
app.buffered_render_basic_page(w, "tpl/login.tpl", &data)
app.buffered_render_page(w, "tpl/login.tpl", PageGlobalData{}, &data)
}
func (app *Application) ChangeSession(w http.ResponseWriter, r *http.Request) {
@ -76,5 +76,5 @@ func (app *Application) ChangeSession(w http.ResponseWriter, r *http.Request) {
app.error_400_with_message(w, fmt.Sprintf("User not in database: %s", form.AccountName))
return
}
app.buffered_render_basic_htmx(w, "nav-sidebar", nil)
app.buffered_render_htmx(w, "nav-sidebar", PageGlobalData{}, nil)
}

View File

@ -16,22 +16,6 @@ type MessageData struct {
LatestPollingTimestamp int
}
func (t MessageData) Tweet(id scraper.TweetID) scraper.Tweet {
return t.Tweets[id]
}
func (t MessageData) User(id scraper.UserID) scraper.User {
return t.Users[id]
}
func (t MessageData) Retweet(id scraper.TweetID) scraper.Retweet {
return t.Retweets[id]
}
func (t MessageData) Space(id scraper.SpaceID) scraper.Space {
return t.Spaces[id]
}
func (t MessageData) FocusedTweetID() scraper.TweetID {
return scraper.TweetID(0)
}
func (app *Application) Messages(w http.ResponseWriter, r *http.Request) {
app.traceLog.Printf("'Messages' handler (path: %q)", r.URL.Path)
@ -49,6 +33,7 @@ func (app *Application) Messages(w http.ResponseWriter, r *http.Request) {
}
chat_view_data := MessageData{DMChatView: app.Profile.GetChatRoomsPreview(app.ActiveUser.ID)} // Get message list previews
global_data := PageGlobalData{TweetTrove: chat_view_data.DMChatView.TweetTrove}
if room_id != "" {
// First send a message, if applicable
@ -61,7 +46,7 @@ func (app *Application) Messages(w http.ResponseWriter, r *http.Request) {
panic_if(json.Unmarshal(body, &message_data))
trove := scraper.SendDMMessage(room_id, message_data.Text, 0)
app.Profile.SaveDMTrove(trove, false)
app.buffered_render_tweet_htmx(w, "dm-composer", chat_view_data) // Wipe the chat box
app.buffered_render_htmx(w, "dm-composer", global_data, chat_view_data) // Wipe the chat box
go app.Profile.SaveDMTrove(trove, true)
}
chat_view_data.ActiveRoomID = room_id
@ -80,10 +65,10 @@ func (app *Application) Messages(w http.ResponseWriter, r *http.Request) {
}
if r.URL.Query().Has("poll") || len(parts) == 2 && parts[1] == "send" {
app.buffered_render_tweet_htmx(w, "messages-with-poller", chat_view_data)
app.buffered_render_htmx(w, "messages-with-poller", global_data, chat_view_data)
return
}
}
app.buffered_render_tweet_page(w, "tpl/messages.tpl", chat_view_data)
app.buffered_render_page(w, "tpl/messages.tpl", global_data, chat_view_data)
}

View File

@ -18,40 +18,28 @@ type SearchPageData struct {
SortOrder persistence.SortOrder
SortOrderOptions []string
IsUsersSearch bool
UsersList []scraper.User
UserIDs []scraper.UserID
// TODO: fill out the search text in the search bar as well (needs modifying the base template)
}
func NewSearchPageData() SearchPageData {
ret := SearchPageData{SortOrderOptions: []string{}}
ret := SearchPageData{SortOrderOptions: []string{}, Feed: persistence.NewFeed()}
for i := 0; i < 4; i++ { // Don't include "Liked At" option which is #4
ret.SortOrderOptions = append(ret.SortOrderOptions, persistence.SortOrder(i).String())
}
return ret
}
func (t SearchPageData) Tweet(id scraper.TweetID) scraper.Tweet {
return t.Tweets[id]
}
func (t SearchPageData) User(id scraper.UserID) scraper.User {
return t.Users[id]
}
func (t SearchPageData) Retweet(id scraper.TweetID) scraper.Retweet {
return t.Retweets[id]
}
func (t SearchPageData) Space(id scraper.SpaceID) scraper.Space {
return t.Spaces[id]
}
func (t SearchPageData) FocusedTweetID() scraper.TweetID {
return scraper.TweetID(0)
}
func (app *Application) SearchUsers(w http.ResponseWriter, r *http.Request) {
ret := NewSearchPageData()
ret.IsUsersSearch = true
ret.SearchText = strings.Trim(r.URL.Path, "/")
ret.UsersList = app.Profile.SearchUsers(ret.SearchText)
app.buffered_render_tweet_page(w, "tpl/search.tpl", ret)
ret.UserIDs = []scraper.UserID{}
for _, u := range app.Profile.SearchUsers(ret.SearchText) {
ret.TweetTrove.Users[u.ID] = u
ret.UserIDs = append(ret.UserIDs, u.ID)
}
app.buffered_render_page(w, "tpl/search.tpl", PageGlobalData{TweetTrove: ret.Feed.TweetTrove, SearchText: ret.SearchText}, ret)
}
func (app *Application) Search(w http.ResponseWriter, r *http.Request) {
@ -138,8 +126,8 @@ func (app *Application) Search(w http.ResponseWriter, r *http.Request) {
if r.Header.Get("HX-Request") == "true" && c.CursorPosition == persistence.CURSOR_MIDDLE {
// It's a Show More request
app.buffered_render_tweet_htmx(w, "timeline", data)
app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: data.Feed.TweetTrove, SearchText: search_text}, data)
} else {
app.buffered_render_tweet_page(w, "tpl/search.tpl", data)
app.buffered_render_page(w, "tpl/search.tpl", PageGlobalData{TweetTrove: data.Feed.TweetTrove, SearchText: search_text}, data)
}
}

View File

@ -26,12 +26,10 @@ func (app *Application) Timeline(w http.ResponseWriter, r *http.Request) {
}
}
data := UserProfileData{Feed: feed} // TODO: wrong struct
if r.Header.Get("HX-Request") == "true" && c.CursorPosition == persistence.CURSOR_MIDDLE {
// It's a Show More request
app.buffered_render_tweet_htmx(w, "timeline", data)
app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, feed)
} else {
app.buffered_render_tweet_page(w, "tpl/offline_timeline.tpl", data)
app.buffered_render_page(w, "tpl/offline_timeline.tpl", PageGlobalData{TweetTrove: feed.TweetTrove}, feed)
}
}

View File

@ -24,21 +24,6 @@ func NewTweetDetailData() TweetDetailData {
TweetDetailView: persistence.NewTweetDetailView(),
}
}
func (t TweetDetailData) Tweet(id scraper.TweetID) scraper.Tweet {
return t.Tweets[id]
}
func (t TweetDetailData) User(id scraper.UserID) scraper.User {
return t.Users[id]
}
func (t TweetDetailData) Retweet(id scraper.TweetID) scraper.Retweet {
return t.Retweets[id]
}
func (t TweetDetailData) Space(id scraper.SpaceID) scraper.Space {
return t.Spaces[id]
}
func (t TweetDetailData) FocusedTweetID() scraper.TweetID {
return t.MainTweetID
}
func (app *Application) ensure_tweet(id scraper.TweetID, is_forced bool, is_conversation_required bool) (scraper.Tweet, error) {
is_available := false
@ -95,7 +80,7 @@ func (app *Application) LikeTweet(w http.ResponseWriter, r *http.Request) {
panic_if(err)
tweet.IsLikedByCurrentUser = true
app.buffered_render_basic_htmx(w, "likes-count", tweet)
app.buffered_render_htmx(w, "likes-count", PageGlobalData{}, tweet)
}
func (app *Application) UnlikeTweet(w http.ResponseWriter, r *http.Request) {
tweet := get_tweet_from_context(r.Context())
@ -109,7 +94,7 @@ func (app *Application) UnlikeTweet(w http.ResponseWriter, r *http.Request) {
panic_if(err)
tweet.IsLikedByCurrentUser = false
app.buffered_render_basic_htmx(w, "likes-count", tweet)
app.buffered_render_htmx(w, "likes-count", PageGlobalData{}, tweet)
}
func (app *Application) TweetDetail(w http.ResponseWriter, r *http.Request) {
@ -144,13 +129,12 @@ func (app *Application) TweetDetail(w http.ResponseWriter, r *http.Request) {
return
}
trove, err := app.Profile.GetTweetDetail(data.MainTweetID, app.ActiveUser.ID)
twt_detail, err := app.Profile.GetTweetDetail(data.MainTweetID, app.ActiveUser.ID)
panic_if(err) // ErrNotInDB should be impossible, since we already fetched the single tweet successfully
data.TweetDetailView = trove
// fmt.Println(to_json(data))
data.TweetDetailView = twt_detail
app.buffered_render_tweet_page(w, "tpl/tweet_detail.tpl", data)
app.buffered_render_page(w, "tpl/tweet_detail.tpl", PageGlobalData{TweetTrove: twt_detail.TweetTrove}, data)
}
type key string

View File

@ -10,28 +10,6 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)
type UserProfileData struct {
persistence.Feed
scraper.UserID
FeedType string
}
func (t UserProfileData) Tweet(id scraper.TweetID) scraper.Tweet {
return t.Tweets[id]
}
func (t UserProfileData) User(id scraper.UserID) scraper.User {
return t.Users[id]
}
func (t UserProfileData) Retweet(id scraper.TweetID) scraper.Retweet {
return t.Retweets[id]
}
func (t UserProfileData) Space(id scraper.SpaceID) scraper.Space {
return t.Spaces[id]
}
func (t UserProfileData) FocusedTweetID() scraper.TweetID {
return scraper.TweetID(0)
}
func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
app.traceLog.Printf("'UserFeed' handler (path: %q)", r.URL.Path)
@ -114,7 +92,12 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
}
feed.Users[user.ID] = user
data := UserProfileData{Feed: feed, UserID: user.ID}
data := struct {
persistence.Feed
scraper.UserID
FeedType string
}{Feed: feed, UserID: user.ID}
if len(parts) == 2 {
data.FeedType = parts[1]
} else {
@ -123,26 +106,21 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
if r.Header.Get("HX-Request") == "true" && c.CursorPosition == persistence.CURSOR_MIDDLE {
// It's a Show More request
app.buffered_render_tweet_htmx(w, "timeline", data)
app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, data)
} else {
app.buffered_render_tweet_page(w, "tpl/user_feed.tpl", data)
app.buffered_render_page(w, "tpl/user_feed.tpl", PageGlobalData{TweetTrove: feed.TweetTrove}, data)
}
}
type ListData struct {
Title string
Users []scraper.User
}
func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, user scraper.User) {
app.buffered_render_basic_page(w, "tpl/list.tpl", ListData{
Title: fmt.Sprintf("Followed by @%s", user.Handle),
Users: app.Profile.GetFollowees(user.ID),
})
data, trove := NewListData(app.Profile.GetFollowees(user.ID))
trove.Users[user.ID] = user // Not loaded otherwise; needed for the profile image in the login button on the sidebar
data.Title = fmt.Sprintf("Followed by @%s", user.Handle)
app.buffered_render_page(w, "tpl/list.tpl", PageGlobalData{TweetTrove: trove}, data)
}
func (app *Application) UserFollowers(w http.ResponseWriter, r *http.Request, user scraper.User) {
app.buffered_render_basic_page(w, "tpl/list.tpl", ListData{
Title: fmt.Sprintf("Followers of @%s", user.Handle),
Users: app.Profile.GetFollowers(user.ID),
})
data, trove := NewListData(app.Profile.GetFollowers(user.ID))
trove.Users[user.ID] = user
data.Title = fmt.Sprintf("@%s's followers", user.Handle)
app.buffered_render_page(w, "tpl/list.tpl", PageGlobalData{TweetTrove: trove}, data)
}

View File

@ -0,0 +1,212 @@
package webserver
import (
"bytes"
"fmt"
"html/template"
"io"
"io/fs"
"net/http"
"net/url"
"path"
"path/filepath"
"regexp"
"runtime"
"github.com/Masterminds/sprig/v3"
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)
// Absolute directory containing this source file, captured at program start.
// Used by `get_filepath` to resolve template paths when running against the
// on-disk filesystem (i.e., when not using the embedded files).
var this_dir string
func init() {
_, this_file, _, _ := runtime.Caller(0) // `this_file` is absolute path to this source file
this_dir = path.Dir(this_file)
}
// Resolve a template path: embedded builds use the path as-is, while on-disk
// builds anchor it relative to this source file's directory.
func get_filepath(s string) string {
	if use_embedded != "true" {
		return path.Join(this_dir, s)
	}
	return s
}
func glob(path string) []string {
var ret []string
var err error
if use_embedded == "true" {
ret, err = fs.Glob(embedded_files, get_filepath(path))
} else {
ret, err = filepath.Glob(get_filepath(path))
}
panic_if(err)
return ret
}
// Config object for buffered rendering
type renderer struct {
Funcs template.FuncMap // extra template funcs, merged over the sprig defaults in BufferedRender
Filenames []string // template files to parse (on-disk or embedded paths)
TplName string // name of the template to execute (e.g. "base" or a partial's name)
Data interface{} // value passed to the template as "."
}
// Render the given template into an in-memory buffer first, then copy the
// buffer to `w`. This avoids the possibility of failing partway through the
// rendering and sending an incomplete response with "Bad Request" or
// "Server Error" at the end.
func (r renderer) BufferedRender(w io.Writer) {
	// Start from the sprig function map, then layer the caller's funcs on top.
	funcs := sprig.FuncMap()
	for name, fn := range r.Funcs {
		funcs[name] = fn
	}

	var tpl *template.Template
	var err error
	if use_embedded == "true" {
		tpl, err = template.New("").Funcs(funcs).ParseFS(embedded_files, r.Filenames...)
	} else {
		tpl, err = template.New("").Funcs(funcs).ParseFiles(r.Filenames...)
	}
	panic_if(err)

	var buf bytes.Buffer
	panic_if(tpl.ExecuteTemplate(&buf, r.TplName, r.Data))
	_, err = buf.WriteTo(w)
	panic_if(err)
}
// Build the template FuncMap shared by full-page and HTMX rendering:
// lookups into the page's global data (tweets, users, retweets, spaces),
// app-level helpers, and free-function helpers.
func (app *Application) make_func_map(global_data PageGlobalData) template.FuncMap {
	return template.FuncMap{
		"tweet":                  global_data.Tweet,
		"user":                   global_data.User,
		"retweet":                global_data.Retweet,
		"space":                  global_data.Space,
		"active_user":            app.get_active_user,
		"focused_tweet_id":       global_data.GetFocusedTweetID,
		"search_text":            global_data.GetSearchText,
		"get_entities":           get_entities,
		"get_tombstone_text":     get_tombstone_text,
		"cursor_to_query_params": cursor_to_query_params,
	}
}

// Render the "base" template, creating a full HTML page corresponding to the given template file,
// with all available partials.
func (app *Application) buffered_render_page(w http.ResponseWriter, tpl_file string, global_data PageGlobalData, tpl_data interface{}) {
	partials := append(glob("tpl/includes/*.tpl"), glob("tpl/tweet_page_includes/*.tpl")...)
	r := renderer{
		Funcs:     app.make_func_map(global_data),
		Filenames: append(partials, get_filepath(tpl_file)),
		TplName:   "base",
		Data:      tpl_data,
	}
	r.BufferedRender(w)
}

// Render a particular template (HTMX response, i.e., not a full page)
func (app *Application) buffered_render_htmx(w http.ResponseWriter, tpl_name string, global_data PageGlobalData, tpl_data interface{}) {
	partials := append(glob("tpl/includes/*.tpl"), glob("tpl/tweet_page_includes/*.tpl")...)
	r := renderer{
		Funcs:     app.make_func_map(global_data),
		Filenames: partials,
		TplName:   tpl_name,
		Data:      tpl_data,
	}
	r.BufferedRender(w)
}
// Template helper (registered as "active_user"): returns the currently
// signed-in user from the Application.
func (app *Application) get_active_user() scraper.User {
return app.ActiveUser
}
// Serialize a pagination cursor into URL query parameters
// ("cursor" and "sort-order"), suitable for a "Show More" link.
func cursor_to_query_params(c persistence.Cursor) string {
	params := url.Values{
		"cursor":     []string{fmt.Sprint(c.CursorValue)},
		"sort-order": []string{c.SortOrder.String()},
	}
	return params.Encode()
}
// EntityType distinguishes the kinds of spans found in tweet text.
type EntityType int

const (
	ENTITY_TYPE_TEXT EntityType = iota
	ENTITY_TYPE_MENTION
	ENTITY_TYPE_HASHTAG
)

// Entity is one span of tweet text: plain text, an @mention, or a #hashtag.
type Entity struct {
	EntityType
	Contents string
}

// Matches an "@" or "#" token at start-of-string or preceded by one non-word
// character (requiring the non-word prefix avoids matching, e.g., the domain
// part of email addresses).  Compiled once at package scope, not per call.
var entity_pattern = regexp.MustCompile(`(\W|^)[@#]\w+`)

// Split `text` into a sequence of entities: plain-text runs, @mentions, and
// #hashtags.  Mention/hashtag Contents have the leading "@"/"#" stripped.
func get_entities(text string) []Entity {
	ret := []Entity{}
	start := 0
	for _, idxs := range entity_pattern.FindAllStringIndex(text, -1) {
		// The match may include one leading non-word character (space, paren,
		// punctuation, ...).  Keep that character with the preceding text
		// entity so the token itself starts at "@" or "#".  (Checking only
		// for ' ' and '\n' here, as before, misclassified e.g. "(@user" as a
		// hashtag with contents "@user".)
		if text[idxs[0]] != '@' && text[idxs[0]] != '#' {
			idxs[0] += 1
		}
		if start != idxs[0] {
			ret = append(ret, Entity{ENTITY_TYPE_TEXT, text[start:idxs[0]]})
		}
		piece := text[idxs[0]+1 : idxs[1]] // Chop off the "#" or "@"
		if text[idxs[0]] == '@' {
			ret = append(ret, Entity{ENTITY_TYPE_MENTION, piece})
		} else {
			ret = append(ret, Entity{ENTITY_TYPE_HASHTAG, piece})
		}
		start = idxs[1]
	}
	if start < len(text) {
		ret = append(ret, Entity{ENTITY_TYPE_TEXT, text[start:]})
	}
	return ret
}
// Return the display text for a tombstoned tweet: the specific tombstone
// message if one was captured, otherwise the tombstone's type label.
func get_tombstone_text(t scraper.Tweet) string {
	if t.TombstoneText == "" {
		return t.TombstoneType
	}
	return t.TombstoneText
}
// TODO: this name sucks
// PageGlobalData carries the data available to every template via helper
// funcs: the TweetTrove of all objects on the page, plus page-level state
// (search text, which tweet is focused).
type PageGlobalData struct {
	scraper.TweetTrove
	SearchText     string
	FocusedTweetID scraper.TweetID
}

// Tweet looks up a tweet by ID in the page's trove.
func (d PageGlobalData) Tweet(id scraper.TweetID) scraper.Tweet {
	return d.Tweets[id]
}

// User looks up a user by ID in the page's trove.
func (d PageGlobalData) User(id scraper.UserID) scraper.User {
	return d.Users[id]
}

// Retweet looks up a retweet by ID in the page's trove.
func (d PageGlobalData) Retweet(id scraper.TweetID) scraper.Retweet {
	return d.Retweets[id]
}

// Space looks up a Space by ID in the page's trove.
func (d PageGlobalData) Space(id scraper.SpaceID) scraper.Space {
	return d.Spaces[id]
}

// GetFocusedTweetID returns the ID of the tweet to highlight (zero if none).
func (d PageGlobalData) GetFocusedTweetID() scraper.TweetID {
	return d.FocusedTweetID
}

// GetSearchText returns the search text for the search bar.
// (Removed a leftover debug Println that wrote to stdout on every render.)
func (d PageGlobalData) GetSearchText() string {
	return d.SearchText
}

View File

@ -1,23 +1,9 @@
package webserver
import (
"bytes"
"fmt"
"html/template"
"io"
"io/fs"
"net/http"
"net/url"
"path"
"path/filepath"
"regexp"
"runtime"
"runtime/debug"
"github.com/Masterminds/sprig/v3"
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)
func panic_if(err error) {
@ -26,32 +12,6 @@ func panic_if(err error) {
}
}
var this_dir string
func init() {
_, this_file, _, _ := runtime.Caller(0) // `this_file` is absolute path to this source file
this_dir = path.Dir(this_file)
}
func get_filepath(s string) string {
if use_embedded == "true" {
return s
}
return path.Join(this_dir, s)
}
func glob(path string) []string {
var ret []string
var err error
if use_embedded == "true" {
ret, err = fs.Glob(embedded_files, get_filepath(path))
} else {
ret, err = filepath.Glob(get_filepath(path))
}
panic_if(err)
return ret
}
// func (app *Application) error_400(w http.ResponseWriter) {
// http.Error(w, "Bad Request", 400)
// }
@ -76,175 +36,3 @@ func (app *Application) error_500(w http.ResponseWriter, err error) {
}
http.Error(w, "Server error :(", 500)
}
type TweetCollection interface {
Tweet(id scraper.TweetID) scraper.Tweet
User(id scraper.UserID) scraper.User
Retweet(id scraper.TweetID) scraper.Retweet
Space(id scraper.SpaceID) scraper.Space
FocusedTweetID() scraper.TweetID
}
// Creates a template from the given template file using all the available partials.
// Calls `app.buffered_render` to render the created template.
func (app *Application) buffered_render_tweet_page(w http.ResponseWriter, tpl_file string, data TweetCollection) {
partials := append(glob("tpl/includes/*.tpl"), glob("tpl/tweet_page_includes/*.tpl")...)
r := renderer{
Funcs: func_map(template.FuncMap{
"tweet": data.Tweet,
"user": data.User,
"retweet": data.Retweet,
"space": data.Space,
"active_user": app.get_active_user,
"focused_tweet_id": data.FocusedTweetID,
"get_entities": get_entities,
"get_tombstone_text": get_tombstone_text,
"cursor_to_query_params": cursor_to_query_params,
}),
Filenames: append(partials, get_filepath(tpl_file)),
TplName: "base",
Data: data,
}
r.BufferedRender(w)
}
// Creates a template from the given template file using all the available partials.
// Calls `app.buffered_render` to render the created template.
func (app *Application) buffered_render_basic_page(w http.ResponseWriter, tpl_file string, data interface{}) {
partials := glob("tpl/includes/*.tpl")
r := renderer{
Funcs: func_map(template.FuncMap{"active_user": app.get_active_user}),
Filenames: append(partials, get_filepath(tpl_file)),
TplName: "base",
Data: data,
}
r.BufferedRender(w)
}
func (app *Application) buffered_render_tweet_htmx(w http.ResponseWriter, tpl_name string, data TweetCollection) {
partials := append(glob("tpl/includes/*.tpl"), glob("tpl/tweet_page_includes/*.tpl")...)
r := renderer{
Funcs: func_map(template.FuncMap{
"tweet": data.Tweet,
"user": data.User,
"retweet": data.Retweet,
"space": data.Space,
"active_user": app.get_active_user,
"focused_tweet_id": data.FocusedTweetID,
"get_entities": get_entities,
"get_tombstone_text": get_tombstone_text,
"cursor_to_query_params": cursor_to_query_params,
}),
Filenames: partials,
TplName: tpl_name,
Data: data,
}
r.BufferedRender(w)
}
func (app *Application) buffered_render_basic_htmx(w http.ResponseWriter, tpl_name string, data interface{}) {
partials := glob("tpl/includes/*.tpl")
r := renderer{
Funcs: func_map(template.FuncMap{"active_user": app.get_active_user}),
Filenames: partials,
TplName: tpl_name,
Data: data,
}
r.BufferedRender(w)
}
func (app *Application) get_active_user() scraper.User {
return app.ActiveUser
}
type EntityType int
const (
ENTITY_TYPE_TEXT EntityType = iota
ENTITY_TYPE_MENTION
ENTITY_TYPE_HASHTAG
)
type Entity struct {
EntityType
Contents string
}
func get_entities(text string) []Entity {
ret := []Entity{}
start := 0
for _, idxs := range regexp.MustCompile(`(\s|^)[@#]\w+`).FindAllStringIndex(text, -1) {
// Handle leading whitespace. Only match start-of-string or leading whitespace to avoid matching, e.g., emails
if text[idxs[0]] == ' ' || text[idxs[0]] == '\n' {
idxs[0] += 1
}
if start != idxs[0] {
ret = append(ret, Entity{ENTITY_TYPE_TEXT, text[start:idxs[0]]})
}
piece := text[idxs[0]+1 : idxs[1]] // Chop off the "#" or "@"
if text[idxs[0]] == '@' {
ret = append(ret, Entity{ENTITY_TYPE_MENTION, piece})
} else {
ret = append(ret, Entity{ENTITY_TYPE_HASHTAG, piece})
}
start = idxs[1]
}
if start < len(text) {
ret = append(ret, Entity{ENTITY_TYPE_TEXT, text[start:]})
}
return ret
}
func get_tombstone_text(t scraper.Tweet) string {
if t.TombstoneText != "" {
return t.TombstoneText
}
return t.TombstoneType
}
func func_map(extras template.FuncMap) template.FuncMap {
ret := sprig.FuncMap()
for i := range extras {
ret[i] = extras[i]
}
return ret
}
type renderer struct {
Funcs template.FuncMap
Filenames []string
TplName string
Data interface{}
}
// Render the given template using a bytes.Buffer. This avoids the possibility of failing partway
// through the rendering, and sending an imcomplete response with "Bad Request" or "Server Error" at the end.
func (r renderer) BufferedRender(w io.Writer) {
var tpl *template.Template
var err error
if use_embedded == "true" {
tpl, err = template.New("").Funcs(r.Funcs).ParseFS(embedded_files, r.Filenames...)
} else {
tpl, err = template.New("").Funcs(r.Funcs).ParseFiles(r.Filenames...)
}
panic_if(err)
buf := new(bytes.Buffer)
err = tpl.ExecuteTemplate(buf, r.TplName, r.Data)
panic_if(err)
_, err = buf.WriteTo(w)
panic_if(err)
}
func cursor_to_query_params(c persistence.Cursor) string {
result := url.Values{}
result.Set("cursor", fmt.Sprint(c.CursorValue))
result.Set("sort-order", c.SortOrder.String())
return result.Encode()
}

View File

@ -1,5 +1,5 @@
{{define "title"}}{{.Title}}{{end}}
{{define "main"}}
{{template "list" .Users}}
{{template "list" .UserIDs}}
{{end}}

View File

@ -20,7 +20,7 @@
</div>
</div>
{{if .IsUsersSearch}}
{{template "list" .UsersList}}
{{template "list" .UserIDs}}
{{else}}
<div class="sort-order-container">
<span class="sort-order-label">order:</span>

View File

@ -1,9 +1,10 @@
{{define "list"}}
<div class="users-list-container">
{{range .}}
{{$user := (user .)}}
<div class="user">
{{template "author-info" .}}
<p class="bio">{{.Bio}}</p>
{{template "author-info" $user}}
<p class="bio">{{$user.Bio}}</p>
</div>
{{end}}
</div>