diff --git a/internal/webserver/handler_user_feed.go b/internal/webserver/handler_user_feed.go index df663dc..89dd1fa 100644 --- a/internal/webserver/handler_user_feed.go +++ b/internal/webserver/handler_user_feed.go @@ -12,6 +12,7 @@ import ( type UserProfileData struct { persistence.Feed scraper.UserID + FeedType string } func (t UserProfileData) Tweet(id scraper.TweetID) scraper.Tweet { @@ -41,22 +42,42 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) { return } - if len(parts) == 2 && parts[1] == "scrape" { + if len(parts) > 1 && parts[len(parts)-1] == "scrape" { if app.IsScrapingDisabled { http.Error(w, "Scraping is disabled (are you logged in?)", 401) return } - // Run scraper - trove, err := scraper.GetUserFeedGraphqlFor(user.ID, 50) // TODO: parameterizable - if err != nil { - app.ErrorLog.Print(err) - // TOOD: show error in UI + if len(parts) == 2 { // Already checked the last part is "scrape" + // Run scraper + trove, err := scraper.GetUserFeedGraphqlFor(user.ID, 50) // TODO: parameterizable + if err != nil { + app.ErrorLog.Print(err) + // TODO: show error in UI + } + app.Profile.SaveTweetTrove(trove) + } else if len(parts) == 3 && parts[1] == "likes" { + trove, err := scraper.GetUserLikes(user.ID, 50) // TODO: parameterizable + if err != nil { + app.ErrorLog.Print(err) + // TODO: show error in UI + } + app.Profile.SaveTweetTrove(trove) } - app.Profile.SaveTweetTrove(trove) } - c := persistence.NewUserFeedCursor(user.Handle) + var c persistence.Cursor + if len(parts) > 1 && parts[1] == "likes" { + c = persistence.NewUserFeedLikesCursor(user.Handle) + } else { + c = persistence.NewUserFeedCursor(user.Handle) + } + if len(parts) > 1 && parts[1] == "without_replies" { + c.FilterReplies = persistence.EXCLUDE + } + if len(parts) > 1 && parts[1] == "media" { + c.FilterMedia = persistence.REQUIRE + } err = parse_cursor_value(&c, r) if err != nil { app.error_400_with_message(w, "invalid cursor (must be a number)") @@ -74,6 +95,11 @@ 
func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) { feed.Users[user.ID] = user data := UserProfileData{Feed: feed, UserID: user.ID} + if len(parts) == 2 { + data.FeedType = parts[1] + } else { + data.FeedType = "" + } if r.Header.Get("HX-Request") == "true" && c.CursorPosition == persistence.CURSOR_MIDDLE { // It's a Show More request diff --git a/internal/webserver/server_test.go b/internal/webserver/server_test.go index e527704..7eda36a 100644 --- a/internal/webserver/server_test.go +++ b/internal/webserver/server_test.go @@ -133,6 +133,54 @@ func TestUserFeedWithCursorBadNumber(t *testing.T) { require.Equal(resp.StatusCode, 400) } +func TestUserFeedTweetsOnlyTab(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + + resp := do_request(httptest.NewRequest("GET", "/Peter_Nimitz/without_replies", nil)) + require.Equal(resp.StatusCode, 200) + + root, err := html.Parse(resp.Body) + require.NoError(err) + tweets := cascadia.QueryAll(root, selector(".timeline > .tweet")) + assert.Len(tweets, 2) +} + +func TestUserFeedMediaTab(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + + resp := do_request(httptest.NewRequest("GET", "/Cernovich/media", nil)) + require.Equal(resp.StatusCode, 200) + + root, err := html.Parse(resp.Body) + require.NoError(err) + tweets := cascadia.QueryAll(root, selector(".timeline > .tweet")) + assert.Len(tweets, 1) +} + +func TestUserFeedLikesTab(t *testing.T) { + assert := assert.New(t) + require := require.New(t) + + resp := do_request(httptest.NewRequest("GET", "/MysteryGrove/likes", nil)) + require.Equal(resp.StatusCode, 200) + + root, err := html.Parse(resp.Body) + require.NoError(err) + tweets := cascadia.QueryAll(root, selector(".timeline > .tweet")) + assert.Len(tweets, 5) + + // Double check pagination works properly + resp = do_request(httptest.NewRequest("GET", "/MysteryGrove/likes?cursor=5", nil)) + require.Equal(resp.StatusCode, 200) + + root, err = html.Parse(resp.Body) + 
require.NoError(err) + tweets = cascadia.QueryAll(root, selector(".timeline > .tweet")) + assert.Len(tweets, 4) +} + // Timeline page // ------------- diff --git a/internal/webserver/static/styles.css b/internal/webserver/static/styles.css index b91a624..9849142 100644 --- a/internal/webserver/static/styles.css +++ b/internal/webserver/static/styles.css @@ -221,6 +221,21 @@ h3 { .user-feed-header .profile-image { width: 8em; } +.user-feed-header a.user-feed-tab { + flex-grow: 1; + text-align: center; + font-size: 1.1em; + font-weight: bold; + color: var(--color-twitter-text-gray); + padding: 0.8em; +} +.user-feed-header a.user-feed-tab.active-tab { + color: var(--color-twitter-blue); + border-bottom: 0.2em solid var(--color-twitter-blue); +} +.user-feed-header a.user-feed-tab:hover { + color: var(--color-twitter-blue); +} .row { display: flex; diff --git a/internal/webserver/tpl/user_feed.tpl b/internal/webserver/tpl/user_feed.tpl index 7dbb929..fe272eb 100644 --- a/internal/webserver/tpl/user_feed.tpl +++ b/internal/webserver/tpl/user_feed.tpl @@ -59,10 +59,31 @@ Re-fetch user feed + + + + +
+ + Tweets and replies + + + Tweets + + + Media + + + Likes + +
diff --git a/pkg/persistence/compound_ssf_queries.go b/pkg/persistence/compound_ssf_queries.go index 596bc7e..a7923fd 100644 --- a/pkg/persistence/compound_ssf_queries.go +++ b/pkg/persistence/compound_ssf_queries.go @@ -180,6 +180,23 @@ func NewUserFeedCursor(h scraper.UserHandle) Cursor { } } +// Generate a cursor appropriate for a user's Media tab +func NewUserFeedMediaCursor(h scraper.UserHandle) Cursor { + return Cursor{ + Keywords: []string{}, + ToUserHandles: []scraper.UserHandle{}, + SinceTimestamp: scraper.TimestampFromUnix(0), + UntilTimestamp: scraper.TimestampFromUnix(0), + CursorPosition: CURSOR_START, + CursorValue: 0, + SortOrder: SORT_ORDER_NEWEST, + PageSize: 50, + + ByUserHandle: h, + FilterMedia: REQUIRE, + } +} + // Generate a cursor for a User's Likes func NewUserFeedLikesCursor(h scraper.UserHandle) Cursor { return Cursor{ @@ -263,6 +280,7 @@ func (c *Cursor) apply_token(token string) error { c.ToUserHandles = append(c.ToUserHandles, scraper.UserHandle(parts[1])) case "retweeted_by": c.RetweetedByUserHandle = scraper.UserHandle(parts[1]) + c.FilterRetweets = NONE // Clear the "exclude retweets" filter set by default in NewCursor case "liked_by": c.LikedByUserHandle = scraper.UserHandle(parts[1]) case "since": @@ -283,6 +301,10 @@ func (c *Cursor) apply_token(token string) error { c.FilterPolls = REQUIRE case "spaces": c.FilterSpaces = REQUIRE + case "replies": + c.FilterReplies = REQUIRE + case "retweets": + c.FilterRetweets = REQUIRE } case "-filter": switch parts[1] { @@ -298,6 +320,10 @@ func (c *Cursor) apply_token(token string) error { c.FilterPolls = EXCLUDE case "spaces": c.FilterSpaces = EXCLUDE + case "replies": + c.FilterReplies = EXCLUDE + case "retweets": + c.FilterRetweets = EXCLUDE } }