Add pagination to notifications
- handle notifications end-of-feed
- add support for "quoted" and "mentioned" notifications
parent 04991ad554
commit 2c7c35e23f
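
As a rough illustration of the pagination flow this commit adds, here is a minimal, self-contained sketch: the caller asks for a count of notifications and the fetcher keeps following cursors until it has enough, hits the end of the feed, or gets rate limited. The `client`, `page`, and fake cursor names below are stand-ins for illustration only, not the real scraper types.

package main

import (
	"errors"
	"fmt"
)

// ErrRateLimited stands in for the scraper's rate-limit error.
var ErrRateLimited = errors.New("rate limited")

// page stands in for one notifications API response.
type page struct {
	items     []string // stand-in for the notifications on this page
	cursor    string   // cursor pointing at the next page
	endOfFeed bool     // stand-in for IsEndOfFeed()
}

// client fakes the paged API: GetNotificationsPage("") returns the first page,
// and each page's cursor selects the next one.
type client struct {
	pages map[string]page
}

func (c client) GetNotificationsPage(cursor string) (page, error) {
	p, ok := c.pages[cursor]
	if !ok {
		return page{}, errors.New("unknown cursor: " + cursor)
	}
	return p, nil
}

// GetNotifications mirrors the loop added in this commit: keep fetching pages
// until we have how_many notifications, hit the end of the feed, or get rate limited.
func (c client) GetNotifications(how_many int) ([]string, error) {
	resp, err := c.GetNotificationsPage("")
	if err != nil {
		return nil, err
	}
	notifs := resp.items

	for len(notifs) < how_many {
		resp, err = c.GetNotificationsPage(resp.cursor)
		if errors.Is(err, ErrRateLimited) {
			fmt.Println("Rate limited!")
			break
		} else if err != nil {
			return nil, err
		}
		if resp.endOfFeed {
			fmt.Println("End of feed!")
			break
		}
		notifs = append(notifs, resp.items...) // trove.MergeWith(new_trove) in the real code
	}
	return notifs, nil
}

func main() {
	c := client{pages: map[string]page{
		"":   {items: []string{"like", "retweet"}, cursor: "p2"},
		"p2": {items: []string{"mention"}, cursor: "p3"},
		"p3": {endOfFeed: true},
	}}
	notifs, err := c.GetNotifications(5)
	if err != nil {
		panic(err)
	}
	fmt.Println(notifs) // [like retweet mention]
}

In the actual diff below, this loop is the new (*API) GetNotifications, built on the renamed GetNotificationsPage, TweetResponse.IsEndOfFeed, and TweetTrove.MergeWith.
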
@@ -599,12 +599,8 @@ func send_dm_reacc(room_id string, in_reply_to_id int, reacc string) {
 }
 
 func get_notifications(how_many int) {
-	resp, err := api.GetNotifications("") // TODO: how_many
-	if err != nil {
-		panic(err)
-	}
-	trove, err := resp.ToTweetTroveAsNotifications(api.UserID)
-	if err != nil {
+	trove, err := api.GetNotifications(how_many)
+	if err != nil && !errors.Is(err, scraper.END_OF_FEED) {
 		panic(err)
 	}
 	profile.SaveTweetTrove(trove, true, &api)
@@ -18,8 +18,8 @@ func (p Profile) SaveNotification(n Notification) {
 		on conflict do update
 		set sent_at = max(sent_at, :sent_at),
 			sort_index = max(sort_index, :sort_index),
-			action_user_id = :action_user_id,
-			action_tweet_id = :action_tweet_id
+			action_user_id = nullif(:action_user_id, 0),
+			action_tweet_id = nullif(:action_tweet_id, 0)
 	`, n)
 	if err != nil {
 		panic(err)
@@ -524,7 +524,11 @@ func (t *TweetResponse) GetCursor() string {
  * In this case, we look for an "entries" object that has only cursors in it, and no tweets.
  */
 func (t *TweetResponse) IsEndOfFeed() bool {
-	entries := t.Timeline.Instructions[0].AddEntries.Entries
+	for _, instr := range t.Timeline.Instructions {
+		entries := instr.AddEntries.Entries
+		if len(entries) == 0 {
+			continue // Not the main instruction
+		}
 	if len(entries) > 2 {
 		return false
 	}
@@ -533,6 +537,7 @@ func (t *TweetResponse) IsEndOfFeed() bool {
 			return false
 		}
 	}
+	}
 	return true
 }
 
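
The end-of-feed check above now scans every instruction instead of only the first. A minimal sketch of the heuristic, using hypothetical stand-in types (the real method additionally verifies that the leftover entries are cursors, which is omitted here): a response counts as end-of-feed when no instruction carries more than two entries, i.e. nothing beyond the top and bottom cursors.

package main

import "fmt"

// instruction stands in for one timeline instruction; entries mirrors AddEntries.Entries.
type instruction struct {
	entries []string
}

// isEndOfFeed sketches the updated check: scan every instruction, skip empty ones,
// and report "not end of feed" if any instruction still has more than two entries
// (i.e. real content, not just cursors).
func isEndOfFeed(instructions []instruction) bool {
	for _, instr := range instructions {
		if len(instr.entries) == 0 {
			continue // not the main instruction
		}
		if len(instr.entries) > 2 {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(isEndOfFeed([]instruction{
		{}, // e.g. an auxiliary instruction with no entries
		{entries: []string{"cursor-top", "cursor-bottom"}},
	})) // true
	fmt.Println(isEndOfFeed([]instruction{
		{entries: []string{"notification-1", "notification-2", "cursor-bottom"}},
	})) // false
}
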
@@ -1,14 +1,17 @@
 package scraper
 
 import (
+	"errors"
 	"net/url"
 	"regexp"
 	"sort"
 	"strings"
+
+	log "github.com/sirupsen/logrus"
 )
 
 // TODO: pagination
-func (api *API) GetNotifications(cursor string) (TweetResponse, error) {
+func (api *API) GetNotificationsPage(cursor string) (TweetResponse, error) {
 	url, err := url.Parse("https://api.twitter.com/2/notifications/all.json")
 	if err != nil {
 		panic(err)
@@ -24,6 +27,38 @@ func (api *API) GetNotifications(cursor string) (TweetResponse, error) {
 	return result, err
 }
 
+func (api *API) GetNotifications(how_many int) (TweetTrove, error) {
+	resp, err := api.GetNotificationsPage("")
+	if err != nil {
+		return TweetTrove{}, err
+	}
+	trove, err := resp.ToTweetTroveAsNotifications(api.UserID)
+	if err != nil {
+		panic(err)
+	}
+
+	for len(trove.Notifications) < how_many {
+		resp, err = api.GetNotificationsPage(resp.GetCursor())
+		if errors.Is(err, ErrRateLimited) {
+			log.Warnf("Rate limited!")
+			break
+		} else if err != nil {
+			return TweetTrove{}, err
+		}
+		if resp.IsEndOfFeed() {
+			log.Infof("End of feed!")
+			break
+		}
+
+		new_trove, err := resp.ToTweetTroveAsNotifications(api.UserID)
+		if err != nil {
+			panic(err)
+		}
+		trove.MergeWith(new_trove)
+	}
+	return trove, nil
+}
+
 func (t *TweetResponse) ToTweetTroveAsNotifications(current_user_id UserID) (TweetTrove, error) {
 	ret, err := t.ToTweetTrove()
 	if err != nil {
@@ -52,6 +87,10 @@ func (t *TweetResponse) ToTweetTroveAsNotifications(current_user_id UserID) (Twe
 			notification.Type = NOTIFICATION_TYPE_REPLY
 		} else if strings.Contains(entry.Content.Item.ClientEventInfo.Element, "recommended") {
 			notification.Type = NOTIFICATION_TYPE_RECOMMENDED_POST
+		} else if strings.Contains(entry.Content.Item.ClientEventInfo.Element, "quoted") {
+			notification.Type = NOTIFICATION_TYPE_QUOTE_TWEET
+		} else if strings.Contains(entry.Content.Item.ClientEventInfo.Element, "mentioned") {
+			notification.Type = NOTIFICATION_TYPE_MENTION
 		}
 		if entry.Content.Item.Content.Tweet.ID != 0 {
 			notification.ActionTweetID = TweetID(entry.Content.Item.Content.Tweet.ID)
@@ -103,7 +142,6 @@ func ParseSingleNotification(n APINotification) Notification {
 	// TODO: more types?
 
 	ret.SentAt = TimestampFromUnixMilli(n.TimestampMs)
-	// TODO: caller should set ret.UserID
 	ret.UserIDs = []UserID{}
 	for _, u := range n.Template.AggregateUserActionsV1.FromUsers {
 		ret.UserIDs = append(ret.UserIDs, UserID(u.User.ID))
@@ -28,12 +28,12 @@ func TestParseNotificationsPage(t *testing.T) {
 
 	notif1, is_ok := tweet_trove.Notifications["FKncQJGVgAQAAAABSQ3bEYsN6BFN3re-ZsU"]
 	assert.True(is_ok)
-	assert.Equal(9, notif1.Type) // login
+	assert.Equal(NOTIFICATION_TYPE_LOGIN, notif1.Type)
 	assert.Equal(current_user_id, notif1.UserID)
 
 	notif2, is_ok := tweet_trove.Notifications["FKncQJGVgAQAAAABSQ3bEYsN6BFaOkNV8aw"]
 	assert.True(is_ok)
-	assert.Equal(2, notif2.Type) // retweet
+	assert.Equal(NOTIFICATION_TYPE_RETWEET, notif2.Type)
 	assert.Equal(current_user_id, notif2.UserID)
 	assert.Equal(UserID(1458284524761075714), notif2.ActionUserID)
 	assert.Equal(TweetID(1824915465275392037), notif2.ActionTweetID)
@@ -41,31 +41,40 @@ func TestParseNotificationsPage(t *testing.T) {
 
 	notif3, is_ok := tweet_trove.Notifications["FKncQJGVgAQAAAABSQ3bEYsN6BE-OY688aw"]
 	assert.True(is_ok)
-	assert.Equal(1, notif3.Type) // like
+	assert.Equal(NOTIFICATION_TYPE_LIKE, notif3.Type)
 	assert.Equal(current_user_id, notif3.UserID)
 	assert.Equal(UserID(1458284524761075714), notif3.ActionUserID)
 	assert.Equal(TweetID(1824915465275392037), notif3.ActionTweetID)
 
 	notif4, is_ok := tweet_trove.Notifications["FKncQJGVgAQAAAABSQ3bEYsN6BGLlh8UIQs"]
 	assert.True(is_ok)
-	assert.Equal(11, notif4.Type) // recommended
+	assert.Equal(NOTIFICATION_TYPE_RECOMMENDED_POST, notif4.Type)
 	assert.Equal(current_user_id, notif4.UserID)
 
 	notif5, is_ok := tweet_trove.Notifications["FKncQJGVgAQAAAABSQ3bEYsN6BHS11EvITw"]
 	assert.True(is_ok)
-	assert.Equal(5, notif5.Type) // followed you
+	assert.Equal(NOTIFICATION_TYPE_FOLLOW, notif5.Type)
 	assert.Equal(current_user_id, notif5.UserID)
 	assert.Equal(UserID(28815778), notif5.ActionUserID)
 
 	notif6, is_ok := tweet_trove.Notifications["FKncQJGVgAQAAAABSQ3bEYsN6BE5ujkCepo"]
 	assert.True(is_ok)
-	assert.Equal(1, notif6.Type)
+	assert.Equal(NOTIFICATION_TYPE_LIKE, notif6.Type)
 	assert.Equal(current_user_id, notif6.UserID)
 	assert.Equal(UserID(1458284524761075714), notif6.ActionUserID)
 	assert.Equal(TweetID(1826778617705115868), notif6.ActionTweetID)
 	assert.Contains(notif6.UserIDs, UserID(1458284524761075714))
 	assert.Contains(notif6.UserIDs, UserID(2694459866))
 
+	notif7, is_ok := tweet_trove.Notifications["FKncQJGVgAQAAAABSQ3bEYsN6BGJjUVEd8Y"]
+	assert.True(is_ok)
+	assert.Equal(NOTIFICATION_TYPE_QUOTE_TWEET, notif7.Type)
+	assert.Equal(TweetID(1817720429941059773), notif7.ActionTweetID) // Not in the trove (using fake data)
+
+	notif8, is_ok := tweet_trove.Notifications["FKncQJGVgAQAAAABSQ3bEYsN6BG1nnPGJlQ"]
+	assert.True(is_ok)
+	assert.Equal(NOTIFICATION_TYPE_MENTION, notif8.Type)
+
 	// Check users
 	for _, u_id := range []UserID{1458284524761075714, 28815778} {
 		_, is_ok := tweet_trove.Users[u_id]
@@ -81,4 +90,18 @@ func TestParseNotificationsPage(t *testing.T) {
 	// Test cursor-bottom
 	bottom_cursor := resp.GetCursor()
 	assert.Equal("DAACDAABCgABFKncQJGVgAQIAAIAAAABCAADSQ3bEQgABIsN6BEACwACAAAAC0FaRkxRSXFNLTJJAAA", bottom_cursor)
+	assert.False(resp.IsEndOfFeed())
+}
+
+func TestParseNotificationsEndOfFeed(t *testing.T) {
+	assert := assert.New(t)
+	require := require.New(t)
+	data, err := os.ReadFile("test_responses/notifications/notifications_end_of_feed.json")
+	require.NoError(err)
+
+	var resp TweetResponse
+	err = json.Unmarshal(data, &resp)
+	require.NoError(err)
+
+	assert.True(resp.IsEndOfFeed())
 }
File diff suppressed because one or more lines are too long