Remove most uses of the singleton pattern in 'scraper' package

Alessio 2024-08-19 16:45:32 -07:00
parent adbab97ca1
commit 08e5a726a8
16 changed files with 53 additions and 110 deletions
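For orientation before the per-file hunks: the commit replaces package-level wrapper functions, which forwarded to a hidden `the_api` singleton, with method calls on an explicit `scraper.API` value held by each caller. A minimal, self-contained Go sketch of that shape (the simplified types and the `main` function here are illustrative stand-ins, not code from this repository):

package main

import "fmt"

// Simplified stand-ins for the real scraper types (scraper.API, TweetID, etc.).
type API struct{ SessionName string }

func (api *API) GetTweet(id int) (string, error) {
    return fmt.Sprintf("tweet %d fetched by session %q", id, api.SessionName), nil
}

// Old style, removed by this commit: a package-level wrapper that forwards
// to a hidden singleton.
var the_api API

func GetTweet(id int) (string, error) { return the_api.GetTweet(id) }

func main() {
    // New style: the caller owns the API value and calls the method on it.
    api := API{SessionName: "guest"}
    tweet, err := api.GetTweet(12345)
    if err != nil {
        panic(err)
    }
    fmt.Println(tweet)
}

The payoff is that each call site states which API session it uses, making the dependency visible and testable instead of implicit global state.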

View File

@@ -23,6 +23,8 @@ var profile persistence.Profile
 var version_string string
+var api scraper.API
 func main() {
     profile_dir := flag.String("profile", ".", "")
     flag.StringVar(profile_dir, "p", ".", "")
@@ -126,13 +128,15 @@ func main() {
         // Lop off the ".session" suffix (allows using `--session asdf.session` which lets you tab-autocomplete at command line)
         *session_name = (*session_name)[:len(*session_name)-8]
     }
-    scraper.InitApi(profile.LoadSession(scraper.UserHandle(*session_name)))
+    api = profile.LoadSession(scraper.UserHandle(*session_name))
+    scraper.InitApi(api)
 } else {
-    session, err := scraper.NewGuestSession()
+    var err error
+    api, err = scraper.NewGuestSession()
     if err != nil {
         log.Warnf("Unable to initialize guest session! Might be a network issue")
     } else {
-        scraper.InitApi(session)
+        scraper.InitApi(api)
     }
 }
@@ -234,7 +238,6 @@ func main() {
 // - username: twitter username or email address
 // - password: twitter account password
 func login(username string, password string) {
-    // Skip the scraper.InitApi, just use a local one since no scraping is happening
     api, err := scraper.NewGuestSession()
     if err != nil {
         die(fmt.Sprintf("Unable to create session: %s", err.Error()), false, 1)
@@ -303,7 +306,7 @@ func fetch_tweet_only(tweet_identifier string) {
         die(err.Error(), false, -1)
     }
-    tweet, err := scraper.GetTweet(tweet_id)
+    tweet, err := api.GetTweet(tweet_id)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error fetching tweet: %s", err.Error()), false, -1)
     }
@@ -328,7 +331,7 @@ func fetch_tweet_conversation(tweet_identifier string, how_many int) {
         die(err.Error(), false, -1)
     }
-    trove, err := scraper.GetTweetFullAPIV2(tweet_id, how_many)
+    trove, err := api.GetTweetFullAPIV2(tweet_id, how_many)
     if is_scrape_failure(err) {
         die(err.Error(), false, -1)
     }
@@ -349,7 +352,7 @@ func fetch_user_feed(handle string, how_many int) {
         die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
     }
-    trove, err := scraper.GetUserFeed(user.ID, how_many)
+    trove, err := api.GetUserFeed(user.ID, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error scraping feed: %s\n %s", handle, err.Error()), false, -2)
     }
@@ -367,7 +370,7 @@ func get_user_likes(handle string, how_many int) {
         die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
     }
-    trove, err := scraper.GetUserLikes(user.ID, how_many)
+    trove, err := api.GetUserLikes(user.ID, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error scraping feed: %s\n %s", handle, err.Error()), false, -2)
     }
@@ -385,7 +388,7 @@ func get_followees(handle string, how_many int) {
         die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
     }
-    trove, err := scraper.GetFollowees(user.ID, how_many)
+    trove, err := api.GetFollowees(user.ID, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error getting followees: %s\n %s", handle, err.Error()), false, -2)
     }
@@ -399,7 +402,7 @@ func get_followers(handle string, how_many int) {
     if err != nil {
         die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
     }
-    trove, err := scraper.GetFollowers(user.ID, how_many)
+    trove, err := api.GetFollowers(user.ID, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error getting followees: %s\n %s", handle, err.Error()), false, -2)
     }
@@ -409,7 +412,7 @@ func get_followers(handle string, how_many int) {
     happy_exit(fmt.Sprintf("Saved %d followers", len(trove.Users)), err)
 }
 func get_bookmarks(how_many int) {
-    trove, err := scraper.GetBookmarks(how_many)
+    trove, err := api.GetBookmarks(how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error scraping bookmarks:\n %s", err.Error()), false, -2)
     }
@@ -422,7 +425,7 @@ func get_bookmarks(how_many int) {
     )
 }
 func fetch_timeline(is_following_only bool) {
-    trove, err := scraper.GetHomeTimeline("", is_following_only)
+    trove, err := api.GetHomeTimeline("", is_following_only)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error fetching timeline:\n %s", err.Error()), false, -2)
     }
@@ -462,7 +465,7 @@ func download_user_content(handle scraper.UserHandle) {
 }
 func search(query string, how_many int) {
-    trove, err := scraper.Search(query, how_many)
+    trove, err := api.Search(query, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error scraping search results: %s", err.Error()), false, -100)
     }
@@ -490,7 +493,7 @@ func unlike_tweet(tweet_identifier string) {
     if err != nil {
         die(err.Error(), false, -1)
     }
-    err = scraper.UnlikeTweet(tweet_id)
+    err = api.UnlikeTweet(tweet_id)
     if err != nil {
         die(err.Error(), false, -10)
     }
@@ -502,7 +505,7 @@ func like_tweet(tweet_identifier string) {
     if err != nil {
         die(err.Error(), false, -1)
     }
-    like, err := scraper.LikeTweet(tweet_id)
+    like, err := api.LikeTweet(tweet_id)
     if err != nil {
         die(err.Error(), false, -10)
     }
@@ -525,7 +528,7 @@ func start_webserver(addr string, should_auto_open bool) {
 }
 func fetch_inbox(how_many int) {
-    trove, _, err := scraper.GetInbox(how_many)
+    trove, _, err := api.GetInbox(how_many)
     if err != nil {
         die(fmt.Sprintf("Failed to fetch inbox:\n %s", err.Error()), false, 1)
     }
@@ -539,7 +542,7 @@ func fetch_dm(id string, how_many int) {
         panic(err)
     }
     max_id := scraper.DMMessageID(^uint(0) >> 1)
-    trove, err := scraper.GetConversation(room.ID, max_id, how_many)
+    trove, err := api.GetConversation(room.ID, max_id, how_many)
     if err != nil {
         die(fmt.Sprintf("Failed to fetch dm:\n %s", err.Error()), false, 1)
     }
@@ -556,7 +559,7 @@ func send_dm(room_id string, text string, in_reply_to_id int) {
         die(fmt.Sprintf("No such chat room: %d", in_reply_to_id), false, 1)
     }
-    trove, err := scraper.SendDMMessage(room.ID, text, scraper.DMMessageID(in_reply_to_id))
+    trove, err := api.SendDMMessage(room.ID, text, scraper.DMMessageID(in_reply_to_id))
     if err != nil {
         die(fmt.Sprintf("Failed to send dm:\n %s", err.Error()), false, 1)
     }
@@ -573,7 +576,7 @@ func send_dm_reacc(room_id string, in_reply_to_id int, reacc string) {
     if err != nil {
         die(fmt.Sprintf("No such message: %d", in_reply_to_id), false, 1)
     }
-    err = scraper.SendDMReaction(room.ID, scraper.DMMessageID(in_reply_to_id), reacc)
+    err = api.SendDMReaction(room.ID, scraper.DMMessageID(in_reply_to_id), reacc)
     if err != nil {
         die(fmt.Sprintf("Failed to react to message:\n %s", err.Error()), false, 1)
     }

View File

@@ -20,7 +20,7 @@ func (app *Application) Bookmarks(w http.ResponseWriter, r *http.Request) {
     }
     // Run scraper
-    trove, err := scraper.GetBookmarks(300) // TODO: parameterizable
+    trove, err := app.API.GetBookmarks(300) // TODO: parameterizable
     if err != nil && !errors.Is(err, scraper.END_OF_FEED) {
         app.ErrorLog.Print(err)
         panic(err) // Return a toast

View File

@@ -82,7 +82,7 @@ func (app *Application) after_login(w http.ResponseWriter, r *http.Request, api
     panic_if(err)
     // Scrape the user's feed
-    trove, err := scraper.GetHomeTimeline("", true)
+    trove, err := app.API.GetHomeTimeline("", true)
     if err != nil {
         app.ErrorLog.Printf("Initial timeline scrape failed: %s", err.Error())
         http.Redirect(w, r, "/", 303)
@@ -92,7 +92,7 @@ func (app *Application) after_login(w http.ResponseWriter, r *http.Request, api
     go app.Profile.SaveTweetTrove(trove, true)
     // Scrape the user's followers
-    trove, err = scraper.GetFollowees(user.ID, 1000)
+    trove, err = app.API.GetFollowees(user.ID, 1000)
     if err != nil {
         app.ErrorLog.Printf("Failed to scrape followers: %s", err.Error())
         http.Redirect(w, r, "/", 303)

View File

@@ -33,7 +33,7 @@ func (app *Application) message_mark_as_read(w http.ResponseWriter, r *http.Requ
     c.PageSize = 1
     chat_contents := app.Profile.GetChatRoomMessagesByCursor(c)
     last_message_id := chat_contents.MessageIDs[len(chat_contents.MessageIDs)-1]
-    panic_if(scraper.MarkDMChatRead(room_id, last_message_id))
+    panic_if(app.API.MarkDMChatRead(room_id, last_message_id))
     room := chat_contents.Rooms[room_id]
     participant, is_ok := room.Participants[app.ActiveUser.ID]
     if !is_ok {
@@ -66,7 +66,7 @@ func (app *Application) message_send(w http.ResponseWriter, r *http.Request) {
         in_reply_to_id = 0
     }
-    trove, err := scraper.SendDMMessage(room_id, message_data.Text, scraper.DMMessageID(in_reply_to_id))
+    trove, err := app.API.SendDMMessage(room_id, message_data.Text, scraper.DMMessageID(in_reply_to_id))
     if err != nil {
         panic(err)
     }
@@ -96,7 +96,7 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
     data_, err := io.ReadAll(r.Body)
     panic_if(err)
     panic_if(json.Unmarshal(data_, &data))
-    panic_if(scraper.SendDMReaction(room_id, data.MessageID, data.Reacc))
+    panic_if(app.API.SendDMReaction(room_id, data.MessageID, data.Reacc))
     dm_message := global_data.Messages[data.MessageID]
     dm_message.Reactions[app.ActiveUser.ID] = scraper.DMReaction{
@@ -118,7 +118,7 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
     if r.URL.Query().Has("scrape") && !app.IsScrapingDisabled {
         max_id := scraper.DMMessageID(^uint(0) >> 1)
-        trove, err := scraper.GetConversation(room_id, max_id, 50) // TODO: parameterizable
+        trove, err := app.API.GetConversation(room_id, max_id, 50) // TODO: parameterizable
         if err != nil {
             panic(err)
         }

View File

@@ -103,7 +103,7 @@ func (app *Application) Search(w http.ResponseWriter, r *http.Request) {
     }
     // Run scraper
-    trove, err := scraper.Search(search_text, 1) // TODO: parameterizable
+    trove, err := app.API.Search(search_text, 1) // TODO: parameterizable
     if err != nil && !errors.Is(err, scraper.END_OF_FEED) {
         app.ErrorLog.Print(err)
         // TOOD: show error in UI

View File

@@ -49,7 +49,7 @@ func (app *Application) ensure_tweet(id scraper.TweetID, is_forced bool, is_conv
     }
     if is_needing_scrape && !app.IsScrapingDisabled {
-        trove, err := scraper.GetTweetFullAPIV2(id, 50) // TODO: parameterizable
+        trove, err := app.API.GetTweetFullAPIV2(id, 50) // TODO: parameterizable
         // Save the trove unless there was an unrecoverable error
         if err == nil || errors.Is(err, scraper.END_OF_FEED) || errors.Is(err, scraper.ErrRateLimited) {
@@ -73,7 +73,7 @@ func (app *Application) ensure_tweet(id scraper.TweetID, is_forced bool, is_conv
 func (app *Application) LikeTweet(w http.ResponseWriter, r *http.Request) {
     tweet := get_tweet_from_context(r.Context())
-    like, err := scraper.LikeTweet(tweet.ID)
+    like, err := app.API.LikeTweet(tweet.ID)
     // "Already Liked This Tweet" is no big deal-- we can just update the UI as if it succeeded
     if err != nil && !errors.Is(err, scraper.AlreadyLikedThisTweet) {
         // It's a different error
@@ -87,7 +87,7 @@ func (app *Application) LikeTweet(w http.ResponseWriter, r *http.Request) {
 }
 func (app *Application) UnlikeTweet(w http.ResponseWriter, r *http.Request) {
     tweet := get_tweet_from_context(r.Context())
-    err := scraper.UnlikeTweet(tweet.ID)
+    err := app.API.UnlikeTweet(tweet.ID)
     // As above, "Haven't Liked This Tweet" is no big deal-- we can just update the UI as if the request succeeded
     if err != nil && !errors.Is(err, scraper.HaventLikedThisTweet) {
         // It's a different error

View File

@@ -46,7 +46,7 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
     if len(parts) == 1 { // The URL is just the user handle
         // Run scraper
-        trove, err := scraper.GetUserFeed(user.ID, 50) // TODO: parameterizable
+        trove, err := app.API.GetUserFeed(user.ID, 50) // TODO: parameterizable
         if err != nil {
             app.ErrorLog.Print(err)
             // TOOD: show error in UI
@@ -54,7 +54,7 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
         app.Profile.SaveTweetTrove(trove, false)
         go app.Profile.SaveTweetTrove(trove, true)
     } else if len(parts) == 2 && parts[1] == "likes" {
-        trove, err := scraper.GetUserLikes(user.ID, 50) // TODO: parameterizable
+        trove, err := app.API.GetUserLikes(user.ID, 50) // TODO: parameterizable
         if err != nil {
             app.ErrorLog.Print(err)
             // TOOD: show error in UI
@@ -158,7 +158,7 @@ func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, us
     }
     // Run scraper
-    trove, err := scraper.GetFollowees(user.ID, 200) // TODO: parameterizable
+    trove, err := app.API.GetFollowees(user.ID, 200) // TODO: parameterizable
     if err != nil {
         app.ErrorLog.Print(err)
         // TOOD: show error in UI
@@ -184,7 +184,7 @@ func (app *Application) UserFollowers(w http.ResponseWriter, r *http.Request, us
     }
     // Run scraper
-    trove, err := scraper.GetFollowers(user.ID, 200) // TODO: parameterizable
+    trove, err := app.API.GetFollowers(user.ID, 200) // TODO: parameterizable
     if err != nil {
         app.ErrorLog.Print(err)
         // TOOD: show error in UI

View File

@@ -31,6 +31,7 @@ type Application struct {
     Profile persistence.Profile
     ActiveUser scraper.User
     IsScrapingDisabled bool
+    API scraper.API
 }
 func NewApp(profile persistence.Profile) Application {
@@ -44,6 +45,11 @@ func NewApp(profile persistence.Profile) Application {
         ActiveUser: get_default_user(),
         IsScrapingDisabled: true, // Until an active user is set
     }
+    // Can ignore errors; if not authenticated, it won't be used for anything.
+    // GetUser and Login both create a new session.
+    ret.API, _ = scraper.NewGuestSession() //nolint:errcheck // see above
     ret.Middlewares = []Middleware{
         secureHeaders,
         ret.logRequest,
@@ -69,7 +75,7 @@ func (app *Application) SetActiveUser(handle scraper.UserHandle) error {
     if err != nil {
         return fmt.Errorf("set active user to %q: %w", handle, err)
     }
-    scraper.InitApi(app.Profile.LoadSession(handle))
+    app.API = app.Profile.LoadSession(handle)
     app.ActiveUser = user
     app.IsScrapingDisabled = false
 }
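The webserver side of the same refactor: the Application now owns an API field, seeded with a guest session at construction and replaced by an authenticated session on login. A hedged, self-contained sketch of that lifecycle (the types and function names below are simplified stand-ins for scraper.API, scraper.NewGuestSession, and Profile.LoadSession):

package main

import "fmt"

// Simplified stand-ins; the real signatures are in the hunks above.
type API struct{ IsAuthenticated bool }

func NewGuestSession() (API, error) { return API{}, nil }

type Application struct {
    API                API
    IsScrapingDisabled bool
}

func NewApp() Application {
    ret := Application{IsScrapingDisabled: true} // until an active user is set
    // As in the commit: a guest-session error can be ignored here, because an
    // unauthenticated API is not used for anything until a user logs in.
    ret.API, _ = NewGuestSession()
    return ret
}

func (app *Application) SetActiveUser(session API) {
    // Replaces the old scraper.InitApi(...) singleton call.
    app.API = session
    app.IsScrapingDisabled = false
}

func main() {
    app := NewApp()
    app.SetActiveUser(API{IsAuthenticated: true})
    fmt.Println("scraping disabled:", app.IsScrapingDisabled)
}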

View File

@@ -31,7 +31,7 @@ func (app *Application) background_scrape() {
     }
     fmt.Println("Scraping home timeline...")
-    trove, err := scraper.GetHomeTimeline("", is_following_only)
+    trove, err := app.API.GetHomeTimeline("", is_following_only)
     if err != nil {
         app.ErrorLog.Printf("Background scrape failed: %s", err.Error())
         return
@@ -66,7 +66,7 @@ func (app *Application) background_user_likes_scrape() {
     }
     fmt.Println("Scraping user likes...")
-    trove, err := scraper.GetUserLikes(app.ActiveUser.ID, 50) // TODO: parameterizable
+    trove, err := app.API.GetUserLikes(app.ActiveUser.ID, 50) // TODO: parameterizable
     if err != nil {
         app.ErrorLog.Printf("Background scrape failed: %s", err.Error())
         return
@@ -105,9 +105,9 @@ func (app *Application) background_dm_polling_scrape() {
     var trove scraper.TweetTrove
     var err error
     if inbox_cursor == "" {
-        trove, inbox_cursor, err = scraper.GetInbox(0)
+        trove, inbox_cursor, err = app.API.GetInbox(0)
     } else {
-        trove, inbox_cursor, err = scraper.PollInboxUpdates(inbox_cursor)
+        trove, inbox_cursor, err = app.API.PollInboxUpdates(inbox_cursor)
     }
     if err != nil {
         panic(err)

View File

@@ -61,10 +61,6 @@ func (api *API) GetFollowees(user_id UserID, how_many int) (TweetTrove, error) {
     return api.GetPaginatedQuery(PaginatedFollowees{user_id}, how_many)
 }
-func GetFollowees(user_id UserID, how_many int) (TweetTrove, error) {
-    return the_api.GetFollowees(user_id, how_many)
-}
 func (api *API) GetFollowersPage(user_id UserID, cursor string) (APIV2Response, error) {
     url, err := url.Parse(GraphqlURL{
         BaseUrl: "https://twitter.com/i/api/graphql/3_7xfjmh897x8h_n6QBqTA/Followers",
@@ -121,7 +117,3 @@ func (p PaginatedFollowers) ToTweetTrove(r APIV2Response) (TweetTrove, error) {
 func (api *API) GetFollowers(user_id UserID, how_many int) (TweetTrove, error) {
     return api.GetPaginatedQuery(PaginatedFollowers{user_id}, how_many)
 }
-func GetFollowers(user_id UserID, how_many int) (TweetTrove, error) {
-    return the_api.GetFollowers(user_id, how_many)
-}

View File

@@ -86,10 +86,3 @@ func (api API) UnlikeTweet(id TweetID) error {
     }
     return nil
 }
-func LikeTweet(id TweetID) (Like, error) {
-    return the_api.LikeTweet(id)
-}
-func UnlikeTweet(id TweetID) error {
-    return the_api.UnlikeTweet(id)
-}

View File

@@ -140,7 +140,3 @@ func (api *API) FetchSpaceDetail(id SpaceID) (TweetTrove, error) {
     }
     return space_response.ToTweetTrove(), nil
 }
-func FetchSpaceDetail(id SpaceID) (TweetTrove, error) {
-    return the_api.FetchSpaceDetail(id)
-}

View File

@@ -950,7 +950,7 @@ func (api *API) GetPaginatedQuery(pq PaginatedQuery, count int) (TweetTrove, err
     }
     fmt.Println("------------")
-    err2 = trove.PostProcess()
+    err2 = trove.PostProcess(api)
     if err2 != nil {
         return TweetTrove{}, fmt.Errorf("failed to post-process tweet trove: %w", err2)
     }
@@ -1022,10 +1022,6 @@ func (api *API) GetUserFeed(user_id UserID, min_tweets int) (trove TweetTrove, e
     return api.GetPaginatedQuery(PaginatedUserFeed{user_id}, min_tweets)
 }
-func GetUserFeed(user_id UserID, min_tweets int) (trove TweetTrove, err error) {
-    return the_api.GetUserFeed(user_id, min_tweets)
-}
 // Paginated Tweet Detail (conversation)
 // -------------------------------------
@@ -1116,9 +1112,6 @@ func (api *API) GetTweetFullAPIV2(id TweetID, how_many int) (TweetTrove, error)
     return trove, err
 }
-func GetTweetFullAPIV2(id TweetID, how_many int) (TweetTrove, error) {
-    return the_api.GetTweetFullAPIV2(id, how_many)
-}
 // Paginated User Likes
 // --------------------
@@ -1192,10 +1185,6 @@ func (api *API) GetUserLikes(user_id UserID, how_many int) (TweetTrove, error) {
     return api.GetPaginatedQuery(PaginatedUserLikes{user_id}, how_many)
 }
-func GetUserLikes(user_id UserID, how_many int) (TweetTrove, error) {
-    return the_api.GetUserLikes(user_id, how_many)
-}
 // Paginated Bookmarks
 // -------------------
@@ -1265,10 +1254,6 @@ func (api *API) GetBookmarks(how_many int) (TweetTrove, error) {
     return api.GetPaginatedQuery(PaginatedBookmarks{api.UserID}, how_many)
 }
-func GetBookmarks(how_many int) (TweetTrove, error) {
-    return the_api.GetBookmarks(how_many)
-}
 // Paginated Home Timeline
 // -----------------------
@@ -1331,10 +1316,6 @@ func (api *API) GetHomeTimeline(cursor string, is_following_only bool) (TweetTro
     return trove, err
 }
-func GetHomeTimeline(cursor string, is_following_only bool) (TweetTrove, error) {
-    return the_api.GetHomeTimeline(cursor, is_following_only)
-}
 // Get User
 // --------
@@ -1447,6 +1428,3 @@ func (p PaginatedSearch) ToTweetTrove(r APIV2Response) (TweetTrove, error) {
 func (api *API) Search(query string, min_results int) (trove TweetTrove, err error) {
     return api.GetPaginatedQuery(PaginatedSearch{query}, min_results)
 }
-func Search(query string, min_results int) (trove TweetTrove, err error) {
-    return the_api.Search(query, min_results)
-}

View File

@@ -37,9 +37,6 @@ func (api *API) GetInbox(how_many int) (TweetTrove, string, error) {
     return trove, cursor, nil
 }
-func GetInbox(how_many int) (TweetTrove, string, error) {
-    return the_api.GetInbox(how_many)
-}
 func (api *API) GetConversation(id DMChatRoomID, max_id DMMessageID, how_many int) (TweetTrove, error) {
     if !api.IsAuthenticated {
@@ -64,22 +61,3 @@ func (api *API) GetConversation(id DMChatRoomID, max_id DMMessageID, how_many in
     return trove, nil
 }
-func GetConversation(id DMChatRoomID, max_id DMMessageID, how_many int) (TweetTrove, error) {
-    return the_api.GetConversation(id, max_id, how_many)
-}
-func PollInboxUpdates(cursor string) (TweetTrove, string, error) {
-    return the_api.PollInboxUpdates(cursor)
-}
-func SendDMMessage(room_id DMChatRoomID, text string, in_reply_to_id DMMessageID) (TweetTrove, error) {
-    return the_api.SendDMMessage(room_id, text, in_reply_to_id)
-}
-func SendDMReaction(room_id DMChatRoomID, message_id DMMessageID, reacc string) error {
-    return the_api.SendDMReaction(room_id, message_id, reacc)
-}
-func MarkDMChatRead(room_id DMChatRoomID, read_message_id DMMessageID) error {
-    return the_api.MarkDMChatRead(room_id, read_message_id)
-}

View File

@@ -268,6 +268,3 @@ func (api *API) GetTweet(id TweetID) (Tweet, error) {
     tweet.IsConversationScraped = true
     return tweet, nil
 }
-func GetTweet(id TweetID) (Tweet, error) {
-    return the_api.GetTweet(id)
-}

View File

@@ -154,11 +154,11 @@ func (trove *TweetTrove) FillMissingUserIDs() {
     }
 }
-func (trove *TweetTrove) FillSpaceDetails() error {
+func (trove *TweetTrove) FillSpaceDetails(api *API) error {
     fmt.Println("Filling space details")
     for i := range trove.Spaces {
         fmt.Printf("Getting space: %q\n", trove.Spaces[i].ID)
-        new_trove, err := FetchSpaceDetail(trove.Spaces[i].ID)
+        new_trove, err := api.FetchSpaceDetail(trove.Spaces[i].ID)
         if err != nil {
             return err
         }
@@ -177,10 +177,10 @@ func (trove *TweetTrove) FillSpaceDetails() error {
     return nil
 }
-func (trove *TweetTrove) PostProcess() error {
+func (trove *TweetTrove) PostProcess(api *API) error {
     trove.FetchTombstoneUsers()
     trove.FillMissingUserIDs()
-    err := trove.FillSpaceDetails()
+    err := trove.FillSpaceDetails(api)
     if err != nil {
         return err
     }
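The last hunk shows the same dependency threading inside the scraper package itself: TweetTrove.PostProcess and FillSpaceDetails now take the API as a parameter instead of calling the package-level FetchSpaceDetail wrapper. A small self-contained sketch of that pattern (simplified types, illustrative only):

package main

import "fmt"

// Simplified stand-ins for scraper.API, TweetTrove, and SpaceID.
type API struct{}

func (api *API) FetchSpaceDetail(id string) (string, error) {
    return "details for space " + id, nil
}

type TweetTrove struct{ Spaces []string }

// The API is now an explicit parameter rather than a hidden global.
func (trove *TweetTrove) FillSpaceDetails(api *API) error {
    for _, id := range trove.Spaces {
        detail, err := api.FetchSpaceDetail(id)
        if err != nil {
            return err
        }
        fmt.Println(detail)
    }
    return nil
}

func (trove *TweetTrove) PostProcess(api *API) error {
    return trove.FillSpaceDetails(api)
}

func main() {
    trove := TweetTrove{Spaces: []string{"space-123"}}
    if err := trove.PostProcess(&API{}); err != nil {
        panic(err)
    }
}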