From 08e5a726a8e92f3dc04cbc032416902b8317e760 Mon Sep 17 00:00:00 2001
From: Alessio
Date: Mon, 19 Aug 2024 16:45:32 -0700
Subject: [PATCH] Remove most uses of the singleton pattern in 'scraper' package

---
 cmd/twitter/main.go                        | 41 ++++++++++++----------
 internal/webserver/handler_bookmarks.go    |  2 +-
 internal/webserver/handler_login.go        |  4 +--
 internal/webserver/handler_messages.go     |  8 ++---
 internal/webserver/handler_search.go       |  2 +-
 internal/webserver/handler_tweet_detail.go |  6 ++--
 internal/webserver/handler_user_feed.go    |  8 ++---
 internal/webserver/server.go               |  8 ++++-
 internal/webserver/stopwatch.go            |  8 ++---
 pkg/scraper/api_types_lists.go             |  8 -----
 pkg/scraper/api_types_posting.go           |  7 ----
 pkg/scraper/api_types_spaces.go            |  4 ---
 pkg/scraper/api_types_v2.go                | 24 +------------
 pkg/scraper/dm_trove.go                    | 22 ------------
 pkg/scraper/tweet.go                       |  3 --
 pkg/scraper/tweet_trove.go                 |  8 ++---
 16 files changed, 53 insertions(+), 110 deletions(-)

diff --git a/cmd/twitter/main.go b/cmd/twitter/main.go
index edbd51c..1bf4847 100644
--- a/cmd/twitter/main.go
+++ b/cmd/twitter/main.go
@@ -23,6 +23,8 @@ var profile persistence.Profile
 
 var version_string string
 
+var api scraper.API
+
 func main() {
     profile_dir := flag.String("profile", ".", "")
     flag.StringVar(profile_dir, "p", ".", "")
@@ -126,13 +128,15 @@ func main() {
             // Lop off the ".session" suffix (allows using `--session asdf.session` which lets you tab-autocomplete at command line)
             *session_name = (*session_name)[:len(*session_name)-8]
         }
-        scraper.InitApi(profile.LoadSession(scraper.UserHandle(*session_name)))
+        api = profile.LoadSession(scraper.UserHandle(*session_name))
+        scraper.InitApi(api)
     } else {
-        session, err := scraper.NewGuestSession()
+        var err error
+        api, err = scraper.NewGuestSession()
         if err != nil {
             log.Warnf("Unable to initialize guest session! Might be a network issue")
         } else {
-            scraper.InitApi(session)
+            scraper.InitApi(api)
         }
     }
 
@@ -234,7 +238,6 @@ func main() {
 // - username: twitter username or email address
 // - password: twitter account password
 func login(username string, password string) {
-    // Skip the scraper.InitApi, just use a local one since no scraping is happening
     api, err := scraper.NewGuestSession()
     if err != nil {
         die(fmt.Sprintf("Unable to create session: %s", err.Error()), false, 1)
@@ -303,7 +306,7 @@ func fetch_tweet_only(tweet_identifier string) {
         die(err.Error(), false, -1)
     }
 
-    tweet, err := scraper.GetTweet(tweet_id)
+    tweet, err := api.GetTweet(tweet_id)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error fetching tweet: %s", err.Error()), false, -1)
     }
@@ -328,7 +331,7 @@ func fetch_tweet_conversation(tweet_identifier string, how_many int) {
         die(err.Error(), false, -1)
     }
 
-    trove, err := scraper.GetTweetFullAPIV2(tweet_id, how_many)
+    trove, err := api.GetTweetFullAPIV2(tweet_id, how_many)
     if is_scrape_failure(err) {
         die(err.Error(), false, -1)
     }
@@ -349,7 +352,7 @@ func fetch_user_feed(handle string, how_many int) {
         die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
     }
 
-    trove, err := scraper.GetUserFeed(user.ID, how_many)
+    trove, err := api.GetUserFeed(user.ID, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error scraping feed: %s\n %s", handle, err.Error()), false, -2)
     }
@@ -367,7 +370,7 @@ func get_user_likes(handle string, how_many int) {
         die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
     }
 
-    trove, err := scraper.GetUserLikes(user.ID, how_many)
+    trove, err := api.GetUserLikes(user.ID, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error scraping feed: %s\n %s", handle, err.Error()), false, -2)
     }
@@ -385,7 +388,7 @@ func get_followees(handle string, how_many int) {
         die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
     }
 
-    trove, err := scraper.GetFollowees(user.ID, how_many)
+    trove, err := api.GetFollowees(user.ID, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error getting followees: %s\n %s", handle, err.Error()), false, -2)
     }
@@ -399,7 +402,7 @@ func get_followers(handle string, how_many int) {
     if err != nil {
         die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
     }
-    trove, err := scraper.GetFollowers(user.ID, how_many)
+    trove, err := api.GetFollowers(user.ID, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error getting followees: %s\n %s", handle, err.Error()), false, -2)
     }
@@ -409,7 +412,7 @@ func get_followers(handle string, how_many int) {
     happy_exit(fmt.Sprintf("Saved %d followers", len(trove.Users)), err)
 }
 func get_bookmarks(how_many int) {
-    trove, err := scraper.GetBookmarks(how_many)
+    trove, err := api.GetBookmarks(how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error scraping bookmarks:\n %s", err.Error()), false, -2)
     }
@@ -422,7 +425,7 @@ func get_bookmarks(how_many int) {
     )
 }
 func fetch_timeline(is_following_only bool) {
-    trove, err := scraper.GetHomeTimeline("", is_following_only)
+    trove, err := api.GetHomeTimeline("", is_following_only)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error fetching timeline:\n %s", err.Error()), false, -2)
     }
@@ -462,7 +465,7 @@ func download_user_content(handle scraper.UserHandle) {
 }
 
 func search(query string, how_many int) {
-    trove, err := scraper.Search(query, how_many)
+    trove, err := api.Search(query, how_many)
     if is_scrape_failure(err) {
         die(fmt.Sprintf("Error scraping search results: %s", err.Error()), false, -100)
     }
@@ -490,7 +493,7 @@ func unlike_tweet(tweet_identifier string) {
     if err != nil {
         die(err.Error(), false, -1)
     }
-    err = scraper.UnlikeTweet(tweet_id)
+    err = api.UnlikeTweet(tweet_id)
     if err != nil {
         die(err.Error(), false, -10)
     }
@@ -502,7 +505,7 @@ func like_tweet(tweet_identifier string) {
     if err != nil {
         die(err.Error(), false, -1)
     }
-    like, err := scraper.LikeTweet(tweet_id)
+    like, err := api.LikeTweet(tweet_id)
     if err != nil {
         die(err.Error(), false, -10)
     }
@@ -525,7 +528,7 @@ func start_webserver(addr string, should_auto_open bool) {
 }
 
 func fetch_inbox(how_many int) {
-    trove, _, err := scraper.GetInbox(how_many)
+    trove, _, err := api.GetInbox(how_many)
     if err != nil {
         die(fmt.Sprintf("Failed to fetch inbox:\n %s", err.Error()), false, 1)
     }
@@ -539,7 +542,7 @@ func fetch_dm(id string, how_many int) {
         panic(err)
     }
     max_id := scraper.DMMessageID(^uint(0) >> 1)
-    trove, err := scraper.GetConversation(room.ID, max_id, how_many)
+    trove, err := api.GetConversation(room.ID, max_id, how_many)
     if err != nil {
         die(fmt.Sprintf("Failed to fetch dm:\n %s", err.Error()), false, 1)
     }
@@ -556,7 +559,7 @@ func send_dm(room_id string, text string, in_reply_to_id int) {
         die(fmt.Sprintf("No such chat room: %d", in_reply_to_id), false, 1)
     }
 
-    trove, err := scraper.SendDMMessage(room.ID, text, scraper.DMMessageID(in_reply_to_id))
+    trove, err := api.SendDMMessage(room.ID, text, scraper.DMMessageID(in_reply_to_id))
     if err != nil {
         die(fmt.Sprintf("Failed to send dm:\n %s", err.Error()), false, 1)
     }
@@ -573,7 +576,7 @@ func send_dm_reacc(room_id string, in_reply_to_id int, reacc string) {
     if err != nil {
         die(fmt.Sprintf("No such message: %d", in_reply_to_id), false, 1)
     }
-    err = scraper.SendDMReaction(room.ID, scraper.DMMessageID(in_reply_to_id), reacc)
+    err = api.SendDMReaction(room.ID, scraper.DMMessageID(in_reply_to_id), reacc)
     if err != nil {
         die(fmt.Sprintf("Failed to react to message:\n %s", err.Error()), false, 1)
     }
diff --git a/internal/webserver/handler_bookmarks.go b/internal/webserver/handler_bookmarks.go
index 9fce875..51c263a 100644
--- a/internal/webserver/handler_bookmarks.go
+++ b/internal/webserver/handler_bookmarks.go
@@ -20,7 +20,7 @@ func (app *Application) Bookmarks(w http.ResponseWriter, r *http.Request) {
     }
 
     // Run scraper
-    trove, err := scraper.GetBookmarks(300) // TODO: parameterizable
+    trove, err := app.API.GetBookmarks(300) // TODO: parameterizable
     if err != nil && !errors.Is(err, scraper.END_OF_FEED) {
         app.ErrorLog.Print(err)
         panic(err) // Return a toast
diff --git a/internal/webserver/handler_login.go b/internal/webserver/handler_login.go
index d1e47d0..c260f5c 100644
--- a/internal/webserver/handler_login.go
+++ b/internal/webserver/handler_login.go
@@ -82,7 +82,7 @@ func (app *Application) after_login(w http.ResponseWriter, r *http.Request, api
     panic_if(err)
 
     // Scrape the user's feed
-    trove, err := scraper.GetHomeTimeline("", true)
+    trove, err := app.API.GetHomeTimeline("", true)
     if err != nil {
         app.ErrorLog.Printf("Initial timeline scrape failed: %s", err.Error())
         http.Redirect(w, r, "/", 303)
@@ -92,7 +92,7 @@
     go app.Profile.SaveTweetTrove(trove, true)
 
     // Scrape the user's followers
-    trove, err = scraper.GetFollowees(user.ID, 1000)
+    trove, err = app.API.GetFollowees(user.ID, 1000)
     if err != nil {
         app.ErrorLog.Printf("Failed to scrape followers: %s", err.Error())
         http.Redirect(w, r, "/", 303)
diff --git a/internal/webserver/handler_messages.go b/internal/webserver/handler_messages.go
index 63acdfe..64890f6 100644
--- a/internal/webserver/handler_messages.go
+++ b/internal/webserver/handler_messages.go
@@ -33,7 +33,7 @@ func (app *Application) message_mark_as_read(w http.ResponseWriter, r *http.Requ
     c.PageSize = 1
     chat_contents := app.Profile.GetChatRoomMessagesByCursor(c)
     last_message_id := chat_contents.MessageIDs[len(chat_contents.MessageIDs)-1]
-    panic_if(scraper.MarkDMChatRead(room_id, last_message_id))
+    panic_if(app.API.MarkDMChatRead(room_id, last_message_id))
     room := chat_contents.Rooms[room_id]
     participant, is_ok := room.Participants[app.ActiveUser.ID]
     if !is_ok {
@@ -66,7 +66,7 @@ func (app *Application) message_send(w http.ResponseWriter, r *http.Request) {
         in_reply_to_id = 0
     }
 
-    trove, err := scraper.SendDMMessage(room_id, message_data.Text, scraper.DMMessageID(in_reply_to_id))
+    trove, err := app.API.SendDMMessage(room_id, message_data.Text, scraper.DMMessageID(in_reply_to_id))
     if err != nil {
         panic(err)
     }
@@ -96,7 +96,7 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
         data_, err := io.ReadAll(r.Body)
         panic_if(err)
         panic_if(json.Unmarshal(data_, &data))
-        panic_if(scraper.SendDMReaction(room_id, data.MessageID, data.Reacc))
+        panic_if(app.API.SendDMReaction(room_id, data.MessageID, data.Reacc))
 
         dm_message := global_data.Messages[data.MessageID]
         dm_message.Reactions[app.ActiveUser.ID] = scraper.DMReaction{
@@ -118,7 +118,7 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
 
     if r.URL.Query().Has("scrape") && !app.IsScrapingDisabled {
         max_id := scraper.DMMessageID(^uint(0) >> 1)
-        trove, err := scraper.GetConversation(room_id, max_id, 50) // TODO: parameterizable
+        trove, err := app.API.GetConversation(room_id, max_id, 50) // TODO: parameterizable
         if err != nil {
             panic(err)
         }
diff --git a/internal/webserver/handler_search.go b/internal/webserver/handler_search.go
index 179a0bc..b167621 100644
--- a/internal/webserver/handler_search.go
+++ b/internal/webserver/handler_search.go
@@ -103,7 +103,7 @@ func (app *Application) Search(w http.ResponseWriter, r *http.Request) {
     }
 
     // Run scraper
-    trove, err := scraper.Search(search_text, 1) // TODO: parameterizable
+    trove, err := app.API.Search(search_text, 1) // TODO: parameterizable
     if err != nil && !errors.Is(err, scraper.END_OF_FEED) {
         app.ErrorLog.Print(err)
         // TOOD: show error in UI
diff --git a/internal/webserver/handler_tweet_detail.go b/internal/webserver/handler_tweet_detail.go
index 6f61b01..1d739b0 100644
--- a/internal/webserver/handler_tweet_detail.go
+++ b/internal/webserver/handler_tweet_detail.go
@@ -49,7 +49,7 @@ func (app *Application) ensure_tweet(id scraper.TweetID, is_forced bool, is_conv
     }
 
     if is_needing_scrape && !app.IsScrapingDisabled {
-        trove, err := scraper.GetTweetFullAPIV2(id, 50) // TODO: parameterizable
+        trove, err := app.API.GetTweetFullAPIV2(id, 50) // TODO: parameterizable
 
         // Save the trove unless there was an unrecoverable error
         if err == nil || errors.Is(err, scraper.END_OF_FEED) || errors.Is(err, scraper.ErrRateLimited) {
@@ -73,7 +73,7 @@ func (app *Application) ensure_tweet(id scraper.TweetID, is_forced bool, is_conv
 
 func (app *Application) LikeTweet(w http.ResponseWriter, r *http.Request) {
     tweet := get_tweet_from_context(r.Context())
-    like, err := scraper.LikeTweet(tweet.ID)
+    like, err := app.API.LikeTweet(tweet.ID)
     // "Already Liked This Tweet" is no big deal-- we can just update the UI as if it succeeded
     if err != nil && !errors.Is(err, scraper.AlreadyLikedThisTweet) {
         // It's a different error
@@ -87,7 +87,7 @@ func (app *Application) LikeTweet(w http.ResponseWriter, r *http.Request) {
 }
 func (app *Application) UnlikeTweet(w http.ResponseWriter, r *http.Request) {
     tweet := get_tweet_from_context(r.Context())
-    err := scraper.UnlikeTweet(tweet.ID)
+    err := app.API.UnlikeTweet(tweet.ID)
     // As above, "Haven't Liked This Tweet" is no big deal-- we can just update the UI as if the request succeeded
     if err != nil && !errors.Is(err, scraper.HaventLikedThisTweet) {
         // It's a different error
diff --git a/internal/webserver/handler_user_feed.go b/internal/webserver/handler_user_feed.go
index 5540f14..3c85aa3 100644
--- a/internal/webserver/handler_user_feed.go
+++ b/internal/webserver/handler_user_feed.go
@@ -46,7 +46,7 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 
     if len(parts) == 1 { // The URL is just the user handle
         // Run scraper
-        trove, err := scraper.GetUserFeed(user.ID, 50) // TODO: parameterizable
+        trove, err := app.API.GetUserFeed(user.ID, 50) // TODO: parameterizable
         if err != nil {
             app.ErrorLog.Print(err)
             // TOOD: show error in UI
@@ -54,7 +54,7 @@
         app.Profile.SaveTweetTrove(trove, false)
         go app.Profile.SaveTweetTrove(trove, true)
     } else if len(parts) == 2 && parts[1] == "likes" {
-        trove, err := scraper.GetUserLikes(user.ID, 50) // TODO: parameterizable
+        trove, err := app.API.GetUserLikes(user.ID, 50) // TODO: parameterizable
         if err != nil {
             app.ErrorLog.Print(err)
             // TOOD: show error in UI
@@ -158,7 +158,7 @@ func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, us
     }
 
     // Run scraper
-    trove, err := scraper.GetFollowees(user.ID, 200) // TODO: parameterizable
+    trove, err := app.API.GetFollowees(user.ID, 200) // TODO: parameterizable
     if err != nil {
         app.ErrorLog.Print(err)
         // TOOD: show error in UI
@@ -184,7 +184,7 @@ func (app *Application) UserFollowers(w http.ResponseWriter, r *http.Request, us
     }
 
     // Run scraper
-    trove, err := scraper.GetFollowers(user.ID, 200) // TODO: parameterizable
+    trove, err := app.API.GetFollowers(user.ID, 200) // TODO: parameterizable
     if err != nil {
         app.ErrorLog.Print(err)
         // TOOD: show error in UI
diff --git a/internal/webserver/server.go b/internal/webserver/server.go
index 09fe4c9..9e3a5d2 100644
--- a/internal/webserver/server.go
+++ b/internal/webserver/server.go
@@ -31,6 +31,7 @@ type Application struct {
     Profile            persistence.Profile
     ActiveUser         scraper.User
     IsScrapingDisabled bool
+    API                scraper.API
 }
 
 func NewApp(profile persistence.Profile) Application {
@@ -44,6 +45,11 @@ func NewApp(profile persistence.Profile) Application {
         ActiveUser:         get_default_user(),
         IsScrapingDisabled: true, // Until an active user is set
     }
+
+    // Can ignore errors; if not authenticated, it won't be used for anything.
+    // GetUser and Login both create a new session.
+    ret.API, _ = scraper.NewGuestSession() //nolint:errcheck // see above
+
     ret.Middlewares = []Middleware{
         secureHeaders,
         ret.logRequest,
@@ -69,7 +75,7 @@ func (app *Application) SetActiveUser(handle scraper.UserHandle) error {
         if err != nil {
             return fmt.Errorf("set active user to %q: %w", handle, err)
         }
-        scraper.InitApi(app.Profile.LoadSession(handle))
+        app.API = app.Profile.LoadSession(handle)
         app.ActiveUser = user
         app.IsScrapingDisabled = false
     }
diff --git a/internal/webserver/stopwatch.go b/internal/webserver/stopwatch.go
index bf2653f..d0ea02a 100644
--- a/internal/webserver/stopwatch.go
+++ b/internal/webserver/stopwatch.go
@@ -31,7 +31,7 @@ func (app *Application) background_scrape() {
     }
 
     fmt.Println("Scraping home timeline...")
-    trove, err := scraper.GetHomeTimeline("", is_following_only)
+    trove, err := app.API.GetHomeTimeline("", is_following_only)
     if err != nil {
         app.ErrorLog.Printf("Background scrape failed: %s", err.Error())
         return
@@ -66,7 +66,7 @@ func (app *Application) background_user_likes_scrape() {
     }
 
     fmt.Println("Scraping user likes...")
-    trove, err := scraper.GetUserLikes(app.ActiveUser.ID, 50) // TODO: parameterizable
+    trove, err := app.API.GetUserLikes(app.ActiveUser.ID, 50) // TODO: parameterizable
     if err != nil {
         app.ErrorLog.Printf("Background scrape failed: %s", err.Error())
         return
@@ -105,9 +105,9 @@ func (app *Application) background_dm_polling_scrape() {
     var trove scraper.TweetTrove
     var err error
     if inbox_cursor == "" {
-        trove, inbox_cursor, err = scraper.GetInbox(0)
+        trove, inbox_cursor, err = app.API.GetInbox(0)
     } else {
-        trove, inbox_cursor, err = scraper.PollInboxUpdates(inbox_cursor)
+        trove, inbox_cursor, err = app.API.PollInboxUpdates(inbox_cursor)
     }
     if err != nil {
         panic(err)
diff --git a/pkg/scraper/api_types_lists.go b/pkg/scraper/api_types_lists.go
index 1cbcf93..dc19fb5 100644
--- a/pkg/scraper/api_types_lists.go
+++ b/pkg/scraper/api_types_lists.go
@@ -61,10 +61,6 @@ func (api *API) GetFollowees(user_id UserID, how_many int) (TweetTrove, error) {
     return api.GetPaginatedQuery(PaginatedFollowees{user_id}, how_many)
 }
 
-func GetFollowees(user_id UserID, how_many int) (TweetTrove, error) {
-    return the_api.GetFollowees(user_id, how_many)
-}
-
 func (api *API) GetFollowersPage(user_id UserID, cursor string) (APIV2Response, error) {
     url, err := url.Parse(GraphqlURL{
         BaseUrl: "https://twitter.com/i/api/graphql/3_7xfjmh897x8h_n6QBqTA/Followers",
@@ -121,7 +117,3 @@ func (p PaginatedFollowers) ToTweetTrove(r APIV2Response) (TweetTrove, error) {
 func (api *API) GetFollowers(user_id UserID, how_many int) (TweetTrove, error) {
     return api.GetPaginatedQuery(PaginatedFollowers{user_id}, how_many)
 }
-
-func GetFollowers(user_id UserID, how_many int) (TweetTrove, error) {
-    return the_api.GetFollowers(user_id, how_many)
-}
diff --git a/pkg/scraper/api_types_posting.go b/pkg/scraper/api_types_posting.go
index d733f66..ac2b1c0 100644
--- a/pkg/scraper/api_types_posting.go
+++ b/pkg/scraper/api_types_posting.go
@@ -86,10 +86,3 @@ func (api API) UnlikeTweet(id TweetID) error {
     }
     return nil
 }
-
-func LikeTweet(id TweetID) (Like, error) {
-    return the_api.LikeTweet(id)
-}
-func UnlikeTweet(id TweetID) error {
-    return the_api.UnlikeTweet(id)
-}
diff --git a/pkg/scraper/api_types_spaces.go b/pkg/scraper/api_types_spaces.go
index 10fbcf6..6d55a05 100644
--- a/pkg/scraper/api_types_spaces.go
+++ b/pkg/scraper/api_types_spaces.go
@@ -140,7 +140,3 @@ func (api *API) FetchSpaceDetail(id SpaceID) (TweetTrove, error) {
     }
     return space_response.ToTweetTrove(), nil
 }
-
-func FetchSpaceDetail(id SpaceID) (TweetTrove, error) {
-    return the_api.FetchSpaceDetail(id)
-}
diff --git a/pkg/scraper/api_types_v2.go b/pkg/scraper/api_types_v2.go
index 0cd13f1..d0dc6a1 100644
--- a/pkg/scraper/api_types_v2.go
+++ b/pkg/scraper/api_types_v2.go
@@ -950,7 +950,7 @@ func (api *API) GetPaginatedQuery(pq PaginatedQuery, count int) (TweetTrove, err
     }
     fmt.Println("------------")
 
-    err2 = trove.PostProcess()
+    err2 = trove.PostProcess(api)
     if err2 != nil {
         return TweetTrove{}, fmt.Errorf("failed to post-process tweet trove: %w", err2)
     }
@@ -1022,10 +1022,6 @@ func (api *API) GetUserFeed(user_id UserID, min_tweets int) (trove TweetTrove, e
     return api.GetPaginatedQuery(PaginatedUserFeed{user_id}, min_tweets)
 }
 
-func GetUserFeed(user_id UserID, min_tweets int) (trove TweetTrove, err error) {
-    return the_api.GetUserFeed(user_id, min_tweets)
-}
-
 // Paginated Tweet Detail (conversation)
 // -------------------------------------
 
@@ -1116,9 +1112,6 @@ func (api *API) GetTweetFullAPIV2(id TweetID, how_many int) (TweetTrove, error)
     return trove, err
 }
 
-func GetTweetFullAPIV2(id TweetID, how_many int) (TweetTrove, error) {
-    return the_api.GetTweetFullAPIV2(id, how_many)
-}
 
 // Paginated User Likes
 // --------------------
@@ -1192,10 +1185,6 @@ func (api *API) GetUserLikes(user_id UserID, how_many int) (TweetTrove, error) {
     return api.GetPaginatedQuery(PaginatedUserLikes{user_id}, how_many)
 }
 
-func GetUserLikes(user_id UserID, how_many int) (TweetTrove, error) {
-    return the_api.GetUserLikes(user_id, how_many)
-}
-
 // Paginated Bookmarks
 // -------------------
 
@@ -1265,10 +1254,6 @@ func (api *API) GetBookmarks(how_many int) (TweetTrove, error) {
     return api.GetPaginatedQuery(PaginatedBookmarks{api.UserID}, how_many)
 }
 
-func GetBookmarks(how_many int) (TweetTrove, error) {
-    return the_api.GetBookmarks(how_many)
-}
-
 // Paginated Home Timeline
 // -----------------------
 
@@ -1331,10 +1316,6 @@ func (api *API) GetHomeTimeline(cursor string, is_following_only bool) (TweetTro
     return trove, err
 }
 
-func GetHomeTimeline(cursor string, is_following_only bool) (TweetTrove, error) {
-    return the_api.GetHomeTimeline(cursor, is_following_only)
-}
-
 // Get User
 // --------
 
@@ -1447,6 +1428,3 @@ func (p PaginatedSearch) ToTweetTrove(r APIV2Response) (TweetTrove, error) {
 func (api *API) Search(query string, min_results int) (trove TweetTrove, err error) {
     return api.GetPaginatedQuery(PaginatedSearch{query}, min_results)
 }
-func Search(query string, min_results int) (trove TweetTrove, err error) {
-    return the_api.Search(query, min_results)
-}
diff --git a/pkg/scraper/dm_trove.go b/pkg/scraper/dm_trove.go
index 2777903..5aaa8e9 100644
--- a/pkg/scraper/dm_trove.go
+++ b/pkg/scraper/dm_trove.go
@@ -37,9 +37,6 @@ func (api *API) GetInbox(how_many int) (TweetTrove, string, error) {
     return trove, cursor, nil
 }
 
-func GetInbox(how_many int) (TweetTrove, string, error) {
-    return the_api.GetInbox(how_many)
-}
 
 func (api *API) GetConversation(id DMChatRoomID, max_id DMMessageID, how_many int) (TweetTrove, error) {
     if !api.IsAuthenticated {
@@ -64,22 +61,3 @@ func (api *API) GetConversation(id DMChatRoomID, max_id DMMessageID, how_many in
     return trove, nil
 }
 
-func GetConversation(id DMChatRoomID, max_id DMMessageID, how_many int) (TweetTrove, error) {
-    return the_api.GetConversation(id, max_id, how_many)
-}
-
-func PollInboxUpdates(cursor string) (TweetTrove, string, error) {
-    return the_api.PollInboxUpdates(cursor)
-}
-
-func SendDMMessage(room_id DMChatRoomID, text string, in_reply_to_id DMMessageID) (TweetTrove, error) {
-    return the_api.SendDMMessage(room_id, text, in_reply_to_id)
-}
-
-func SendDMReaction(room_id DMChatRoomID, message_id DMMessageID, reacc string) error {
-    return the_api.SendDMReaction(room_id, message_id, reacc)
-}
-
-func MarkDMChatRead(room_id DMChatRoomID, read_message_id DMMessageID) error {
-    return the_api.MarkDMChatRead(room_id, read_message_id)
-}
diff --git a/pkg/scraper/tweet.go b/pkg/scraper/tweet.go
index 33d6fdf..81f99c9 100644
--- a/pkg/scraper/tweet.go
+++ b/pkg/scraper/tweet.go
@@ -268,6 +268,3 @@ func (api *API) GetTweet(id TweetID) (Tweet, error) {
     tweet.IsConversationScraped = true
     return tweet, nil
 }
-func GetTweet(id TweetID) (Tweet, error) {
-    return the_api.GetTweet(id)
-}
diff --git a/pkg/scraper/tweet_trove.go b/pkg/scraper/tweet_trove.go
index 9783906..2ba0757 100644
--- a/pkg/scraper/tweet_trove.go
+++ b/pkg/scraper/tweet_trove.go
@@ -154,11 +154,11 @@ func (trove *TweetTrove) FillMissingUserIDs() {
     }
 }
 
-func (trove *TweetTrove) FillSpaceDetails() error {
+func (trove *TweetTrove) FillSpaceDetails(api *API) error {
     fmt.Println("Filling space details")
     for i := range trove.Spaces {
         fmt.Printf("Getting space: %q\n", trove.Spaces[i].ID)
-        new_trove, err := FetchSpaceDetail(trove.Spaces[i].ID)
+        new_trove, err := api.FetchSpaceDetail(trove.Spaces[i].ID)
         if err != nil {
             return err
         }
@@ -177,10 +177,10 @@
     return nil
 }
 
-func (trove *TweetTrove) PostProcess() error {
+func (trove *TweetTrove) PostProcess(api *API) error {
     trove.FetchTombstoneUsers()
     trove.FillMissingUserIDs()
-    err := trove.FillSpaceDetails()
+    err := trove.FillSpaceDetails(api)
     if err != nil {
         return err
     }