Move common type definitions (Tweet, User, etc) from 'scraper' package to 'persistence'

Alessio 2025-02-14 15:54:36 -08:00
parent 4abbb93c63
commit 041af0f91d
90 changed files with 281 additions and 285 deletions
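Every hunk below applies the same mechanical substitution. As a minimal sketch (assembled from the first hunk; the `main` wrapper is illustrative and not part of the commit): the named `persistence` import becomes a dot-import, so the moved types (`TweetID`, `Tweet`, `User`, etc.) resolve unqualified, while `scraper` remains a named import for its functions.

```go
// Sketch only — assumes the post-commit package layout described above.
package main

import (
	"fmt"
	"strconv"

	// Dot-import: Tweet, User, TweetID, etc. now live in persistence
	// and are referenced without a package qualifier.
	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
	// scraper is still imported normally for its functions.
	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

// Before this commit: func extract_id_from(url string) (scraper.TweetID, error)
func extract_id_from(url string) (TweetID, error) {
	_, id, is_ok := scraper.TryParseTweetUrl(url)
	if is_ok {
		return id, nil
	}
	num, err := strconv.Atoi(url)
	return TweetID(num), err
}

func main() {
	id, err := extract_id_from("1395882872729477131")
	fmt.Println(id, err)
}
```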

View File

@@ -9,6 +9,7 @@ import (
 	"runtime"
 	"strconv"
 
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/terminal_utils"
 )
@@ -55,14 +56,14 @@ func happy_exit(text string, exit_err error) {
  *
  * returns: the id at the end of the tweet: e.g., 1395882872729477131
  */
-func extract_id_from(url string) (scraper.TweetID, error) {
+func extract_id_from(url string) (TweetID, error) {
 	_, id, is_ok := scraper.TryParseTweetUrl(url)
 	if is_ok {
 		return id, nil
 	}
 	num, err := strconv.Atoi(url)
-	return scraper.TweetID(num), err
+	return TweetID(num), err
 }
 
 // Get a sensible default path to create a default profile. Uses `XDG_DATA_HOME` if available
@@ -98,7 +99,7 @@ func is_scrape_failure(err error) bool {
 }
 
 // DUPE: full_save_tweet_trove
-func full_save_tweet_trove(trove scraper.TweetTrove) {
+func full_save_tweet_trove(trove TweetTrove) {
 	conflicting_users := profile.SaveTweetTrove(trove, true, api.DownloadMedia)
 	for _, u_id := range conflicting_users {
 		fmt.Printf(terminal_utils.COLOR_YELLOW+
@@ -110,7 +111,7 @@ func full_save_tweet_trove(trove scraper.TweetTrove) {
 		if errors.Is(err, scraper.ErrDoesntExist) {
 			// Mark them as deleted.
 			// Handle and display name won't be updated if the user exists.
-			updated_user = scraper.User{ID: u_id, DisplayName: "<Unknown User>", Handle: "<UNKNOWN USER>", IsDeleted: true}
+			updated_user = User{ID: u_id, DisplayName: "<Unknown User>", Handle: "<UNKNOWN USER>", IsDeleted: true}
 		} else if err != nil {
 			panic(fmt.Errorf("error scraping conflicting user (ID %d): %w", u_id, err))
 		}

View File

@@ -15,12 +15,12 @@ import (
 	"time"
 
 	"gitlab.com/offline-twitter/twitter_offline_engine/internal/webserver"
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
 )
 
 // Global variable referencing the open data profile
-var profile persistence.Profile
+var profile Profile
 
 var version_string string
@@ -119,7 +119,7 @@ func main() {
 		}
 		// Path exists and is a directory; safe to continue
 	}
-	profile, err = persistence.LoadProfile(*profile_dir)
+	profile, err = LoadProfile(*profile_dir)
 	if err != nil {
 		if *use_default_profile {
 			create_profile(*profile_dir)
@@ -133,7 +133,7 @@ func main() {
 			// Lop off the ".session" suffix (allows using `--session asdf.session` which lets you tab-autocomplete at command line)
 			*session_name = (*session_name)[:len(*session_name)-8]
 		}
 		profile.LoadSession(UserHandle(*session_name), &api)
 	} else {
 		var err error
 		api, err = scraper.NewGuestSession()
@@ -162,15 +162,15 @@ func main() {
 		}
 		login(target, password)
 	case "fetch_user":
-		fetch_user(scraper.UserHandle(target))
+		fetch_user(UserHandle(target))
 	case "fetch_user_by_id":
 		id, err := strconv.Atoi(target)
 		if err != nil {
 			panic(err)
 		}
-		fetch_user_by_id(scraper.UserID(id))
+		fetch_user_by_id(UserID(id))
 	case "download_user_content":
-		download_user_content(scraper.UserHandle(target))
+		download_user_content(UserHandle(target))
 	case "fetch_tweet_only":
 		fetch_tweet_only(target)
 	case "fetch_tweet":
@@ -280,25 +280,25 @@ func login(username string, password string) {
  * - target_dir: the location of the new data dir.
  */
 func create_profile(target_dir string) {
-	_, err := persistence.NewProfile(target_dir)
+	_, err := NewProfile(target_dir)
 	if err != nil {
 		panic(err)
 	}
 }
 
-func _fetch_user_by_id(id scraper.UserID) error {
+func _fetch_user_by_id(id UserID) error {
 	user, err := scraper.GetUserByID(id)
 	if errors.Is(err, scraper.ErrDoesntExist) {
 		// Mark them as deleted.
 		// Handle and display name won't be updated if the user exists.
-		user = scraper.User{ID: id, DisplayName: "<Unknown User>", Handle: "<UNKNOWN USER>", IsDeleted: true}
+		user = User{ID: id, DisplayName: "<Unknown User>", Handle: "<UNKNOWN USER>", IsDeleted: true}
 	} else if err != nil {
 		return fmt.Errorf("scraping error on user ID %d: %w", id, err)
 	}
 
 	log.Debugf("%#v\n", user)
 	err = profile.SaveUser(&user)
-	var conflict_err persistence.ErrConflictingUserHandle
+	var conflict_err ErrConflictingUserHandle
 	if errors.As(err, &conflict_err) {
 		log.Warnf(
 			"Conflicting user handle found (ID %d); old user has been marked deleted. Rescraping them",
@@ -319,7 +319,7 @@ func _fetch_user_by_id(id scraper.UserID) error {
 	return nil
 }
 
-func fetch_user(handle scraper.UserHandle) {
+func fetch_user(handle UserHandle) {
 	user, err := api.GetUser(handle)
 	if errors.Is(err, scraper.ErrDoesntExist) {
 		// There's several reasons we could get a ErrDoesntExist:
@@ -335,7 +335,7 @@ func fetch_user(handle scraper.UserHandle) {
 	log.Debugf("%#v\n", user)
 	err = profile.SaveUser(&user)
-	var conflict_err persistence.ErrConflictingUserHandle
+	var conflict_err ErrConflictingUserHandle
 	if errors.As(err, &conflict_err) {
 		log.Warnf(
 			"Conflicting user handle found (ID %d); old user has been marked deleted. Rescraping them",
@@ -352,7 +352,7 @@ func fetch_user(handle scraper.UserHandle) {
 	happy_exit("Saved the user", nil)
 }
 
-func fetch_user_by_id(id scraper.UserID) {
+func fetch_user_by_id(id UserID) {
 	err := _fetch_user_by_id(id)
 	if err != nil {
 		die(err.Error(), false, -1)
@@ -382,7 +382,7 @@ func fetch_tweet_only(tweet_identifier string) {
 	if !ok {
 		panic("Trove didn't contain its own tweet!")
 	}
-	tweet.LastScrapedAt = scraper.Timestamp{time.Now()}
+	tweet.LastScrapedAt = Timestamp{time.Now()}
 	tweet.IsConversationScraped = true
 	log.Debug(tweet)
@@ -422,7 +422,7 @@ func fetch_tweet_conversation(tweet_identifier string, how_many int) {
  * - handle: the user handle to get
  */
 func fetch_user_feed(handle string, how_many int) {
-	user, err := profile.GetUserByHandle(scraper.UserHandle(handle))
+	user, err := profile.GetUserByHandle(UserHandle(handle))
 	if is_scrape_failure(err) {
 		die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
 	}
@@ -440,7 +440,7 @@ func fetch_user_feed(handle string, how_many int) {
 }
 
 func get_user_likes(handle string, how_many int) {
-	user, err := profile.GetUserByHandle(scraper.UserHandle(handle))
+	user, err := profile.GetUserByHandle(UserHandle(handle))
 	if err != nil {
 		die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
 	}
@@ -458,7 +458,7 @@ func get_user_likes(handle string, how_many int) {
 }
 
 func get_followees(handle string, how_many int) {
-	user, err := profile.GetUserByHandle(scraper.UserHandle(handle))
+	user, err := profile.GetUserByHandle(UserHandle(handle))
 	if err != nil {
 		die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
 	}
@@ -473,7 +473,7 @@ func get_followees(handle string, how_many int) {
 	happy_exit(fmt.Sprintf("Saved %d followees", len(trove.Users)), err)
 }
 
 func get_followers(handle string, how_many int) {
-	user, err := profile.GetUserByHandle(scraper.UserHandle(handle))
+	user, err := profile.GetUserByHandle(UserHandle(handle))
 	if err != nil {
 		die(fmt.Sprintf("Error getting user: %s\n %s", handle, err.Error()), false, -1)
 	}
@@ -528,7 +528,7 @@ func download_tweet_content(tweet_identifier string) {
 	}
 }
 
-func download_user_content(handle scraper.UserHandle) {
+func download_user_content(handle UserHandle) {
 	user, err := profile.GetUserByHandle(handle)
 	if err != nil {
 		panic("Couldn't get the user from database: " + err.Error())
@@ -550,7 +550,7 @@ func search(query string, how_many int) {
 }
 
 func follow_user(handle string, is_followed bool) {
-	user, err := profile.GetUserByHandle(scraper.UserHandle(handle))
+	user, err := profile.GetUserByHandle(UserHandle(handle))
 	if err != nil {
 		panic("Couldn't get the user from database: " + err.Error())
 	}
@@ -612,11 +612,11 @@ func fetch_inbox(how_many int) {
 }
 
 func fetch_dm(id string, how_many int) {
-	room, err := profile.GetChatRoom(scraper.DMChatRoomID(id))
+	room, err := profile.GetChatRoom(DMChatRoomID(id))
 	if is_scrape_failure(err) {
 		panic(err)
 	}
-	max_id := scraper.DMMessageID(^uint(0) >> 1)
+	max_id := DMMessageID(^uint(0) >> 1)
 	trove, err := api.GetConversation(room.ID, max_id, how_many)
 	if err != nil {
 		die(fmt.Sprintf("Failed to fetch dm:\n %s", err.Error()), false, 1)
@@ -629,12 +629,12 @@ func fetch_dm(id string, how_many int) {
 }
 
 func send_dm(room_id string, text string, in_reply_to_id int) {
-	room, err := profile.GetChatRoom(scraper.DMChatRoomID(room_id))
+	room, err := profile.GetChatRoom(DMChatRoomID(room_id))
 	if err != nil {
 		die(fmt.Sprintf("No such chat room: %d", in_reply_to_id), false, 1)
 	}
-	trove, err := api.SendDMMessage(room.ID, text, scraper.DMMessageID(in_reply_to_id))
+	trove, err := api.SendDMMessage(room.ID, text, DMMessageID(in_reply_to_id))
 	if err != nil {
 		die(fmt.Sprintf("Failed to send dm:\n %s", err.Error()), false, 1)
 	}
@@ -643,15 +643,15 @@ func send_dm(room_id string, text string, in_reply_to_id int) {
 }
 
 func send_dm_reacc(room_id string, in_reply_to_id int, reacc string) {
-	room, err := profile.GetChatRoom(scraper.DMChatRoomID(room_id))
+	room, err := profile.GetChatRoom(DMChatRoomID(room_id))
 	if err != nil {
 		die(fmt.Sprintf("No such chat room: %d", in_reply_to_id), false, 1)
 	}
-	_, err = profile.GetChatMessage(scraper.DMMessageID(in_reply_to_id))
+	_, err = profile.GetChatMessage(DMMessageID(in_reply_to_id))
 	if err != nil {
 		die(fmt.Sprintf("No such message: %d", in_reply_to_id), false, 1)
 	}
-	err = api.SendDMReaction(room.ID, scraper.DMMessageID(in_reply_to_id), reacc)
+	err = api.SendDMReaction(room.ID, DMMessageID(in_reply_to_id), reacc)
 	if err != nil {
 		die(fmt.Sprintf("Failed to react to message:\n %s", err.Error()), false, 1)
 	}

View File

@@ -4,7 +4,7 @@ import (
 	"errors"
 	"net/http"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
 )
@@ -29,7 +29,7 @@ func (app *Application) Bookmarks(w http.ResponseWriter, r *http.Request) {
 		app.full_save_tweet_trove(trove)
 	}
 
-	c := persistence.NewUserFeedBookmarksCursor(app.ActiveUser.Handle)
+	c := NewUserFeedBookmarksCursor(app.ActiveUser.Handle)
 	err := parse_cursor_value(&c, r)
 	if err != nil {
 		app.error_400_with_message(w, r, "invalid cursor (must be a number)")
@@ -37,11 +37,11 @@ func (app *Application) Bookmarks(w http.ResponseWriter, r *http.Request) {
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+	if err != nil && !errors.Is(err, ErrEndOfFeed) {
 		panic(err)
 	}
 
-	if is_htmx(r) && c.CursorPosition == persistence.CURSOR_MIDDLE {
+	if is_htmx(r) && c.CursorPosition == CURSOR_MIDDLE {
 		// It's a Show More request
 		app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, feed)
 	} else {

View File

@@ -4,7 +4,7 @@ import (
 	"net/http"
 	"strings"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 )
 
 // TODO: deprecated-offline-follows
@@ -22,7 +22,7 @@ func (app *Application) UserFollow(w http.ResponseWriter, r *http.Request) {
 		app.error_400_with_message(w, r, "Bad URL: "+r.URL.Path)
 		return
 	}
-	user, err := app.Profile.GetUserByHandle(scraper.UserHandle(parts[1]))
+	user, err := app.Profile.GetUserByHandle(UserHandle(parts[1]))
 	if err != nil {
 		app.error_404(w, r)
 		return
@@ -46,7 +46,7 @@ func (app *Application) UserUnfollow(w http.ResponseWriter, r *http.Request) {
 		app.error_400_with_message(w, r, "Bad URL: "+r.URL.Path)
 		return
 	}
-	user, err := app.Profile.GetUserByHandle(scraper.UserHandle(parts[1]))
+	user, err := app.Profile.GetUserByHandle(UserHandle(parts[1]))
 	if err != nil {
 		app.error_404(w, r)
 		return

View File

@@ -10,13 +10,12 @@ import (
 	"strconv"
 	"strings"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
-	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 )
 
 type ListData struct {
 	List
-	Feed persistence.Feed
+	Feed Feed
 	UserIDs []UserID
 	ActiveTab string
 }
@@ -36,17 +35,17 @@ func NewListData(users []User) (ListData, TweetTrove) {
 func (app *Application) ListDetailFeed(w http.ResponseWriter, r *http.Request) {
 	list := get_list_from_context(r.Context())
-	c := persistence.NewListCursor(list.ID)
+	c := NewListCursor(list.ID)
 	err := parse_cursor_value(&c, r)
 	if err != nil {
 		app.error_400_with_message(w, r, "invalid cursor (must be a number)")
 		return
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+	if err != nil && !errors.Is(err, ErrEndOfFeed) {
 		panic(err)
 	}
 
-	if is_htmx(r) && c.CursorPosition == persistence.CURSOR_MIDDLE {
+	if is_htmx(r) && c.CursorPosition == CURSOR_MIDDLE {
 		// It's a Show More request
 		app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, feed)
 	} else {

View File

@@ -7,12 +7,13 @@ import (
 	"io"
 	"net/http"
 
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
 )
 
 type LoginData struct {
 	LoginForm
-	ExistingSessions []scraper.UserHandle
+	ExistingSessions []UserHandle
 }
 
 type LoginForm struct {
@@ -112,7 +113,7 @@ func (app *Application) ChangeSession(w http.ResponseWriter, r *http.Request) {
 	formdata, err := io.ReadAll(r.Body)
 	panic_if(err)
 	panic_if(json.Unmarshal(formdata, &form)) // TODO: HTTP 400 not 500
-	err = app.SetActiveUser(scraper.UserHandle(form.AccountName))
+	err = app.SetActiveUser(UserHandle(form.AccountName))
 	if err != nil {
 		app.error_400_with_message(w, r, fmt.Sprintf("User not in database: %s", form.AccountName))
 		return

View File

@@ -11,15 +11,15 @@ import (
 	"strings"
 	"time"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
 )
 
 type MessageData struct {
-	persistence.DMChatView
+	DMChatView
 	LatestPollingTimestamp int
 	ScrollBottom bool
-	UnreadRoomIDs map[scraper.DMChatRoomID]bool
+	UnreadRoomIDs map[DMChatRoomID]bool
 }
 
 func (app *Application) messages_index(w http.ResponseWriter, r *http.Request) {
@@ -30,7 +30,7 @@ func (app *Application) messages_index(w http.ResponseWriter, r *http.Request) {
 func (app *Application) message_mark_as_read(w http.ResponseWriter, r *http.Request) {
 	room_id := get_room_id_from_context(r.Context())
-	c := persistence.NewConversationCursor(room_id)
+	c := NewConversationCursor(room_id)
 	c.PageSize = 1
 	chat_contents := app.Profile.GetChatRoomMessagesByCursor(c)
 	last_message_id := chat_contents.MessageIDs[len(chat_contents.MessageIDs)-1]
@@ -76,7 +76,7 @@ func (app *Application) message_send(w http.ResponseWriter, r *http.Request) {
 		app.error_401(w, r)
 		return
 	}
-	trove, err := app.API.SendDMMessage(room_id, message_data.Text, scraper.DMMessageID(in_reply_to_id))
+	trove, err := app.API.SendDMMessage(room_id, message_data.Text, DMMessageID(in_reply_to_id))
 	if err != nil {
 		panic(err)
 	}
@@ -107,8 +107,8 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 	var data struct {
-		MessageID scraper.DMMessageID `json:"message_id,string"`
-		Reacc string `json:"reacc"`
+		MessageID DMMessageID `json:"message_id,string"`
+		Reacc string `json:"reacc"`
 	}
 	data_, err := io.ReadAll(r.Body)
 	panic_if(err)
@@ -129,11 +129,11 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
 			panic(global_data)
 		}
 	}
-	dm_message.Reactions[app.ActiveUser.ID] = scraper.DMReaction{
+	dm_message.Reactions[app.ActiveUser.ID] = DMReaction{
 		ID: 0, // Hopefully will be OK temporarily
 		DMMessageID: dm_message.ID,
 		SenderID: app.ActiveUser.ID,
-		SentAt: scraper.Timestamp{time.Now()},
+		SentAt: Timestamp{time.Now()},
 		Emoji: data.Reacc,
 	}
 	global_data.Messages[dm_message.ID] = dm_message
@@ -147,7 +147,7 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
 	}
 
 	if r.URL.Query().Has("scrape") && !app.IsScrapingDisabled {
-		max_id := scraper.DMMessageID(^uint(0) >> 1)
+		max_id := DMMessageID(^uint(0) >> 1)
 		trove, err := app.API.GetConversation(room_id, max_id, 50) // TODO: parameterizable
 		if err != nil {
 			panic(err)
@@ -168,12 +168,12 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
 		chat_view_data.ScrollBottom = true
 	}
 
-	c := persistence.NewConversationCursor(room_id)
-	c.SinceTimestamp = scraper.TimestampFromUnixMilli(int64(chat_view_data.LatestPollingTimestamp))
+	c := NewConversationCursor(room_id)
+	c.SinceTimestamp = TimestampFromUnixMilli(int64(chat_view_data.LatestPollingTimestamp))
 	if cursor_value := r.URL.Query().Get("cursor"); cursor_value != "" {
 		until_time, err := strconv.Atoi(cursor_value)
 		panic_if(err) // TODO: 400 not 500
-		c.UntilTimestamp = scraper.TimestampFromUnixMilli(int64(until_time))
+		c.UntilTimestamp = TimestampFromUnixMilli(int64(until_time))
 	}
 	chat_contents := app.Profile.GetChatRoomMessagesByCursor(c)
 	chat_view_data.DMChatView.MergeWith(chat_contents.TweetTrove)
@@ -210,7 +210,7 @@ func (app *Application) message_detail(w http.ResponseWriter, r *http.Request) {
 func (app *Application) get_message_global_data() (MessageData, PageGlobalData) {
 	// Get message list previews
 	chat_view_data := MessageData{DMChatView: app.Profile.GetChatRoomsPreview(app.ActiveUser.ID)}
-	chat_view_data.UnreadRoomIDs = make(map[scraper.DMChatRoomID]bool)
+	chat_view_data.UnreadRoomIDs = make(map[DMChatRoomID]bool)
 	for _, id := range app.Profile.GetUnreadConversations(app.ActiveUser.ID) {
 		chat_view_data.UnreadRoomIDs[id] = true
 	}
@@ -223,7 +223,7 @@ func (app *Application) get_message_global_data() (MessageData, PageGlobalData) {
 func (app *Application) messages_refresh_list(w http.ResponseWriter, r *http.Request) {
 	chat_view_data, global_data := app.get_message_global_data()
-	chat_view_data.ActiveRoomID = scraper.DMChatRoomID(r.URL.Query().Get("active-chat"))
+	chat_view_data.ActiveRoomID = DMChatRoomID(r.URL.Query().Get("active-chat"))
 	app.buffered_render_htmx(w, "chat-list", global_data, chat_view_data)
 }
@@ -250,7 +250,7 @@ func (app *Application) Messages(w http.ResponseWriter, r *http.Request) {
 		app.messages_refresh_list(w, r)
 		return
 	}
-	room_id := scraper.DMChatRoomID(parts[0])
+	room_id := DMChatRoomID(parts[0])
 
 	// Messages index
 	if room_id == "" {
@@ -267,12 +267,12 @@ func (app *Application) Messages(w http.ResponseWriter, r *http.Request) {
 const ROOM_ID_KEY = key("room_id") // type `key` is defined in "handler_tweet_detail"
 
-func add_room_id_to_context(ctx context.Context, room_id scraper.DMChatRoomID) context.Context {
+func add_room_id_to_context(ctx context.Context, room_id DMChatRoomID) context.Context {
 	return context.WithValue(ctx, ROOM_ID_KEY, room_id)
 }
 
-func get_room_id_from_context(ctx context.Context) scraper.DMChatRoomID {
-	room_id, is_ok := ctx.Value(ROOM_ID_KEY).(scraper.DMChatRoomID)
+func get_room_id_from_context(ctx context.Context) DMChatRoomID {
+	room_id, is_ok := ctx.Value(ROOM_ID_KEY).(DMChatRoomID)
 	if !is_ok {
 		panic("room_id not found in context")
 	}

View File

@@ -12,7 +12,7 @@ import (
 	"golang.org/x/net/html"
 
 	"gitlab.com/offline-twitter/twitter_offline_engine/internal/webserver"
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 )
 
 func TestMessagesIndexPageRequiresActiveUser(t *testing.T) {
@@ -63,7 +63,7 @@ func TestMessagesRoomRequiresCorrectUser(t *testing.T) {
 	recorder := httptest.NewRecorder()
 	app := webserver.NewApp(profile)
 	app.IsScrapingDisabled = true
-	app.ActiveUser = scraper.User{ID: 782982734, Handle: "Not a real user"} // Simulate a login
+	app.ActiveUser = User{ID: 782982734, Handle: "Not a real user"} // Simulate a login
 	app.WithMiddlewares().ServeHTTP(recorder, httptest.NewRequest("GET", "/messages/1488963321701171204-1178839081222115328", nil))
 	resp2 := recorder.Result()
 	require.Equal(404, resp2.StatusCode)

View File

@@ -8,24 +8,24 @@ import (
 	"strconv"
 	"strings"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
 )
 
 type SearchPageData struct {
-	persistence.Feed
+	Feed
 	SearchText string
-	SortOrder persistence.SortOrder
+	SortOrder SortOrder
 	SortOrderOptions []string
 	IsUsersSearch bool
-	UserIDs []scraper.UserID
+	UserIDs []UserID
 	// TODO: fill out the search text in the search bar as well (needs modifying the base template)
 }
 
 func NewSearchPageData() SearchPageData {
-	ret := SearchPageData{SortOrderOptions: []string{}, Feed: persistence.NewFeed()}
+	ret := SearchPageData{SortOrderOptions: []string{}, Feed: NewFeed()}
 	for i := 0; i < 4; i++ { // Don't include "Liked At" option which is #4
-		ret.SortOrderOptions = append(ret.SortOrderOptions, persistence.SortOrder(i).String())
+		ret.SortOrderOptions = append(ret.SortOrderOptions, SortOrder(i).String())
 	}
 	return ret
 }
@@ -34,7 +34,7 @@ func (app *Application) SearchUsers(w http.ResponseWriter, r *http.Request) {
 	ret := NewSearchPageData()
 	ret.IsUsersSearch = true
 	ret.SearchText = strings.Trim(r.URL.Path, "/")
-	ret.UserIDs = []scraper.UserID{}
+	ret.UserIDs = []UserID{}
 	for _, u := range app.Profile.SearchUsers(ret.SearchText) {
 		ret.TweetTrove.Users[u.ID] = u
 		ret.UserIDs = append(ret.UserIDs, u.ID)
@@ -110,7 +110,7 @@ func (app *Application) Search(w http.ResponseWriter, r *http.Request) {
 		app.full_save_tweet_trove(trove)
 	}
 
-	c, err := persistence.NewCursorFromSearchQuery(search_text)
+	c, err := NewCursorFromSearchQuery(search_text)
 	if err != nil {
 		app.error_400_with_message(w, r, err.Error())
 		return
@@ -121,13 +121,13 @@ func (app *Application) Search(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 	var is_ok bool
-	c.SortOrder, is_ok = persistence.SortOrderFromString(r.URL.Query().Get("sort-order"))
+	c.SortOrder, is_ok = SortOrderFromString(r.URL.Query().Get("sort-order"))
 	if !is_ok && r.URL.Query().Get("sort-order") != "" {
 		app.error_400_with_message(w, r, "Invalid sort order")
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+	if err != nil && !errors.Is(err, ErrEndOfFeed) {
 		panic(err)
 	}
@@ -136,7 +136,7 @@ func (app *Application) Search(w http.ResponseWriter, r *http.Request) {
 	data.SearchText = search_text
 	data.SortOrder = c.SortOrder
 
-	if is_htmx(r) && c.CursorPosition == persistence.CURSOR_MIDDLE {
+	if is_htmx(r) && c.CursorPosition == CURSOR_MIDDLE {
 		// It's a Show More request
 		app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: data.Feed.TweetTrove, SearchText: search_text}, data)
 	} else {

View File

@@ -5,12 +5,11 @@ import (
 	"net/http"
 	"strings"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 )
 
 type TimelineData struct {
-	persistence.Feed
+	Feed
 	ActiveTab string
 }
@@ -19,7 +18,7 @@ type TimelineData struct {
 func (app *Application) OfflineTimeline(w http.ResponseWriter, r *http.Request) {
 	app.traceLog.Printf("'Timeline' handler (path: %q)", r.URL.Path)
-	c := persistence.NewTimelineCursor()
+	c := NewTimelineCursor()
 	err := parse_cursor_value(&c, r)
 	if err != nil {
 		app.error_400_with_message(w, r, "invalid cursor (must be a number)")
@@ -27,11 +26,11 @@ func (app *Application) OfflineTimeline(w http.ResponseWriter, r *http.Request) {
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+	if err != nil && !errors.Is(err, ErrEndOfFeed) {
 		panic(err)
 	}
 
-	if is_htmx(r) && c.CursorPosition == persistence.CURSOR_MIDDLE {
+	if is_htmx(r) && c.CursorPosition == CURSOR_MIDDLE {
 		// It's a Show More request
 		app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, feed)
 	} else {
@@ -53,14 +52,14 @@ func (app *Application) Timeline(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	c := persistence.Cursor{
+	c := Cursor{
 		Keywords: []string{},
-		ToUserHandles: []scraper.UserHandle{},
-		SinceTimestamp: scraper.TimestampFromUnix(0),
-		UntilTimestamp: scraper.TimestampFromUnix(0),
-		CursorPosition: persistence.CURSOR_START,
+		ToUserHandles: []UserHandle{},
+		SinceTimestamp: TimestampFromUnix(0),
+		UntilTimestamp: TimestampFromUnix(0),
+		CursorPosition: CURSOR_START,
 		CursorValue: 0,
-		SortOrder: persistence.SORT_ORDER_NEWEST,
+		SortOrder: SORT_ORDER_NEWEST,
 		PageSize: 50,
 
 		FollowedByUserHandle: app.ActiveUser.Handle,
@@ -72,11 +71,11 @@ func (app *Application) Timeline(w http.ResponseWriter, r *http.Request) {
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+	if err != nil && !errors.Is(err, ErrEndOfFeed) {
 		panic(err)
 	}
 
-	if is_htmx(r) && c.CursorPosition == persistence.CURSOR_MIDDLE {
+	if is_htmx(r) && c.CursorPosition == CURSOR_MIDDLE {
 		// It's a Show More request
 		app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, feed)
 	} else {

View File

@@ -8,31 +8,31 @@ import (
 	"strconv"
 	"strings"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
 )
 
 var ErrNotFound = errors.New("not found")
 
 type TweetDetailData struct {
-	persistence.TweetDetailView
-	MainTweetID scraper.TweetID
+	TweetDetailView
+	MainTweetID TweetID
 }
 
 func NewTweetDetailData() TweetDetailData {
 	return TweetDetailData{
-		TweetDetailView: persistence.NewTweetDetailView(),
+		TweetDetailView: NewTweetDetailView(),
 	}
 }
 
-func (app *Application) ensure_tweet(id scraper.TweetID, is_forced bool, is_conversation_required bool) (scraper.Tweet, error) {
+func (app *Application) ensure_tweet(id TweetID, is_forced bool, is_conversation_required bool) (Tweet, error) {
 	is_available := false
 	is_needing_scrape := is_forced
 
 	// Check if tweet is already in DB
 	tweet, err := app.Profile.GetTweetById(id)
 	if err != nil {
-		if errors.Is(err, persistence.ErrNotInDatabase) {
+		if errors.Is(err, ErrNotInDatabase) {
 			is_needing_scrape = true
 			is_available = false
 		} else {
@@ -58,14 +58,14 @@ func (app *Application) ensure_tweet(id scraper.TweetID, is_forced bool, is_conversation_required bool) (scraper.Tweet, error) {
 		}
 		if err != nil && !errors.Is(err, scraper.END_OF_FEED) {
-			return scraper.Tweet{}, fmt.Errorf("scraper error: %w", err)
+			return Tweet{}, fmt.Errorf("scraper error: %w", err)
 		}
 	} else if is_needing_scrape {
 		app.InfoLog.Printf("Would have scraped Tweet: %d", id)
 	}
 
 	if !is_available {
-		return scraper.Tweet{}, ErrNotFound
+		return Tweet{}, ErrNotFound
 	}
 	return tweet, nil
 }
@@ -92,7 +92,7 @@ func (app *Application) UnlikeTweet(w http.ResponseWriter, r *http.Request) {
 		// It's a different error
 		panic(err)
 	}
-	err = app.Profile.DeleteLike(scraper.Like{UserID: app.ActiveUser.ID, TweetID: tweet.ID})
+	err = app.Profile.DeleteLike(Like{UserID: app.ActiveUser.ID, TweetID: tweet.ID})
 	panic_if(err)
 
 	tweet.IsLikedByCurrentUser = false
@@ -108,7 +108,7 @@ func (app *Application) TweetDetail(w http.ResponseWriter, r *http.Request) {
 		app.error_400_with_message(w, r, fmt.Sprintf("Invalid tweet ID: %q", parts[1]))
 		return
 	}
-	tweet_id := scraper.TweetID(val)
+	tweet_id := TweetID(val)
 
 	data := NewTweetDetailData()
 	data.MainTweetID = tweet_id
@@ -169,12 +169,12 @@ type key string
 
 const TWEET_KEY = key("tweet")
 
-func add_tweet_to_context(ctx context.Context, tweet scraper.Tweet) context.Context {
+func add_tweet_to_context(ctx context.Context, tweet Tweet) context.Context {
 	return context.WithValue(ctx, TWEET_KEY, tweet)
 }
 
-func get_tweet_from_context(ctx context.Context) scraper.Tweet {
-	tweet, is_ok := ctx.Value(TWEET_KEY).(scraper.Tweet)
+func get_tweet_from_context(ctx context.Context) Tweet {
+	tweet, is_ok := ctx.Value(TWEET_KEY).(Tweet)
 	if !is_ok {
 		panic("Tweet not found in context")
 	}

View File

@@ -11,7 +11,7 @@ import (
 	"github.com/stretchr/testify/require"
 	"golang.org/x/net/html"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 )
 
 func TestTweetDetail(t *testing.T) {
@@ -105,7 +105,7 @@ func TestLongTweet(t *testing.T) {
 	paragraphs := cascadia.QueryAll(root, selector(".tweet .text"))
 	assert.Len(paragraphs, 22)
 
-	twt, err := profile.GetTweetById(scraper.TweetID(1695110851324256692))
+	twt, err := profile.GetTweetById(TweetID(1695110851324256692))
 	require.NoError(err)
 	for i, s := range strings.Split(twt.Text, "\n") {
 		assert.Equal(strings.TrimSpace(s), strings.TrimSpace(paragraphs[i].FirstChild.Data))

View File

@@ -6,8 +6,7 @@ import (
 	"net/http"
 	"strings"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 )
 
 func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
@@ -15,10 +14,10 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 	parts := strings.Split(strings.Trim(r.URL.Path, "/"), "/")
 
-	user, err := app.Profile.GetUserByHandle(scraper.UserHandle(parts[0]))
-	if errors.Is(err, persistence.ErrNotInDatabase) {
+	user, err := app.Profile.GetUserByHandle(UserHandle(parts[0]))
+	if errors.Is(err, ErrNotInDatabase) {
 		if !app.IsScrapingDisabled {
-			user, err = app.API.GetUser(scraper.UserHandle(parts[0]))
+			user, err = app.API.GetUser(UserHandle(parts[0]))
 		}
 		if err != nil { // ErrDoesntExist or otherwise
 			app.error_404(w, r)
@@ -47,7 +46,7 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 		}
 
 		// Update the user themself
-		user, err = app.API.GetUser(scraper.UserHandle(parts[0]))
+		user, err = app.API.GetUser(UserHandle(parts[0]))
 		panic_if(err)
 		panic_if(app.Profile.SaveUser(&user)) // TODO: handle conflicting users
 		panic_if(app.Profile.DownloadUserContentFor(&user, app.API.DownloadMedia))
@@ -70,17 +69,17 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 		}
 	}
 
-	var c persistence.Cursor
+	var c Cursor
 	if len(parts) > 1 && parts[1] == "likes" {
-		c = persistence.NewUserFeedLikesCursor(user.Handle)
+		c = NewUserFeedLikesCursor(user.Handle)
 	} else {
-		c = persistence.NewUserFeedCursor(user.Handle)
+		c = NewUserFeedCursor(user.Handle)
 	}
 	if len(parts) > 1 && parts[1] == "without_replies" {
-		c.FilterReplies = persistence.EXCLUDE
+		c.FilterReplies = EXCLUDE
 	}
 	if len(parts) > 1 && parts[1] == "media" {
-		c.FilterMedia = persistence.REQUIRE
+		c.FilterMedia = REQUIRE
 	}
 	err = parse_cursor_value(&c, r)
 	if err != nil {
@@ -89,15 +88,15 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 	}
 
 	feed, err := app.Profile.NextPage(c, app.ActiveUser.ID)
-	if err != nil && !errors.Is(err, persistence.ErrEndOfFeed) {
+	if err != nil && !errors.Is(err, ErrEndOfFeed) {
 		panic(err)
 	}
 	feed.Users[user.ID] = user
 
 	data := struct {
-		persistence.Feed
-		scraper.UserID
-		PinnedTweet scraper.Tweet
+		Feed
+		UserID
+		PinnedTweet Tweet
 		FeedType string
 	}{Feed: feed, UserID: user.ID}
@@ -109,17 +108,17 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 	// Add a pinned tweet if there is one and it's in the DB; otherwise skip
 	// Also, only show pinned tweets on default tab (tweets+replies) or "without_replies" tab
-	if user.PinnedTweetID != scraper.TweetID(0) && (len(parts) <= 1 || parts[1] == "without_replies") {
+	if user.PinnedTweetID != TweetID(0) && (len(parts) <= 1 || parts[1] == "without_replies") {
 		data.PinnedTweet, err = app.Profile.GetTweetById(user.PinnedTweetID)
-		if err != nil && !errors.Is(err, persistence.ErrNotInDatabase) {
+		if err != nil && !errors.Is(err, ErrNotInDatabase) {
 			panic(err)
 		}
 		feed.TweetTrove.Tweets[data.PinnedTweet.ID] = data.PinnedTweet
 
 		// Fetch quoted tweet if necessary
-		if data.PinnedTweet.QuotedTweetID != scraper.TweetID(0) {
+		if data.PinnedTweet.QuotedTweetID != TweetID(0) {
 			feed.TweetTrove.Tweets[data.PinnedTweet.QuotedTweetID], err = app.Profile.GetTweetById(data.PinnedTweet.QuotedTweetID)
-			if err != nil && !errors.Is(err, persistence.ErrNotInDatabase) {
+			if err != nil && !errors.Is(err, ErrNotInDatabase) {
 				panic(err)
 			}
 
 			// And the user
@@ -129,7 +128,7 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 		}
 	}
 
-	if is_htmx(r) && c.CursorPosition == persistence.CURSOR_MIDDLE {
+	if is_htmx(r) && c.CursorPosition == CURSOR_MIDDLE {
 		// It's a Show More request
 		app.buffered_render_htmx(w, "timeline", PageGlobalData{TweetTrove: feed.TweetTrove}, data)
 	} else {
@@ -139,14 +138,14 @@ func (app *Application) UserFeed(w http.ResponseWriter, r *http.Request) {
 type FollowsData struct {
 	Title string
-	HeaderUserID scraper.UserID
-	UserIDs []scraper.UserID
+	HeaderUserID UserID
+	UserIDs []UserID
 }
 
-func NewFollowsData(users []scraper.User) (FollowsData, scraper.TweetTrove) {
-	trove := scraper.NewTweetTrove()
+func NewFollowsData(users []User) (FollowsData, TweetTrove) {
+	trove := NewTweetTrove()
 	data := FollowsData{
-		UserIDs: []scraper.UserID{},
+		UserIDs: []UserID{},
 	}
 	for _, u := range users {
 		trove.Users[u.ID] = u
@@ -155,7 +154,7 @@ func NewFollowsData(users []scraper.User) (FollowsData, scraper.TweetTrove) {
 	return data, trove
 }
 
-func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, user scraper.User) {
+func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, user User) {
 	if r.URL.Query().Has("scrape") {
 		if app.IsScrapingDisabled {
 			app.InfoLog.Printf("Would have scraped: %s", r.URL.Path)
@@ -180,7 +179,7 @@ func (app *Application) UserFollowees(w http.ResponseWriter, r *http.Request, user scraper.User) {
 	app.buffered_render_page(w, "tpl/follows.tpl", PageGlobalData{TweetTrove: trove}, data)
 }
 
-func (app *Application) UserFollowers(w http.ResponseWriter, r *http.Request, user scraper.User) {
+func (app *Application) UserFollowers(w http.ResponseWriter, r *http.Request, user User) {
 	if r.URL.Query().Has("scrape") {
 		if app.IsScrapingDisabled {
 			app.InfoLog.Printf("Would have scraped: %s", r.URL.Path)

View File

@@ -11,8 +11,7 @@ import (
 	"github.com/Masterminds/sprig/v3"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 )
 
 type NotificationBubbles struct {
@@ -22,35 +21,35 @@ type NotificationBubbles struct {
 
 // TODO: this name sucks
 type PageGlobalData struct {
-	scraper.TweetTrove
+	TweetTrove
 	SearchText string
-	FocusedTweetID scraper.TweetID
+	FocusedTweetID TweetID
 	Toasts []Toast
 	NotificationBubbles
 }
 
-func (d PageGlobalData) Tweet(id scraper.TweetID) scraper.Tweet {
+func (d PageGlobalData) Tweet(id TweetID) Tweet {
 	return d.Tweets[id]
 }
-func (d PageGlobalData) User(id scraper.UserID) scraper.User {
+func (d PageGlobalData) User(id UserID) User {
 	return d.Users[id]
 }
-func (d PageGlobalData) Retweet(id scraper.TweetID) scraper.Retweet {
+func (d PageGlobalData) Retweet(id TweetID) Retweet {
 	return d.Retweets[id]
 }
-func (d PageGlobalData) Space(id scraper.SpaceID) scraper.Space {
+func (d PageGlobalData) Space(id SpaceID) Space {
 	return d.Spaces[id]
 }
-func (d PageGlobalData) Notification(id scraper.NotificationID) scraper.Notification {
+func (d PageGlobalData) Notification(id NotificationID) Notification {
 	return d.Notifications[id]
 }
-func (d PageGlobalData) Message(id scraper.DMMessageID) scraper.DMMessage {
+func (d PageGlobalData) Message(id DMMessageID) DMMessage {
 	return d.Messages[id]
 }
-func (d PageGlobalData) ChatRoom(id scraper.DMChatRoomID) scraper.DMChatRoom {
+func (d PageGlobalData) ChatRoom(id DMChatRoomID) DMChatRoom {
 	return d.Rooms[id]
 }
-func (d PageGlobalData) GetFocusedTweetID() scraper.TweetID {
+func (d PageGlobalData) GetFocusedTweetID() TweetID {
 	return d.FocusedTweetID
 }
 func (d PageGlobalData) GetSearchText() string {
@@ -143,18 +142,18 @@ func (app *Application) make_funcmap(global_data PageGlobalData) template.FuncMap {
 		"focused_tweet_id": global_data.GetFocusedTweetID,
 		"search_text": global_data.GetSearchText,
 		"global_data": global_data.GlobalData, // This fucking sucks
-		"active_user": func() scraper.User {
+		"active_user": func() User {
 			return app.ActiveUser
 		},
 
 		// Utility functions
-		"get_tombstone_text": func(t scraper.Tweet) string {
+		"get_tombstone_text": func(t Tweet) string {
 			if t.TombstoneText != "" {
 				return t.TombstoneText
 			}
 			return t.TombstoneType
 		},
-		"cursor_to_query_params": func(c persistence.Cursor) string {
+		"cursor_to_query_params": func(c Cursor) string {
 			result := url.Values{}
 			result.Set("cursor", fmt.Sprint(c.CursorValue))
 			result.Set("sort-order", c.SortOrder.String())

View File

@@ -14,7 +14,7 @@ import (
 	"strings"
 	"time"
 
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
 )
@@ -28,14 +28,14 @@ type Application struct {
 	Middlewares []Middleware
 
-	Profile persistence.Profile
-	ActiveUser scraper.User
+	Profile Profile
+	ActiveUser User
 	IsScrapingDisabled bool
 	API scraper.API
 	LastReadNotificationSortIndex int64
 }
 
-func NewApp(profile persistence.Profile) Application {
+func NewApp(profile Profile) Application {
 	ret := Application{
 		accessLog: log.New(os.Stdout, "ACCESS\t", log.Ldate|log.Ltime),
 		traceLog: log.New(os.Stdout, "TRACE\t", log.Ldate|log.Ltime),
@@ -67,7 +67,7 @@ func (app *Application) WithMiddlewares() http.Handler {
 	return ret
 }
 
-func (app *Application) SetActiveUser(handle scraper.UserHandle) error {
+func (app *Application) SetActiveUser(handle UserHandle) error {
 	if handle == "no account" {
 		app.ActiveUser = get_default_user()
 		app.IsScrapingDisabled = true // API requests will fail b/c not logged in
@@ -83,12 +83,12 @@ func (app *Application) SetActiveUser(handle scraper.UserHandle) error {
 	return nil
 }
 
-func get_default_user() scraper.User {
-	return scraper.User{
+func get_default_user() User {
+	return User{
 		ID: 0,
 		Handle: "[nobody]",
 		DisplayName: "[Not logged in]",
-		ProfileImageLocalPath: path.Base(scraper.DEFAULT_PROFILE_IMAGE_URL),
+		ProfileImageLocalPath: path.Base(DEFAULT_PROFILE_IMAGE_URL),
 		IsContentDownloaded: true,
 	}
 }
@@ -189,7 +189,7 @@ func openWebPage(url string) {
 	}
 }
 
-func parse_cursor_value(c *persistence.Cursor, r *http.Request) error {
+func parse_cursor_value(c *Cursor, r *http.Request) error {
 	cursor_param := r.URL.Query().Get("cursor")
 	if cursor_param != "" {
 		var err error
@@ -197,7 +197,7 @@ func parse_cursor_value(c *persistence.Cursor, r *http.Request) error {
 		if err != nil {
 			return fmt.Errorf("attempted to parse cursor value %q as int: %w", c.CursorValue, err)
 		}
-		c.CursorPosition = persistence.CURSOR_MIDDLE
+		c.CursorPosition = CURSOR_MIDDLE
 	}
 	return nil
 }

View File

@@ -10,8 +10,7 @@ import (
 	"github.com/stretchr/testify/require"
 
 	"gitlab.com/offline-twitter/twitter_offline_engine/internal/webserver"
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
-	"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
+	. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
 )
 
 type CapturingWriter struct {
@@ -23,11 +22,11 @@ func (w *CapturingWriter) Write(p []byte) (int, error) {
 	return len(p), nil
 }
 
-var profile persistence.Profile
+var profile Profile
 
 func init() {
 	var err error
-	profile, err = persistence.LoadProfile("../../sample_data/profile")
+	profile, err = LoadProfile("../../sample_data/profile")
 	if err != nil {
 		panic(err)
 	}
@@ -55,7 +54,7 @@ func do_request_with_active_user(req *http.Request) *http.Response {
 	recorder := httptest.NewRecorder()
 	app := webserver.NewApp(profile)
 	app.IsScrapingDisabled = true
-	app.ActiveUser = scraper.User{ID: 1488963321701171204, Handle: "Offline_Twatter"} // Simulate a login
+	app.ActiveUser = User{ID: 1488963321701171204, Handle: "Offline_Twatter"} // Simulate a login
 	app.WithMiddlewares().ServeHTTP(recorder, req)
 	return recorder.Result()
 }

View File

@ -8,12 +8,13 @@ import (
"runtime/debug" "runtime/debug"
"time" "time"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
type BackgroundTask struct { type BackgroundTask struct {
Name string Name string
GetTroveFunc func(*scraper.API) scraper.TweetTrove GetTroveFunc func(*scraper.API) TweetTrove
StartDelay time.Duration StartDelay time.Duration
Period time.Duration Period time.Duration
@ -81,7 +82,7 @@ func (app *Application) start_background() {
timeline_task := BackgroundTask{ timeline_task := BackgroundTask{
Name: "home timeline", Name: "home timeline",
GetTroveFunc: func(api *scraper.API) scraper.TweetTrove { GetTroveFunc: func(api *scraper.API) TweetTrove {
should_do_following_only := is_following_only%is_following_only_frequency == 0 should_do_following_only := is_following_only%is_following_only_frequency == 0
trove, err := api.GetHomeTimeline("", should_do_following_only) trove, err := api.GetHomeTimeline("", should_do_following_only)
if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) { if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) {
@ -97,7 +98,7 @@ func (app *Application) start_background() {
likes_task := BackgroundTask{ likes_task := BackgroundTask{
Name: "user likes", Name: "user likes",
GetTroveFunc: func(api *scraper.API) scraper.TweetTrove { GetTroveFunc: func(api *scraper.API) TweetTrove {
trove, err := api.GetUserLikes(api.UserID, 50) // TODO: parameterizable trove, err := api.GetUserLikes(api.UserID, 50) // TODO: parameterizable
if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) { if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) {
panic(err) panic(err)
@ -112,8 +113,8 @@ func (app *Application) start_background() {
dms_task := BackgroundTask{ dms_task := BackgroundTask{
Name: "DM inbox", Name: "DM inbox",
GetTroveFunc: func(api *scraper.API) scraper.TweetTrove { GetTroveFunc: func(api *scraper.API) TweetTrove {
var trove scraper.TweetTrove var trove TweetTrove
var err error var err error
if inbox_cursor == "" { if inbox_cursor == "" {
trove, inbox_cursor, err = api.GetInbox(0) trove, inbox_cursor, err = api.GetInbox(0)
@ -133,7 +134,7 @@ func (app *Application) start_background() {
notifications_task := BackgroundTask{ notifications_task := BackgroundTask{
Name: "DM inbox", Name: "DM inbox",
GetTroveFunc: func(api *scraper.API) scraper.TweetTrove { GetTroveFunc: func(api *scraper.API) TweetTrove {
trove, last_unread_notification_sort_index, err := api.GetNotifications(1) // Just 1 page trove, last_unread_notification_sort_index, err := api.GetNotifications(1) // Just 1 page
if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) { if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) {
panic(err) panic(err)
@ -150,7 +151,7 @@ func (app *Application) start_background() {
bookmarks_task := BackgroundTask{ bookmarks_task := BackgroundTask{
Name: "bookmarks", Name: "bookmarks",
GetTroveFunc: func(api *scraper.API) scraper.TweetTrove { GetTroveFunc: func(api *scraper.API) TweetTrove {
trove, err := app.API.GetBookmarks(10) trove, err := app.API.GetBookmarks(10)
if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) { if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) {
panic(err) panic(err)
@ -165,7 +166,7 @@ func (app *Application) start_background() {
own_profile_task := BackgroundTask{ own_profile_task := BackgroundTask{
Name: "user profile", Name: "user profile",
GetTroveFunc: func(api *scraper.API) scraper.TweetTrove { GetTroveFunc: func(api *scraper.API) TweetTrove {
trove, err := app.API.GetUserFeed(api.UserID, 1) trove, err := app.API.GetUserFeed(api.UserID, 1)
if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) { if err != nil && !errors.Is(err, scraper.END_OF_FEED) && !errors.Is(err, scraper.ErrRateLimited) {
panic(err) panic(err)
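For orientation, a hand-rolled sketch of how a BackgroundTask's fields fit together. The real scheduler lives elsewhere in this file and is not shown in the diff, so the loop below is an assumption, not the actual implementation:

func run_task_sketch(task BackgroundTask, api *scraper.API) {
	time.Sleep(task.StartDelay) // stagger startup, per StartDelay
	for range time.Tick(task.Period) {
		trove := task.GetTroveFunc(api) // panics on unexpected errors, as in the tasks above
		_ = trove                       // the real runner presumably saves the trove; omitted here
	}
}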

View File

@ -4,7 +4,8 @@ import (
"errors" "errors"
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// DUPE: full_save_tweet_trove // DUPE: full_save_tweet_trove
@ -16,8 +17,8 @@ func (app *Application) full_save_tweet_trove(trove TweetTrove) {
for _, u_id := range conflicting_users { for _, u_id := range conflicting_users {
app.InfoLog.Printf("Conflicting user handle found (ID %d); old user has been marked deleted. Rescraping manually", u_id) app.InfoLog.Printf("Conflicting user handle found (ID %d); old user has been marked deleted. Rescraping manually", u_id)
// Rescrape // Rescrape
updated_user, err := GetUserByID(u_id) updated_user, err := scraper.GetUserByID(u_id)
if errors.Is(err, ErrDoesntExist) { if errors.Is(err, scraper.ErrDoesntExist) {
// Mark them as deleted. // Mark them as deleted.
// Handle and display name won't be updated if the user exists. // Handle and display name won't be updated if the user exists.
updated_user = User{ID: u_id, DisplayName: "<Unknown User>", Handle: "<UNKNOWN USER>", IsDeleted: true} updated_user = User{ID: u_id, DisplayName: "<Unknown User>", Handle: "<UNKNOWN USER>", IsDeleted: true}

View File

@ -1,4 +1,4 @@
package scraper package persistence
type BookmarkSortID int64 type BookmarkSortID int64

View File

@ -2,8 +2,6 @@ package persistence
import ( import (
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func (p Profile) SaveBookmark(l Bookmark) error { func (p Profile) SaveBookmark(l Bookmark) error {

View File

@ -6,7 +6,6 @@ import (
"strings" "strings"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
var ( var (

View File

@ -7,7 +7,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// A feed should load // A feed should load

View File

@ -6,8 +6,6 @@ import (
"strconv" "strconv"
"strings" "strings"
"time" "time"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
type SortOrder int type SortOrder int

View File

@ -9,7 +9,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func TestTokenizeSearchString(t *testing.T) { func TestTokenizeSearchString(t *testing.T) {

View File

@ -9,7 +9,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// Use a cursor, sort by newest // Use a cursor, sort by newest

View File

@ -1,4 +1,4 @@
package scraper package persistence
type DMChatRoomID string type DMChatRoomID string

View File

@ -1,4 +1,4 @@
package scraper package persistence
type DMMessageID int type DMMessageID int

View File

@ -7,8 +7,6 @@ import (
"strings" "strings"
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
const ( const (

View File

@ -9,7 +9,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func TestSaveAndLoadChatRoom(t *testing.T) { func TestSaveAndLoadChatRoom(t *testing.T) {

pkg/persistence/errors.go (new file)
View File

@ -0,0 +1,14 @@
package persistence
import (
"errors"
)
// Downloader errors
var (
ErrorDMCA = errors.New("video is DMCAed, unable to download (HTTP 403 Forbidden)")
ErrMediaDownload404 = errors.New("media download HTTP 404")
// TODO: this DEFINITELY does not belong here
ErrRequestTimeout = errors.New("request timed out")
)
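A sketch of intended consumption (illustrative; only the sentinel variables come from the new file above). Callers can branch on them with errors.Is and treat permanently unavailable media as skippable:

func skip_unfetchable_media(download func(url string) error, url string) error {
	err := download(url)
	if errors.Is(err, ErrorDMCA) || errors.Is(err, ErrMediaDownload404) {
		return nil // media is permanently gone; skip rather than fail the save
	}
	return err // includes ErrRequestTimeout, which a caller might instead retry
}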

View File

@ -1,9 +1,5 @@
package persistence package persistence
import (
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)
func (p Profile) SaveFollow(follower_id UserID, followee_id UserID) { func (p Profile) SaveFollow(follower_id UserID, followee_id UserID) {
_, err := p.DB.Exec(` _, err := p.DB.Exec(`
insert into follows (follower_id, followee_id) insert into follows (follower_id, followee_id)

View File

@ -7,7 +7,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func TestSaveAndLoadFollows(t *testing.T) { func TestSaveAndLoadFollows(t *testing.T) {

View File

@ -1,4 +1,4 @@
package scraper package persistence
type ImageID int64 type ImageID int64

View File

@ -1,4 +1,4 @@
package scraper package persistence
type LikeSortID int64 type LikeSortID int64

View File

@ -2,8 +2,6 @@ package persistence
import ( import (
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func (p Profile) SaveLike(l Like) error { func (p Profile) SaveLike(l Like) error {

View File

@ -1,4 +1,4 @@
package scraper package persistence
type ListID int64 type ListID int64
type OnlineListID int64 type OnlineListID int64

View File

@ -4,8 +4,6 @@ import (
"database/sql" "database/sql"
"errors" "errors"
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// Create an empty list, or rename an existing list // Create an empty list, or rename an existing list

View File

@ -10,7 +10,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func TestSaveAndLoadOfflineList(t *testing.T) { func TestSaveAndLoadOfflineList(t *testing.T) {

View File

@ -5,8 +5,6 @@ import (
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
type MediaDownloader interface { type MediaDownloader interface {

View File

@ -6,7 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
// Some types to spy on a MediaDownloader // Some types to spy on a MediaDownloader

View File

@ -2,8 +2,6 @@ package persistence
import ( import (
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// Save an Image // Save an Image

View File

@ -8,7 +8,7 @@ import (
"github.com/go-test/deep" "github.com/go-test/deep"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
// Create an Image, save it, reload it, and make sure it comes back the same // Create an Image, save it, reload it, and make sure it comes back the same

View File

@ -1,4 +1,4 @@
package scraper package persistence
type NotificationID string type NotificationID string

View File

@ -4,8 +4,6 @@ import (
"database/sql" "database/sql"
"errors" "errors"
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func (p Profile) SaveNotification(n Notification) { func (p Profile) SaveNotification(n Notification) {

View File

@ -8,7 +8,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func TestSaveAndLoadNotification(t *testing.T) { func TestSaveAndLoadNotification(t *testing.T) {

View File

@ -1,4 +1,4 @@
package scraper package persistence
import ( import (
"time" "time"

View File

@ -8,8 +8,6 @@ import (
sql "github.com/jmoiron/sqlx" sql "github.com/jmoiron/sqlx"
_ "github.com/mattn/go-sqlite3" _ "github.com/mattn/go-sqlite3"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
//go:embed schema.sql //go:embed schema.sql

View File

@ -1,4 +1,4 @@
package scraper package persistence
type Retweet struct { type Retweet struct {
RetweetID TweetID `db:"retweet_id"` RetweetID TweetID `db:"retweet_id"`

View File

@ -2,8 +2,6 @@ package persistence
import ( import (
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// Save a Retweet. Do nothing if it already exists, because none of its parameters are modifiable. // Save a Retweet. Do nothing if it already exists, because none of its parameters are modifiable.

View File

@ -6,8 +6,6 @@ import (
"os" "os"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func (p Profile) SaveSession(userhandle UserHandle, data []byte) { func (p Profile) SaveSession(userhandle UserHandle, data []byte) {

View File

@ -1,4 +1,4 @@
package scraper package persistence
import ( import (
"fmt" "fmt"

View File

@ -4,8 +4,6 @@ import (
"database/sql" "database/sql"
"errors" "errors"
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
type SpaceParticipant struct { type SpaceParticipant struct {

View File

@ -9,7 +9,7 @@ import (
"github.com/go-test/deep" "github.com/go-test/deep"
"math/rand" "math/rand"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
// Create a Space, save it, reload it, and make sure it comes back the same // Create a Space, save it, reload it, and make sure it comes back the same

View File

@ -1,11 +1,11 @@
package scraper_test package persistence_test
import ( import (
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
func TestFormatSpaceDuration(t *testing.T) { func TestFormatSpaceDuration(t *testing.T) {

View File

@ -1,4 +1,4 @@
package scraper package persistence
import ( import (
"database/sql/driver" "database/sql/driver"

View File

@ -1,4 +1,4 @@
package scraper package persistence
import ( import (
"database/sql/driver" "database/sql/driver"
@ -46,9 +46,9 @@ type Tweet struct {
User *User `db:"user"` User *User `db:"user"`
// For processing tombstones // For processing tombstones
UserHandle UserHandle UserHandle UserHandle
in_reply_to_user_handle UserHandle InReplyToUserHandle UserHandle
in_reply_to_user_id UserID InReplyToUserID UserID
Images []Image Images []Image
Videos []Video Videos []Video
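The lower-case-to-exported renames in this hunk are forced by the move: Go fields whose names start with a lower-case letter are unexported and invisible outside their declaring package, and the scraper package still needs to write these tombstone-reconstruction fields now that Tweet lives in persistence (see the ParseSingleTweet and ToTweetTrove hunks later in this diff). A minimal illustration, with a made-up ID:

// In package scraper, with persistence dot-imported:
t := Tweet{}
t.InReplyToUserID = UserID(12345) // compiles: exported field
// t.in_reply_to_user_id = ...    // would no longer compile across package boundaries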

View File

@ -4,8 +4,6 @@ import (
"database/sql" "database/sql"
"errors" "errors"
"fmt" "fmt"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func (p Profile) SaveTweet(t Tweet) error { func (p Profile) SaveTweet(t Tweet) error {

View File

@ -9,7 +9,6 @@ import (
"github.com/go-test/deep" "github.com/go-test/deep"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// Create a Tweet, save it, reload it, and make sure it comes back the same // Create a Tweet, save it, reload it, and make sure it comes back the same

View File

@ -1,4 +1,4 @@
package scraper package persistence
import ( import (
"fmt" "fmt"

View File

@ -4,8 +4,6 @@ import (
"errors" "errors"
"fmt" "fmt"
"path" "path"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// Convenience function that saves all the objects in a TweetTrove. // Convenience function that saves all the objects in a TweetTrove.

View File

@ -1,4 +1,4 @@
package scraper_test package persistence_test
import ( import (
"testing" "testing"
@ -6,7 +6,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
func TestMergeTweetTroves(t *testing.T) { func TestMergeTweetTroves(t *testing.T) {

View File

@ -1,4 +1,4 @@
package scraper package persistence
import ( import (
"net/url" "net/url"

View File

@ -1,4 +1,4 @@
package scraper package persistence
import ( import (
"fmt" "fmt"
@ -80,32 +80,6 @@ func GetUnknownUserWithHandle(handle UserHandle) User {
} }
} }
/**
* Make a filename for the profile image, that hopefully won't clobber other ones
*/
func (u User) compute_profile_image_local_path() string {
return string(u.Handle) + "_profile_" + path.Base(u.ProfileImageUrl)
}
/**
* Make a filename for the banner image, that hopefully won't clobber other ones.
* Add a file extension if necessary (seems to be necessary).
* If there is no banner image, just return nothing.
*/
func (u User) compute_banner_image_local_path() string {
if u.BannerImageUrl == "" {
return ""
}
base_name := path.Base(u.BannerImageUrl)
// Check if it has an extension (e.g., ".png" or ".jpeg")
if !regexp.MustCompile(`\.\w{2,4}$`).MatchString(base_name) {
// If it doesn't have an extension, add one
base_name += ".jpg"
}
return string(u.Handle) + "_banner_" + base_name
}
/** /**
* Get the URL where we would expect to find a User's tiny profile image * Get the URL where we would expect to find a User's tiny profile image
*/ */

View File

@ -8,8 +8,6 @@ import (
"github.com/jmoiron/sqlx" "github.com/jmoiron/sqlx"
"github.com/mattn/go-sqlite3" "github.com/mattn/go-sqlite3"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
type ErrConflictingUserHandle struct { type ErrConflictingUserHandle struct {

View File

@ -12,7 +12,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// Create a user, save it, reload it, and make sure it comes back the same // Create a user, save it, reload it, and make sure it comes back the same

View File

@ -6,7 +6,6 @@ import (
"time" "time"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
// Load a test profile, or create it if it doesn't exist. // Load a test profile, or create it if it doesn't exist.

View File

@ -8,7 +8,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )
func TestVersionUpgrade(t *testing.T) { func TestVersionUpgrade(t *testing.T) {

View File

@ -1,4 +1,4 @@
package scraper package persistence
type VideoID int64 type VideoID int64

View File

@ -11,12 +11,9 @@ var (
EXTERNAL_API_ERROR = errors.New("Unexpected result from external API") EXTERNAL_API_ERROR = errors.New("Unexpected result from external API")
ErrorIsTombstone = errors.New("tweet is a tombstone") ErrorIsTombstone = errors.New("tweet is a tombstone")
ErrRateLimited = errors.New("rate limited") ErrRateLimited = errors.New("rate limited")
ErrorDMCA = errors.New("video is DMCAed, unable to download (HTTP 403 Forbidden)")
ErrMediaDownload404 = errors.New("media download HTTP 404")
ErrLoginRequired = errors.New("login required; please provide `--session <user>` flag") ErrLoginRequired = errors.New("login required; please provide `--session <user>` flag")
ErrSessionInvalidated = errors.New("session invalidated by Twitter") ErrSessionInvalidated = errors.New("session invalidated by Twitter")
// These are not API errors, but network errors generally // These are not API errors, but network errors generally
ErrNoInternet = errors.New("no internet connection") ErrNoInternet = errors.New("no internet connection")
ErrRequestTimeout = errors.New("request timed out")
) )

View File

@ -3,6 +3,8 @@ package scraper
import ( import (
"encoding/json" "encoding/json"
"net/url" "net/url"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
type GraphqlVariables struct { type GraphqlVariables struct {

View File

@ -4,6 +4,8 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"strings" "strings"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
const LOGIN_URL = "https://twitter.com/i/api/1.1/onboarding/task.json" const LOGIN_URL = "https://twitter.com/i/api/1.1/onboarding/task.json"

View File

@ -14,6 +14,8 @@ import (
"time" "time"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
type API struct { type API struct {

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )

View File

@ -5,6 +5,8 @@ import (
"fmt" "fmt"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
func (api *API) FillSpaceDetails(trove *TweetTrove) error { func (api *API) FillSpaceDetails(trove *TweetTrove) error {

View File

@ -12,6 +12,8 @@ import (
"strconv" "strconv"
"strings" "strings"
"time" "time"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
@ -533,8 +535,8 @@ func ParseSingleTweet(t APITweet) (ret Tweet, err error) {
ret.IsConversationScraped = false // Safe due to the "No Worsening" principle ret.IsConversationScraped = false // Safe due to the "No Worsening" principle
// Extra data that can help piece together tombstoned tweet info // Extra data that can help piece together tombstoned tweet info
ret.in_reply_to_user_id = UserID(t.InReplyToUserID) ret.InReplyToUserID = UserID(t.InReplyToUserID)
ret.in_reply_to_user_handle = UserHandle(t.InReplyToScreenName) ret.InReplyToUserHandle = UserHandle(t.InReplyToScreenName)
return return
} }
@ -658,8 +660,8 @@ func ParseSingleUser(apiUser APIUser) (ret User, err error) {
} }
ret.BannerImageUrl = apiUser.ProfileBannerURL ret.BannerImageUrl = apiUser.ProfileBannerURL
ret.ProfileImageLocalPath = ret.compute_profile_image_local_path() ret.ProfileImageLocalPath = compute_profile_image_local_path(ret)
ret.BannerImageLocalPath = ret.compute_banner_image_local_path() ret.BannerImageLocalPath = compute_banner_image_local_path(ret)
if len(apiUser.PinnedTweetIdsStr) > 0 { if len(apiUser.PinnedTweetIdsStr) > 0 {
ret.PinnedTweetID = TweetID(idstr_to_int(apiUser.PinnedTweetIdsStr[0])) ret.PinnedTweetID = TweetID(idstr_to_int(apiUser.PinnedTweetIdsStr[0]))

View File

@ -9,6 +9,8 @@ import (
"strings" "strings"
"github.com/google/uuid" "github.com/google/uuid"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
type APIDMReaction struct { type APIDMReaction struct {

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )

View File

@ -2,6 +2,8 @@ package scraper
import ( import (
"net/url" "net/url"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
func (api *API) GetFolloweesPage(user_id UserID, cursor string) (APIV2Response, error) { func (api *API) GetFolloweesPage(user_id UserID, cursor string) (APIV2Response, error) {

View File

@ -10,6 +10,8 @@ import (
"time" "time"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
// TODO: pagination // TODO: pagination

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )

View File

@ -4,6 +4,8 @@ import (
"errors" "errors"
"fmt" "fmt"
"strings" "strings"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
var AlreadyLikedThisTweet error = errors.New("already liked this tweet") var AlreadyLikedThisTweet error = errors.New("already liked this tweet")

View File

@ -3,6 +3,8 @@ package scraper
import ( import (
"fmt" "fmt"
"net/url" "net/url"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
type SpaceResponse struct { type SpaceResponse struct {

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )

View File

@ -11,6 +11,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )

View File

@ -4,6 +4,10 @@ import (
"errors" "errors"
"fmt" "fmt"
"net/url" "net/url"
"path/filepath"
"regexp"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
type UserResponse struct { type UserResponse struct {
@ -177,3 +181,25 @@ func (api API) GetUserByID(u_id UserID) (User, error) {
} }
return ParseSingleUser(apiUser) return ParseSingleUser(apiUser)
} }
// Make a filename for the profile image that hopefully won't clobber other ones
func compute_profile_image_local_path(u User) string {
return string(u.Handle) + "_profile_" + filepath.Base(u.ProfileImageUrl)
}
// Make a filename for the banner image that hopefully won't clobber other ones.
// Add a file extension if necessary (seems to be necessary).
// If there is no banner image, just return nothing.
func compute_banner_image_local_path(u User) string {
if u.BannerImageUrl == "" {
return ""
}
base_name := filepath.Base(u.BannerImageUrl)
// Check if it has an extension (e.g., ".png" or ".jpeg")
if !regexp.MustCompile(`\.\w{2,4}$`).MatchString(base_name) {
// If it doesn't have an extension, add one
base_name += ".jpg"
}
return string(u.Handle) + "_banner_" + base_name
}
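A worked example of the two helpers on a made-up user (real banner URLs typically lack a file extension, which is what the regexp check compensates for):

u := User{
	Handle:          "somebody",
	ProfileImageUrl: "https://pbs.twimg.com/profile_images/123/abc.jpg",
	BannerImageUrl:  "https://pbs.twimg.com/profile_banners/123/1585776052",
}
compute_profile_image_local_path(u) // "somebody_profile_abc.jpg"
compute_banner_image_local_path(u)  // "somebody_banner_1585776052.jpg" (".jpg" appended)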

View File

@ -8,6 +8,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )

View File

@ -10,6 +10,8 @@ import (
"time" "time"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
type CardValue struct { type CardValue struct {
@ -777,10 +779,10 @@ func (api_response APIV2Response) ToTweetTrove() (TweetTrove, error) {
panic(fmt.Sprintf("Tombstoned tweet has no ID (should be %d)", tweet.InReplyToID)) panic(fmt.Sprintf("Tombstoned tweet has no ID (should be %d)", tweet.InReplyToID))
} }
// Fill out the replied tweet's UserID using this tweet's "in_reply_to_user_id". // Fill out the replied tweet's UserID using this tweet's "InReplyToUserID".
// If this tweet doesn't have it (i.e., this tweet is also a tombstone), create a fake user instead, and add it to the tweet trove. // If this tweet doesn't have it (i.e., this tweet is also a tombstone), create a fake user instead, and add it to the tweet trove.
if replied_tweet.UserID == 0 || replied_tweet.UserID == GetUnknownUser().ID { if replied_tweet.UserID == 0 || replied_tweet.UserID == GetUnknownUser().ID {
replied_tweet.UserID = tweet.in_reply_to_user_id replied_tweet.UserID = tweet.InReplyToUserID
if replied_tweet.UserID == 0 || replied_tweet.UserID == GetUnknownUser().ID { if replied_tweet.UserID == 0 || replied_tweet.UserID == GetUnknownUser().ID {
fake_user := GetUnknownUser() fake_user := GetUnknownUser()
ret.Users[fake_user.ID] = fake_user ret.Users[fake_user.ID] = fake_user
@ -793,7 +795,7 @@ func (api_response APIV2Response) ToTweetTrove() (TweetTrove, error) {
existing_user = User{ID: replied_tweet.UserID} existing_user = User{ID: replied_tweet.UserID}
} }
if existing_user.Handle == "" { if existing_user.Handle == "" {
existing_user.Handle = tweet.in_reply_to_user_handle existing_user.Handle = tweet.InReplyToUserHandle
} }
ret.Users[replied_tweet.UserID] = existing_user ret.Users[replied_tweet.UserID] = existing_user
ret.TombstoneUsers = append(ret.TombstoneUsers, existing_user.Handle) ret.TombstoneUsers = append(ret.TombstoneUsers, existing_user.Handle)

View File

@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )

View File

@ -12,6 +12,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper" . "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
) )

View File

@ -6,6 +6,8 @@ import (
"net/url" "net/url"
"regexp" "regexp"
"time" "time"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/persistence"
) )
/** /**