Enable 'wrapcheck' linter

parent 8261087103
commit f08da27cc2
@@ -23,7 +23,7 @@ linters:
   - unused
   - varcheck
   - whitespace
-  # - wrapcheck
+  - wrapcheck
   - lll
   - godox
   - errorlint
@@ -533,17 +533,11 @@ linters-settings:
 # multi-if: false # Enforces newlines (or comments) after every multi-line if statement
 # multi-func: false # Enforces newlines (or comments) after every multi-line function signature
 
-# wrapcheck:
-# # An array of strings that specify substrings of signatures to ignore.
-# # If this set, it will override the default set of ignored signatures.
-# # See https://github.com/tomarrell/wrapcheck#configuration for more information.
-# ignoreSigs:
-# - .Errorf(
-# - errors.New(
-# - errors.Unwrap(
-# - .Wrap(
-# - .Wrapf(
-# - .WithMessage(
+  wrapcheck:
+    # An array of strings that specify substrings of signatures to ignore.
+    # If this set, it will override the default set of ignored signatures.
+    # See https://github.com/tomarrell/wrapcheck#configuration for more information.
+    ignoreSigs:
 
 # # The custom section can be used to define linter plugins to be loaded at runtime.
 # # See README doc for more info.
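For reference (an illustrative aside, not part of the diff): wrapcheck reports error values that come from another package and are returned without wrapping, which is why the hunks below replace bare "return err" statements with fmt.Errorf(...) using the %w verb. A minimal, hypothetical sketch of the two cases — loadConfig and its path are made up for illustration:

package example

import (
    "fmt"
    "os"
)

func loadConfig(path string) ([]byte, error) {
    data, err := os.ReadFile(path)
    if err != nil {
        // wrapcheck would flag:  return nil, err  (error from another package returned unwrapped)
        return nil, fmt.Errorf("Error reading config %q:\n %w", path, err) // wrapped with %w: accepted
    }
    return data, nil
}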
@@ -28,7 +28,7 @@ func (d DefaultDownloader) Curl(url string, outpath string) error {
     println(url)
     resp, err := http.Get(url)
     if err != nil {
-        return err
+        return fmt.Errorf("Error executing HTTP GET(%q):\n %w", url, err)
     }
     if resp.StatusCode != 200 {
         return fmt.Errorf("Error %s: %s", url, resp.Status)
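Worth noting (illustrative, not from the repository): because these messages wrap with %w rather than formatting with %v, callers can still branch on the underlying error through errors.Is and errors.As. The check helper below is hypothetical, but it wraps its error the same way the Curl method above does:

package example

import (
    "errors"
    "fmt"
    "io/fs"
    "os"
)

// check wraps any stat failure with context, keeping the original error in the chain.
func check(path string) error {
    if _, err := os.Stat(path); err != nil {
        return fmt.Errorf("Error checking %q:\n %w", path, err)
    }
    return nil
}

func main() {
    err := check("does-not-exist")
    fmt.Println(errors.Is(err, fs.ErrNotExist)) // true: the sentinel survives the wrapping
    var pathErr *fs.PathError
    fmt.Println(errors.As(err, &pathErr)) // true: the original *fs.PathError is still reachable
}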
@@ -53,7 +53,7 @@ func (p Profile) download_tweet_image(img *scraper.Image, downloader MediaDownlo
     outfile := path.Join(p.ProfileDir, "images", img.LocalFilename)
     err := downloader.Curl(img.RemoteURL, outfile)
     if err != nil {
-        return err
+        return fmt.Errorf("Error downloading tweet image (TweetID %d):\n %w", img.TweetID, err)
     }
     img.IsDownloaded = true
     return p.SaveImage(*img)
@@ -67,14 +67,14 @@ func (p Profile) download_tweet_video(v *scraper.Video, downloader MediaDownload
     outfile := path.Join(p.ProfileDir, "videos", v.LocalFilename)
     err := downloader.Curl(v.RemoteURL, outfile)
     if err != nil {
-        return err
+        return fmt.Errorf("Error downloading video (TweetID %d):\n %w", v.TweetID, err)
     }

     // Download the thumbnail
     outfile = path.Join(p.ProfileDir, "video_thumbnails", v.ThumbnailLocalPath)
     err = downloader.Curl(v.ThumbnailRemoteUrl, outfile)
     if err != nil {
-        return err
+        return fmt.Errorf("Error downloading video thumbnail (TweetID %d):\n %w", v.TweetID, err)
     }

     v.IsDownloaded = true
@@ -89,7 +89,7 @@ func (p Profile) download_link_thumbnail(url *scraper.Url, downloader MediaDownl
         outfile := path.Join(p.ProfileDir, "link_preview_images", url.ThumbnailLocalPath)
         err := downloader.Curl(url.ThumbnailRemoteUrl, outfile)
         if err != nil {
-            return err
+            return fmt.Errorf("Error downloading link thumbnail (TweetID %d):\n %w", url.TweetID, err)
         }
     }
     url.IsContentDownloaded = true
@@ -166,7 +166,7 @@ func (p Profile) DownloadUserContentWithInjector(u *scraper.User, downloader Med

     err := downloader.Curl(target_url, outfile)
     if err != nil {
-        return err
+        return fmt.Errorf("Error downloading profile image for user %q:\n %w", u.Handle, err)
     }

     // Skip it if there's no banner image
@@ -179,7 +179,7 @@ func (p Profile) DownloadUserContentWithInjector(u *scraper.User, downloader Med
             err = downloader.Curl(u.BannerImageUrl+"/600x200", outfile)
         }
         if err != nil {
-            return err
+            return fmt.Errorf("Error downloading banner image for user %q:\n %w", u.Handle, err)
         }
     }

@@ -1,6 +1,8 @@
 package persistence
 
 import (
+    "fmt"
+
     "offline_twitter/scraper"
 )
 
@@ -12,15 +14,18 @@ import (
 */
 func (p Profile) SaveImage(img scraper.Image) error {
     _, err := p.DB.Exec(`
-        insert into images (id, tweet_id, width, height, remote_url, local_filename, is_downloaded)
-        values (?, ?, ?, ?, ?, ?, ?)
-        on conflict do update
-        set is_downloaded=(is_downloaded or ?)
-        `,
+        insert into images (id, tweet_id, width, height, remote_url, local_filename, is_downloaded)
+        values (?, ?, ?, ?, ?, ?, ?)
+        on conflict do update
+        set is_downloaded=(is_downloaded or ?)
+        `,
         img.ID, img.TweetID, img.Width, img.Height, img.RemoteURL, img.LocalFilename, img.IsDownloaded,
         img.IsDownloaded,
     )
-    return err
+    if err != nil {
+        return fmt.Errorf("Error saving image (tweet ID %d):\n %w", img.TweetID, err)
+    }
+    return nil
 }
 
 /**
@@ -31,19 +36,22 @@ func (p Profile) SaveImage(img scraper.Image) error {
 */
 func (p Profile) SaveVideo(vid scraper.Video) error {
     _, err := p.DB.Exec(`
-        insert into videos (id, tweet_id, width, height, remote_url, local_filename, thumbnail_remote_url, thumbnail_local_filename,
-        duration, view_count, is_downloaded, is_gif)
-        values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-        on conflict do update
-        set is_downloaded=(is_downloaded or ?),
-        view_count=max(view_count, ?)
-        `,
+        insert into videos (id, tweet_id, width, height, remote_url, local_filename, thumbnail_remote_url, thumbnail_local_filename,
+        duration, view_count, is_downloaded, is_gif)
+        values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+        on conflict do update
+        set is_downloaded=(is_downloaded or ?),
+        view_count=max(view_count, ?)
+        `,
         vid.ID, vid.TweetID, vid.Width, vid.Height, vid.RemoteURL, vid.LocalFilename, vid.ThumbnailRemoteUrl, vid.ThumbnailLocalPath,
         vid.Duration, vid.ViewCount, vid.IsDownloaded, vid.IsGif,

         vid.IsDownloaded, vid.ViewCount,
     )
-    return err
+    if err != nil {
+        return fmt.Errorf("Error saving video (tweet ID %d):\n %w", vid.TweetID, err)
+    }
+    return nil
 }
 
 /**
@@ -51,18 +59,21 @@ func (p Profile) SaveVideo(vid scraper.Video) error {
 */
 func (p Profile) SaveUrl(url scraper.Url) error {
     _, err := p.DB.Exec(`
-        insert into urls (tweet_id, domain, text, short_text, title, description, creator_id, site_id, thumbnail_width, thumbnail_height,
-        thumbnail_remote_url, thumbnail_local_path, has_card, has_thumbnail, is_content_downloaded)
-        values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-        on conflict do update
-        set is_content_downloaded=(is_content_downloaded or ?)
-        `,
+        insert into urls (tweet_id, domain, text, short_text, title, description, creator_id, site_id, thumbnail_width, thumbnail_height,
+        thumbnail_remote_url, thumbnail_local_path, has_card, has_thumbnail, is_content_downloaded)
+        values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+        on conflict do update
+        set is_content_downloaded=(is_content_downloaded or ?)
+        `,
         url.TweetID, url.Domain, url.Text, url.ShortText, url.Title, url.Description, url.CreatorID, url.SiteID, url.ThumbnailWidth,
         url.ThumbnailHeight, url.ThumbnailRemoteUrl, url.ThumbnailLocalPath, url.HasCard, url.HasThumbnail, url.IsContentDownloaded,

         url.IsContentDownloaded,
     )
-    return err
+    if err != nil {
+        return fmt.Errorf("Error saving Url (tweet ID %d):\n %w", url.TweetID, err)
+    }
+    return nil
 }
 
 /**
@@ -70,22 +81,25 @@ func (p Profile) SaveUrl(url scraper.Url) error {
 */
 func (p Profile) SavePoll(poll scraper.Poll) error {
     _, err := p.DB.Exec(`
-        insert into polls (id, tweet_id, num_choices, choice1, choice1_votes, choice2, choice2_votes, choice3, choice3_votes, choice4,
-        choice4_votes, voting_duration, voting_ends_at, last_scraped_at)
-        values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
-        on conflict do update
-        set choice1_votes=?,
-        choice2_votes=?,
-        choice3_votes=?,
-        choice4_votes=?,
-        last_scraped_at=?
-        `,
+        insert into polls (id, tweet_id, num_choices, choice1, choice1_votes, choice2, choice2_votes, choice3, choice3_votes, choice4,
+        choice4_votes, voting_duration, voting_ends_at, last_scraped_at)
+        values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+        on conflict do update
+        set choice1_votes=?,
+        choice2_votes=?,
+        choice3_votes=?,
+        choice4_votes=?,
+        last_scraped_at=?
+        `,
         poll.ID, poll.TweetID, poll.NumChoices, poll.Choice1, poll.Choice1_Votes, poll.Choice2, poll.Choice2_Votes, poll.Choice3,
         poll.Choice3_Votes, poll.Choice4, poll.Choice4_Votes, poll.VotingDuration, poll.VotingEndsAt, poll.LastUpdatedAt,

         poll.Choice1_Votes, poll.Choice2_Votes, poll.Choice3_Votes, poll.Choice4_Votes, poll.LastUpdatedAt,
     )
-    return err
+    if err != nil {
+        return fmt.Errorf("Error saving Poll (tweet ID %d):\n %w", poll.TweetID, err)
+    }
+    return nil
 }
 
 /**
@@ -93,8 +107,8 @@ func (p Profile) SavePoll(poll scraper.Poll) error {
 */
 func (p Profile) GetImagesForTweet(t scraper.Tweet) (imgs []scraper.Image, err error) {
     err = p.DB.Select(&imgs,
-        "select id, tweet_id, width, height, remote_url, local_filename, is_downloaded from images where tweet_id=?",
-        t.ID)
+        "select id, tweet_id, width, height, remote_url, local_filename, is_downloaded from images where tweet_id=?",
+        t.ID)
     return
 }
 
@@ -103,12 +117,12 @@ func (p Profile) GetImagesForTweet(t scraper.Tweet) (imgs []scraper.Image, err e
 */
 func (p Profile) GetVideosForTweet(t scraper.Tweet) (vids []scraper.Video, err error) {
     err = p.DB.Select(&vids, `
-        select id, tweet_id, width, height, remote_url, local_filename, thumbnail_remote_url, thumbnail_local_filename, duration,
-        view_count, is_downloaded, is_gif
-        from videos
-        where tweet_id = ?
-        `, t.ID)
-    return
+        select id, tweet_id, width, height, remote_url, local_filename, thumbnail_remote_url, thumbnail_local_filename, duration,
+        view_count, is_downloaded, is_gif
+        from videos
+        where tweet_id = ?
+        `, t.ID)
+    return
 }
 
 /**
@@ -116,13 +130,13 @@ func (p Profile) GetVideosForTweet(t scraper.Tweet) (vids []scraper.Video, err e
 */
 func (p Profile) GetUrlsForTweet(t scraper.Tweet) (urls []scraper.Url, err error) {
     err = p.DB.Select(&urls, `
-        select tweet_id, domain, text, short_text, title, description, creator_id, site_id, thumbnail_width, thumbnail_height,
-        thumbnail_remote_url, thumbnail_local_path, has_card, has_thumbnail, is_content_downloaded
-        from urls
-        where tweet_id = ?
-        order by rowid
-        `, t.ID)
-    return
+        select tweet_id, domain, text, short_text, title, description, creator_id, site_id, thumbnail_width, thumbnail_height,
+        thumbnail_remote_url, thumbnail_local_path, has_card, has_thumbnail, is_content_downloaded
+        from urls
+        where tweet_id = ?
+        order by rowid
+        `, t.ID)
+    return
 }
 
 /**
@@ -130,10 +144,10 @@ func (p Profile) GetUrlsForTweet(t scraper.Tweet) (urls []scraper.Url, err error
 */
 func (p Profile) GetPollsForTweet(t scraper.Tweet) (polls []scraper.Poll, err error) {
     err = p.DB.Select(&polls, `
-        select id, tweet_id, num_choices, choice1, choice1_votes, choice2, choice2_votes, choice3, choice3_votes, choice4, choice4_votes,
-        voting_duration, voting_ends_at, last_scraped_at
-        from polls
-        where tweet_id = ?
-        `, t.ID)
-    return
+        select id, tweet_id, num_choices, choice1, choice1_votes, choice2, choice2_votes, choice3, choice3_votes, choice4, choice4_votes,
+        voting_duration, voting_ends_at, last_scraped_at
+        from polls
+        where tweet_id = ?
+        `, t.ID)
+    return
 }
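A side note on the queries in the hunks above: the "on conflict do update set is_downloaded=(is_downloaded or ?)" clause turns each save into an idempotent upsert, and the OR means a row can gain the downloaded flag but never lose it. A rough, hypothetical sketch of the same idiom against a cut-down one-table schema (not the real one):

package example

import (
    "database/sql"
    "fmt"

    _ "github.com/mattn/go-sqlite3"
)

// saveImageFlag upserts a row, only ever flipping is_downloaded from 0 to 1,
// and wraps any failure the same way the SaveImage/SaveVideo/SaveUrl hunks do.
func saveImageFlag(db *sql.DB, id int64, downloaded bool) error {
    _, err := db.Exec(`
        insert into images (id, is_downloaded) values (?, ?)
        on conflict do update set is_downloaded=(is_downloaded or ?)`,
        id, downloaded,
        downloaded,
    )
    if err != nil {
        return fmt.Errorf("Error saving image %d:\n %w", id, err)
    }
    return nil
}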
@@ -5,6 +5,7 @@ import (
     "fmt"
     "os"
     "path"
+
     sql "github.com/jmoiron/sqlx"
     "github.com/jmoiron/sqlx/reflectx"
     _ "github.com/mattn/go-sqlite3"
@@ -51,7 +52,7 @@ func NewProfile(target_dir string) (Profile, error) {
     fmt.Printf("Creating new profile: %s\n", target_dir)
     err := os.Mkdir(target_dir, os.FileMode(0755))
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error creating directory %q:\n %w", target_dir, err)
     }
 
     // Create `twitter.db`
@@ -66,46 +67,46 @@ func NewProfile(target_dir string) (Profile, error) {
     settings := Settings{}
     data, err := yaml.Marshal(&settings)
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error YAML-marshalling [empty!] settings file:\n %w", err)
     }
     err = os.WriteFile(settings_file, data, os.FileMode(0644))
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error creating settings file %q:\n %w", settings_file, err)
     }
 
     // Create `profile_images`
     fmt.Printf("Creating............. %s/\n", profile_images_dir)
     err = os.Mkdir(profile_images_dir, os.FileMode(0755))
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error creating %q:\n %w", profile_images_dir, err)
     }
 
     // Create `link_thumbnail_images`
     fmt.Printf("Creating............. %s/\n", link_thumbnails_dir)
     err = os.Mkdir(link_thumbnails_dir, os.FileMode(0755))
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error creating %q:\n %w", link_thumbnails_dir, err)
     }
 
     // Create `images`
     fmt.Printf("Creating............. %s/\n", images_dir)
     err = os.Mkdir(images_dir, os.FileMode(0755))
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error creating %q:\n %w", images_dir, err)
     }
 
     // Create `videos`
     fmt.Printf("Creating............. %s/\n", videos_dir)
     err = os.Mkdir(videos_dir, os.FileMode(0755))
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error creating %q:\n %w", videos_dir, err)
     }
 
     // Create `video_thumbnails`
     fmt.Printf("Creating............. %s/\n", video_thumbnails_dir)
     err = os.Mkdir(video_thumbnails_dir, os.FileMode(0755))
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error creating %q:\n %w", video_thumbnails_dir, err)
     }
 
     return Profile{target_dir, settings, db}, nil
@@ -135,12 +136,12 @@ func LoadProfile(profile_dir string) (Profile, error) {
 
     settings_data, err := os.ReadFile(settings_file)
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error reading %q:\n %w", settings_file, err)
     }
     settings := Settings{}
     err = yaml.Unmarshal(settings_data, &settings)
     if err != nil {
-        return Profile{}, err
+        return Profile{}, fmt.Errorf("Error YAML-unmarshalling %q:\n %w", settings_file, err)
     }
 
     db := sql.MustOpen("sqlite3", fmt.Sprintf("%s?_foreign_keys=on&_journal_mode=WAL", sqlite_file))
@@ -1,6 +1,8 @@
 package persistence
 
 import (
+    "fmt"
+
     "offline_twitter/scraper"
 )
 
@@ -15,7 +17,10 @@ func (p Profile) SaveRetweet(r scraper.Retweet) error {
         `,
         r.RetweetID, r.TweetID, r.RetweetedByID, r.RetweetedAt.Unix(),
     )
-    return err
+    if err != nil {
+        return fmt.Errorf("Error executing SaveRetweet(%d):\n %w", r.RetweetID, err)
+    }
+    return nil
 }
 
 /**
@@ -28,5 +33,8 @@ func (p Profile) GetRetweetById(id scraper.TweetID) (scraper.Retweet, error) {
         from retweets
         where retweet_id = ?
         `, id)
-    return r, err
+    if err != nil {
+        return r, fmt.Errorf("Error executing GetRetweetById(%d):\n %w", id, err)
+    }
+    return r, nil
 }
@@ -2,8 +2,9 @@ package persistence
 
 import (
     "database/sql"
-    "strings"
     "errors"
+    "fmt"
+    "strings"
 
     "offline_twitter/scraper"
 )
@@ -37,7 +38,7 @@ func (p Profile) SaveTweet(t scraper.Tweet) error {
     )
 
     if err != nil {
-        return err
+        return fmt.Errorf("Error executing SaveTweet(ID %d):\n %w", t.ID, err)
     }
     for _, url := range t.Urls {
         err := p.SaveUrl(url)
@@ -60,7 +61,7 @@ func (p Profile) SaveTweet(t scraper.Tweet) error {
     for _, hashtag := range t.Hashtags {
         _, err := db.Exec("insert into hashtags (tweet_id, text) values (?, ?) on conflict do nothing", t.ID, hashtag)
         if err != nil {
-            return err
+            return fmt.Errorf("Error inserting hashtag %q on tweet ID %d:\n %w", hashtag, t.ID, err)
         }
     }
     for _, poll := range t.Polls {
@@ -72,7 +73,7 @@ func (p Profile) SaveTweet(t scraper.Tweet) error {
 
     err = tx.Commit()
     if err != nil {
-        return err
+        return fmt.Errorf("Error committing SaveTweet transaction:\n %w", err)
     }
     return nil
 }
@@ -104,7 +105,7 @@ func (p Profile) GetTweetById(id scraper.TweetID) (scraper.Tweet, error) {
     `)
 
     if err != nil {
-        return scraper.Tweet{}, err
+        return scraper.Tweet{}, fmt.Errorf("Error preparing statement in GetTweetByID(%d):\n %w", id, err)
     }
     defer stmt.Close()
 
@@ -118,7 +119,7 @@ func (p Profile) GetTweetById(id scraper.TweetID) (scraper.Tweet, error) {
         &t.QuotedTweetID, &mentions, &reply_mentions, &hashtags, &t.TombstoneType, &t.IsStub, &t.IsContentDownloaded,
         &t.IsConversationScraped, &t.LastScrapedAt)
     if err != nil {
-        return t, err
+        return t, fmt.Errorf("Error parsing result in GetTweetByID(%d):\n %w", id, err)
     }
 
     t.Mentions = []scraper.UserHandle{}
@@ -1,9 +1,10 @@
 package persistence
 
 import (
-    "fmt"
-    "errors"
     "database/sql"
+    "errors"
+    "fmt"
+
     "offline_twitter/scraper"
 )
 
@@ -59,7 +60,7 @@ func (p Profile) SaveUser(u *scraper.User) error {
         u.ProfileImageUrl, u.ProfileImageLocalPath, u.BannerImageUrl, u.BannerImageLocalPath, u.PinnedTweetID, u.IsContentDownloaded,
     )
     if err != nil {
-        return err
+        return fmt.Errorf("Error executing SaveUser(%s):\n %w", u.Handle, err)
     }
 
     return nil
@@ -140,7 +141,10 @@ func (p Profile) GetUserByID(id scraper.UserID) (scraper.User, error) {
     if errors.Is(err, sql.ErrNoRows) {
         return ret, ErrNotInDatabase{"User", id}
     }
-    return ret, err
+    if err != nil {
+        panic(err)
+    }
+    return ret, nil
 }
 
 /**
@@ -87,7 +87,7 @@ func (p Profile) GetDatabaseVersion() (int, error) {
 
     err := row.Scan(&version)
     if err != nil {
-        return 0, err
+        return 0, fmt.Errorf("Error checking database version:\n %w", err)
     }
     return version, nil
 }
@@ -20,12 +20,12 @@ func (api API) GetFeedFor(user_id UserID, cursor string) (TweetResponse, error)
     client := &http.Client{Timeout: 10 * time.Second}
     req, err := http.NewRequest("GET", fmt.Sprintf("%s%d.json", API_USER_TIMELINE_BASE_PATH, user_id), nil)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error initializing HTTP request for GetFeedFor(%d):\n %w", user_id, err)
     }
 
     err = ApiRequestAddTokens(req)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error adding tokens to HTTP request:\n %w", err)
     }
 
     ApiRequestAddAllParams(req)
@@ -36,7 +36,7 @@ func (api API) GetFeedFor(user_id UserID, cursor string) (TweetResponse, error)
 
     resp, err := client.Do(req)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error executing HTTP request for GetFeedFor(%d):\n %w", user_id, err)
     }
     defer resp.Body.Close()
 
@@ -54,13 +54,16 @@ func (api API) GetFeedFor(user_id UserID, cursor string) (TweetResponse, error)
 
     body, err := ioutil.ReadAll(resp.Body)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error reading response body for GetUserFeedFor(%d):\n %w", user_id, err)
     }
     log.Debug(string(body))
 
     var response TweetResponse
     err = json.Unmarshal(body, &response)
-    return response, err
+    if err != nil {
+        return response, fmt.Errorf("Error parsing API response for GetUserFeedFor(%d):\n %w", user_id, err)
+    }
+    return response, nil
 }
 
 /**
@@ -104,17 +107,16 @@ func (api API) GetMoreTweetsFromFeed(user_id UserID, response *TweetResponse, mi
     return nil
 }
 
-
 func (api API) GetTweet(id TweetID, cursor string) (TweetResponse, error) {
     client := &http.Client{Timeout: 10 * time.Second}
     req, err := http.NewRequest("GET", fmt.Sprintf("%s%d.json", API_CONVERSATION_BASE_PATH, id), nil)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error initializing HTTP request:\n %w", err)
     }
 
     err = ApiRequestAddTokens(req)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error adding tokens to HTTP request:\n %w", err)
     }
 
     ApiRequestAddAllParams(req)
@@ -124,7 +126,7 @@ func (api API) GetTweet(id TweetID, cursor string) (TweetResponse, error) {
 
     resp, err := client.Do(req)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error executing HTTP request:\n %w", err)
     }
     defer resp.Body.Close()
 
@@ -138,13 +140,16 @@ func (api API) GetTweet(id TweetID, cursor string) (TweetResponse, error) {
 
     body, err := ioutil.ReadAll(resp.Body)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error reading HTTP request:\n %w", err)
     }
     log.Debug(string(body))
 
     var response TweetResponse
     err = json.Unmarshal(body, &response)
-    return response, err
+    if err != nil {
+        return response, fmt.Errorf("Error parsing API response for GetTweet(%d):\n %w", id, err)
+    }
+    return response, nil
 }
 
 // Resend the request to get more replies if necessary
@@ -178,27 +183,26 @@ func UpdateQueryCursor(req *http.Request, new_cursor string, is_tweet bool) {
     req.URL.RawQuery = query.Encode()
 }
 
-
 func (api API) GetUser(handle UserHandle) (APIUser, error) {
     client := &http.Client{Timeout: 10 * time.Second}
     req, err := http.NewRequest(
         "GET",
-        "https://api.twitter.com/graphql/4S2ihIKfF3xhp-ENxvUAfQ/UserByScreenName?variables=%7B%22screen_name%22%3A%22" + string(handle) +
+        "https://api.twitter.com/graphql/4S2ihIKfF3xhp-ENxvUAfQ/UserByScreenName?variables=%7B%22screen_name%22%3A%22"+string(handle)+
             "%22%2C%22withHighlightedLabel%22%3Atrue%7D",
         nil)
     if err != nil {
-        return APIUser{}, err
+        return APIUser{}, fmt.Errorf("Error initializing HTTP request:\n %w", err)
     }
     err = ApiRequestAddTokens(req)
     if err != nil {
-        return APIUser{}, err
+        return APIUser{}, fmt.Errorf("Error adding tokens to HTTP request:\n %w", err)
     }
 
     var response UserResponse
     for retries := 0; retries < 3; retries += 1 {
         resp, err := client.Do(req)
         if err != nil {
-            return APIUser{}, err
+            return APIUser{}, fmt.Errorf("Error executing HTTP request for GetUser(%s):\n %w", handle, err)
         }
         defer resp.Body.Close()
 
@@ -214,13 +218,13 @@ func (api API) GetUser(handle UserHandle) (APIUser, error) {
 
         body, err := ioutil.ReadAll(resp.Body)
         if err != nil {
-            return APIUser{}, err
+            return APIUser{}, fmt.Errorf("Error retrieving API response to GetUser(%s):\n %w", handle, err)
         }
         log.Debug(string(body))
 
         err = json.Unmarshal(body, &response)
         if err != nil {
-            return APIUser{}, err
+            return APIUser{}, fmt.Errorf("Error parsing API response to GetUser(%s):\n %w", handle, err)
         }
 
         // Retry ONLY if the error is code 50 (random authentication failure), NOT on real errors
@@ -240,16 +244,16 @@ func (api API) Search(query string, cursor string) (TweetResponse, error) {
     client := &http.Client{Timeout: 10 * time.Second}
     req, err := http.NewRequest(
         "GET",
-        "https://twitter.com/i/api/2/search/adaptive.json?count=50&spelling_corrections=1&query_source=typed_query&pc=1&q=" +
+        "https://twitter.com/i/api/2/search/adaptive.json?count=50&spelling_corrections=1&query_source=typed_query&pc=1&q="+
             url.QueryEscape(query),
         nil)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error initializing HTTP request:\n %w", err)
     }
 
     err = ApiRequestAddTokens(req)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error adding tokens to HTTP request:\n %w", err)
     }
 
     ApiRequestAddAllParams(req)
@@ -261,7 +265,7 @@ func (api API) Search(query string, cursor string) (TweetResponse, error) {
 
     resp, err := client.Do(req)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error executing HTTP request for Search(%q):\n %w", query, err)
     }
     defer resp.Body.Close()
 
@@ -275,13 +279,16 @@ func (api API) Search(query string, cursor string) (TweetResponse, error) {
 
     body, err := ioutil.ReadAll(resp.Body)
     if err != nil {
-        return TweetResponse{}, err
+        return TweetResponse{}, fmt.Errorf("Error retrieving API response for Search(%q):\n %w", query, err)
     }
     // fmt.Println(string(body))
 
     var response TweetResponse
     err = json.Unmarshal(body, &response)
-    return response, err
+    if err != nil {
+        return response, fmt.Errorf("Error parsing API response to Search(%q):\n %w", query, err)
+    }
+    return response, nil
 }
 
 func (api API) GetMoreTweetsFromSearch(query string, response *TweetResponse, max_results int) error {
@@ -311,10 +318,9 @@ func (api API) GetMoreTweetsFromSearch(query string, response *TweetResponse, ma
     return nil
 }
 
-
 // Add Bearer token and guest token
 func ApiRequestAddTokens(req *http.Request) error {
-    req.Header.Set("Authorization", "Bearer " + BEARER_TOKEN)
+    req.Header.Set("Authorization", "Bearer "+BEARER_TOKEN)
     req.Header.Set("x-twitter-client-language", "en")
 
     guestToken, err := GetGuestToken()
@@ -378,12 +378,12 @@ func (api API) GetGraphqlFeedFor(user_id UserID, cursor string) (APIV2Response,
     client := &http.Client{Timeout: 10 * time.Second}
     req, err := http.NewRequest("GET", get_graphql_user_timeline_url(user_id, cursor), nil)
     if err != nil {
-        return APIV2Response{}, err
+        return APIV2Response{}, fmt.Errorf("Error initializing HTTP request:\n %w", err)
     }
 
     err = ApiRequestAddTokens(req)
     if err != nil {
-        return APIV2Response{}, err
+        return APIV2Response{}, fmt.Errorf("Error adding tokens to HTTP request:\n %w", err)
     }
 
     if cursor != "" {
@@ -392,7 +392,7 @@ func (api API) GetGraphqlFeedFor(user_id UserID, cursor string) (APIV2Response,
 
     resp, err := client.Do(req)
     if err != nil {
-        return APIV2Response{}, err
+        return APIV2Response{}, fmt.Errorf("Error executing HTTP request:\n %w", err)
     }
     defer resp.Body.Close()
 
@@ -410,13 +410,16 @@ func (api API) GetGraphqlFeedFor(user_id UserID, cursor string) (APIV2Response,
 
     body, err := ioutil.ReadAll(resp.Body)
     if err != nil {
-        return APIV2Response{}, err
+        return APIV2Response{}, fmt.Errorf("Error reading HTTP response body:\n %w", err)
     }
     log.Debug(string(body))
 
     var response APIV2Response
     err = json.Unmarshal(body, &response)
-    return response, err
+    if err != nil {
+        return response, fmt.Errorf("Error parsing API response for GetGraphqlFeedFor(%d):\n %w", user_id, err)
+    }
+    return response, nil
 }
 
 /**
@@ -22,13 +22,13 @@ func GetGuestToken() (string, error) {
     client := &http.Client{Timeout: 10 * time.Second}
     req, err := http.NewRequest("POST", "https://api.twitter.com/1.1/guest/activate.json", nil)
     if err != nil {
-        return "", err
+        return "", fmt.Errorf("Error initializing HTTP request:\n %w", err)
     }
-    req.Header.Set("Authorization", "Bearer " + BEARER_TOKEN)
+    req.Header.Set("Authorization", "Bearer "+BEARER_TOKEN)
 
     resp, err := client.Do(req)
     if err != nil {
-        return "", err
+        return "", fmt.Errorf("Error executing HTTP request:\n %w", err)
     }
 
     defer resp.Body.Close()
@@ -43,12 +43,12 @@ func GetGuestToken() (string, error) {
 
     body, err := ioutil.ReadAll(resp.Body)
     if err != nil {
-        return "", err
+        return "", fmt.Errorf("Error reading HTTP response body:\n %w", err)
     }
 
     err = json.Unmarshal(body, &guestToken)
     if err != nil {
-        return "", err
+        return "", fmt.Errorf("Error parsing API response:\n %w", err)
     }
 
     guestToken.RefreshedAt = time.Now()
@@ -1,9 +1,9 @@
 package scraper
 
 import (
-    "time"
-    "fmt"
     "database/sql/driver"
+    "fmt"
+    "time"
 )
 
 type Timestamp struct {
@@ -11,7 +11,7 @@ type Timestamp struct {
 }
 
 func (t Timestamp) Value() (driver.Value, error) {
-    return t.Unix(), nil
+    return t.Unix(), nil
 }
 
 func (t *Timestamp) Scan(src interface{}) error {
@@ -32,7 +32,7 @@ func TimestampFromString(s string) (Timestamp, error) {
     if err == nil {
         return Timestamp{tmp}, nil
     }
-    return Timestamp{}, err
+    return Timestamp{}, fmt.Errorf("Error parsing timestamp:\n %w", err)
 }
 
 func TimestampFromUnix(num int64) Timestamp {
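For orientation (a sketch, not code from this commit): Timestamp appears to embed time.Time and round-trips through the database as Unix seconds via the driver.Valuer / sql.Scanner pair touched above; the Scan body below is assumed, since the hunk only shows its signature.

package example

import (
    "database/sql/driver"
    "fmt"
    "time"
)

type Timestamp struct {
    time.Time
}

// Value stores the timestamp as Unix seconds, matching the hunk above.
func (t Timestamp) Value() (driver.Value, error) {
    return t.Unix(), nil
}

// Scan restores it from an integer column; this body is illustrative only.
func (t *Timestamp) Scan(src interface{}) error {
    val, ok := src.(int64)
    if !ok {
        return fmt.Errorf("Error scanning timestamp: unexpected type %T", src)
    }
    *t = Timestamp{time.Unix(val, 0)}
    return nil
}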
@@ -200,7 +200,7 @@ func GetTweet(id TweetID) (Tweet, error) {
     single_tweet, ok := tweet_response.GlobalObjects.Tweets[fmt.Sprint(id)]
 
     if !ok {
-        return Tweet{}, fmt.Errorf("Didn't get the tweet!\n%v", tweet_response)
+        return Tweet{}, fmt.Errorf("Didn't get the tweet!")
     }
 
     return ParseSingleTweet(single_tweet)