REFACTOR: convert function comments to proper godoc
- also fix indentation in versions.go file
This commit is contained in:
parent 160be3f045
commit 604d5b9ce2
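Background (not part of the diff below): godoc picks up the comment block sitting immediately above a declaration as its documentation, and the idiomatic form is plain // line comments whose first sentence starts with the declared name; with the older /** ... */ JavaDoc-style blocks, the interior "* " prefixes tend to show up verbatim in the rendered docs. A minimal sketch of the target style, using a hypothetical Greet function that is not from this repository:

package greeting

import "fmt"

// Greet returns a friendly greeting for the given name.
// godoc attaches this comment to Greet because it sits directly above
// the declaration, with no blank line in between.
func Greet(name string) string {
	return fmt.Sprintf("Hello, %s!", name)
}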
@@ -21,13 +21,11 @@ type DefaultDownloader struct{}

var ErrorDMCA error = errors.New("video is DMCAed, unable to download (HTTP 403 Forbidden)")

/**
* Download a file over HTTP and save it.
*
* args:
* - url: the remote file to download
* - outpath: the path on disk to save it to
*/
// Download a file over HTTP and save it.
//
// args:
// - url: the remote file to download
// - outpath: the path on disk to save it to
func (d DefaultDownloader) Curl(url string, outpath string) error {
println(url)
resp, err := http.Get(url)
@@ -81,9 +79,7 @@ func (d DefaultDownloader) Curl(url string, outpath string) error {
return nil
}

/**
* Downloads an Image, and if successful, marks it as downloaded in the DB
*/
// Downloads an Image, and if successful, marks it as downloaded in the DB
func (p Profile) download_tweet_image(img *scraper.Image, downloader MediaDownloader) error {
outfile := path.Join(p.ProfileDir, "images", img.LocalFilename)
err := downloader.Curl(img.RemoteURL, outfile)
@@ -94,9 +90,7 @@ func (p Profile) download_tweet_image(img *scraper.Image, downloader MediaDownlo
return p.SaveImage(*img)
}

/**
* Downloads a Video and its thumbnail, and if successful, marks it as downloaded in the DB
*/
// Downloads a Video and its thumbnail, and if successful, marks it as downloaded in the DB
func (p Profile) download_tweet_video(v *scraper.Video, downloader MediaDownloader) error {
// Download the video
outfile := path.Join(p.ProfileDir, "videos", v.LocalFilename)
@@ -122,9 +116,7 @@ func (p Profile) download_tweet_video(v *scraper.Video, downloader MediaDownload
return p.SaveVideo(*v)
}

/**
* Downloads an URL thumbnail image, and if successful, marks it as downloaded in the DB
*/
// Downloads an URL thumbnail image, and if successful, marks it as downloaded in the DB
func (p Profile) download_link_thumbnail(url *scraper.Url, downloader MediaDownloader) error {
if url.HasCard && url.HasThumbnail {
outfile := path.Join(p.ProfileDir, "link_preview_images", url.ThumbnailLocalPath)
@@ -137,18 +129,13 @@ func (p Profile) download_link_thumbnail(url *scraper.Url, downloader MediaDownl
return p.SaveUrl(*url)
}

/**
* Download a tweet's video and picture content.
*
* Wraps the `DownloadTweetContentWithInjector` method with the default (i.e., real) downloader.
*/
// Download a tweet's video and picture content.
// Wraps the `DownloadTweetContentWithInjector` method with the default (i.e., real) downloader.
func (p Profile) DownloadTweetContentFor(t *scraper.Tweet) error {
return p.DownloadTweetContentWithInjector(t, DefaultDownloader{})
}

/**
* Enable injecting a custom MediaDownloader (i.e., for testing)
*/
// Enable injecting a custom MediaDownloader (i.e., for testing)
func (p Profile) DownloadTweetContentWithInjector(t *scraper.Tweet, downloader MediaDownloader) error {
// Check if content needs to be downloaded; if not, just return
if !p.CheckTweetContentDownloadNeeded(*t) {
@@ -179,16 +166,12 @@ func (p Profile) DownloadTweetContentWithInjector(t *scraper.Tweet, downloader M
return p.SaveTweet(*t)
}

/**
* Download a user's banner and profile images
*/
// Download a user's banner and profile images
func (p Profile) DownloadUserContentFor(u *scraper.User) error {
return p.DownloadUserContentWithInjector(u, DefaultDownloader{})
}

/**
* Enable injecting a custom MediaDownloader (i.e., for testing)
*/
// Enable injecting a custom MediaDownloader (i.e., for testing)
func (p Profile) DownloadUserContentWithInjector(u *scraper.User, downloader MediaDownloader) error {
if !p.CheckUserContentDownloadNeeded(*u) {
return nil
@@ -226,11 +209,9 @@ func (p Profile) DownloadUserContentWithInjector(u *scraper.User, downloader Med
return p.SaveUser(u)
}

/**
* Download a User's tiny profile image, if it hasn't been downloaded yet.
* If it has been downloaded, do nothing.
* If this user should have a big profile picture, defer to the regular `DownloadUserContentFor` method.
*/
// Download a User's tiny profile image, if it hasn't been downloaded yet.
// If it has been downloaded, do nothing.
// If this user should have a big profile picture, defer to the regular `DownloadUserContentFor` method.
func (p Profile) DownloadUserProfileImageTiny(u *scraper.User) error {
if p.IsFollowing(*u) {
return p.DownloadUserContentFor(u)

@@ -9,9 +9,7 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Some types to spy on a MediaDownloader
*/
// Some types to spy on a MediaDownloader
type SpyResult struct {
url string
outpath string
@@ -63,9 +61,7 @@ func test_all_downloaded(tweet scraper.Tweet, yes_or_no bool, t *testing.T) {
}
}

/**
* Downloading a Tweet's contents should mark the Tweet as downloaded
*/
// Downloading a Tweet's contents should mark the Tweet as downloaded
func TestDownloadTweetContent(t *testing.T) {
profile_path := "test_profiles/TestMediaQueries"
profile := create_or_load_profile(profile_path)
@@ -92,9 +88,7 @@ func TestDownloadTweetContent(t *testing.T) {
test_all_downloaded(new_tweet, true, t)
}

/**
* Downloading a User's contents should mark the User as downloaded
*/
// Downloading a User's contents should mark the User as downloaded
func TestDownloadUserContent(t *testing.T) {
assert := assert.New(t)
profile_path := "test_profiles/TestMediaQueries"
@@ -134,9 +128,7 @@ func TestDownloadUserContent(t *testing.T) {
assert.True(new_user.IsContentDownloaded)
}

/**
* Should download the right stuff if User has no banner image and default profile image
*/
// Should download the right stuff if User has no banner image and default profile image
func TestDownloadDefaultUserContent(t *testing.T) {
assert := assert.New(t)
profile_path := "test_profiles/TestMediaQueries"

@@ -6,12 +6,10 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Save an Image
*
* args:
* - img: the Image to save
*/
// Save an Image
//
// args:
// - img: the Image to save
func (p Profile) SaveImage(img scraper.Image) error {
_, err := p.DB.NamedExec(`
insert into images (id, tweet_id, width, height, remote_url, local_filename, is_downloaded)
@@ -27,12 +25,10 @@ func (p Profile) SaveImage(img scraper.Image) error {
return nil
}

/**
* Save a Video
*
* args:
* - img: the Video to save
*/
// Save a Video
//
// args:
// - img: the Video to save
func (p Profile) SaveVideo(vid scraper.Video) error {
_, err := p.DB.NamedExec(`
insert into videos (id, tweet_id, width, height, remote_url, local_filename, thumbnail_remote_url, thumbnail_local_filename,
@@ -52,9 +48,7 @@ func (p Profile) SaveVideo(vid scraper.Video) error {
return nil
}

/**
* Save an Url
*/
// Save an Url
func (p Profile) SaveUrl(url scraper.Url) error {
_, err := p.DB.NamedExec(`
insert into urls (tweet_id, domain, text, short_text, title, description, creator_id, site_id, thumbnail_width, thumbnail_height,
@@ -73,9 +67,7 @@ func (p Profile) SaveUrl(url scraper.Url) error {
return nil
}

/**
* Save a Poll
*/
// Save a Poll
func (p Profile) SavePoll(poll scraper.Poll) error {
_, err := p.DB.NamedExec(`
insert into polls (id, tweet_id, num_choices, choice1, choice1_votes, choice2, choice2_votes, choice3, choice3_votes, choice4,
@@ -97,9 +89,7 @@ func (p Profile) SavePoll(poll scraper.Poll) error {
return nil
}

/**
* Get the list of images for a tweet
*/
// Get the list of images for a tweet
func (p Profile) GetImagesForTweet(t scraper.Tweet) (imgs []scraper.Image, err error) {
err = p.DB.Select(&imgs,
"select id, tweet_id, width, height, remote_url, local_filename, is_downloaded from images where tweet_id=?",
@@ -107,9 +97,7 @@ func (p Profile) GetImagesForTweet(t scraper.Tweet) (imgs []scraper.Image, err e
return
}

/**
* Get the list of videos for a tweet
*/
// Get the list of videos for a tweet
func (p Profile) GetVideosForTweet(t scraper.Tweet) (vids []scraper.Video, err error) {
err = p.DB.Select(&vids, `
select id, tweet_id, width, height, remote_url, local_filename, thumbnail_remote_url, thumbnail_local_filename, duration,
@@ -120,9 +108,7 @@ func (p Profile) GetVideosForTweet(t scraper.Tweet) (vids []scraper.Video, err e
return
}

/**
* Get the list of Urls for a Tweet
*/
// Get the list of Urls for a Tweet
func (p Profile) GetUrlsForTweet(t scraper.Tweet) (urls []scraper.Url, err error) {
err = p.DB.Select(&urls, `
select tweet_id, domain, text, short_text, title, description, creator_id, site_id, thumbnail_width, thumbnail_height,
@@ -134,9 +120,7 @@ func (p Profile) GetUrlsForTweet(t scraper.Tweet) (urls []scraper.Url, err error
return
}

/**
* Get the list of Polls for a Tweet
*/
// Get the list of Polls for a Tweet
func (p Profile) GetPollsForTweet(t scraper.Tweet) (polls []scraper.Poll, err error) {
err = p.DB.Select(&polls, `
select id, tweet_id, num_choices, choice1, choice1_votes, choice2, choice2_votes, choice3, choice3_votes, choice4, choice4_votes,

@@ -12,9 +12,7 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Create an Image, save it, reload it, and make sure it comes back the same
*/
// Create an Image, save it, reload it, and make sure it comes back the same
func TestSaveAndLoadImage(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestMediaQueries"
@@ -47,9 +45,7 @@ func TestSaveAndLoadImage(t *testing.T) {
}
}

/**
* Change an Image, save the changes, reload it, and check if it comes back the same
*/
// Change an Image, save the changes, reload it, and check if it comes back the same
func TestModifyImage(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestMediaQueries"
@@ -78,9 +74,7 @@ func TestModifyImage(t *testing.T) {
}
}

/**
* Create an Video, save it, reload it, and make sure it comes back the same
*/
// Create an Video, save it, reload it, and make sure it comes back the same
func TestSaveAndLoadVideo(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestMediaQueries"
@@ -116,9 +110,7 @@ func TestSaveAndLoadVideo(t *testing.T) {
}
}

/**
* Change an Video, save the changes, reload it, and check if it comes back the same
*/
// Change an Video, save the changes, reload it, and check if it comes back the same
func TestModifyVideo(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestMediaQueries"
@@ -148,9 +140,7 @@ func TestModifyVideo(t *testing.T) {
}
}

/**
* Create an Url, save it, reload it, and make sure it comes back the same
*/
// Create an Url, save it, reload it, and make sure it comes back the same
func TestSaveAndLoadUrl(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestMediaQueries"
@@ -184,9 +174,7 @@ func TestSaveAndLoadUrl(t *testing.T) {
}
}

/**
* Change an Url, save the changes, reload it, and check if it comes back the same
*/
// Change an Url, save the changes, reload it, and check if it comes back the same
func TestModifyUrl(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestMediaQueries"
@@ -215,9 +203,7 @@ func TestModifyUrl(t *testing.T) {
}
}

/**
* Create a Poll, save it, reload it, and make sure it comes back the same
*/
// Create a Poll, save it, reload it, and make sure it comes back the same
func TestSaveAndLoadPoll(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestMediaQueries"
@@ -249,9 +235,7 @@ func TestSaveAndLoadPoll(t *testing.T) {
}
}

/**
* Change an Poll, save the changes, reload it, and check if it comes back the same
*/
// Change an Poll, save the changes, reload it, and check if it comes back the same
func TestModifyPoll(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestMediaQueries"

@@ -25,16 +25,14 @@ type Profile struct {

var ErrTargetAlreadyExists = fmt.Errorf("Target already exists")

/**
* Create a new profile in the given location.
* Fails if target location already exists (i.e., is a file or directory).
*
* args:
* - target_dir: location to create the new profile directory
*
* returns:
* - the newly created Profile
*/
// Create a new profile in the given location.
// Fails if target location already exists (i.e., is a file or directory).
//
// args:
// - target_dir: location to create the new profile directory
//
// returns:
// - the newly created Profile
func NewProfile(target_dir string) (Profile, error) {
if file_exists(target_dir) {
return Profile{}, fmt.Errorf("Could not create target %q:\n %w", target_dir, ErrTargetAlreadyExists)
@@ -112,15 +110,13 @@ func NewProfile(target_dir string) (Profile, error) {
return Profile{target_dir, settings, db}, nil
}

/**
* Loads the profile at the given location. Fails if the given directory is not a Profile.
*
* args:
* - profile_dir: location to check for the profile
*
* returns:
* - the loaded Profile
*/
// Loads the profile at the given location. Fails if the given directory is not a Profile.
//
// args:
// - profile_dir: location to check for the profile
//
// returns:
// - the loaded Profile
func LoadProfile(profile_dir string) (Profile, error) {
settings_file := path.Join(profile_dir, "settings.yaml")
sqlite_file := path.Join(profile_dir, "twitter.db")

@@ -24,9 +24,7 @@ func file_exists(path string) bool {
}
}

/**
* Should refuse to create a Profile if the target already exists (i.e., is a file or directory).
*/
// Should refuse to create a Profile if the target already exists (i.e., is a file or directory).
func TestNewProfileInvalidPath(t *testing.T) {
require := require.New(t)
gibberish_path := "test_profiles/fjlwrefuvaaw23efwm"
@@ -43,9 +41,7 @@ func TestNewProfileInvalidPath(t *testing.T) {
assert.ErrorIs(t, err, persistence.ErrTargetAlreadyExists)
}

/**
* Should correctly create a new Profile
*/
// Should correctly create a new Profile
func TestNewProfile(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
@@ -90,9 +86,7 @@ func TestNewProfile(t *testing.T) {
assert.Equal(persistence.ENGINE_DATABASE_VERSION, version)
}

/**
* Should correctly load the Profile
*/
// Should correctly load the Profile
func TestLoadProfile(t *testing.T) {
require := require.New(t)

@@ -111,9 +105,7 @@ func TestLoadProfile(t *testing.T) {
assert.Equal(t, profile_path, profile.ProfileDir)
}

/**
* Test the ToSnakeCase implementation
*/
// Test the ToSnakeCase implementation
func TestSnakeCase(t *testing.T) {
assert := assert.New(t)

@@ -6,9 +6,7 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Save a Retweet. Do nothing if it already exists, because none of its parameters are modifiable.
*/
// Save a Retweet. Do nothing if it already exists, because none of its parameters are modifiable.
func (p Profile) SaveRetweet(r scraper.Retweet) error {
_, err := p.DB.NamedExec(`
insert into retweets (retweet_id, tweet_id, retweeted_by, retweeted_at)
@@ -23,9 +21,7 @@ func (p Profile) SaveRetweet(r scraper.Retweet) error {
return nil
}

/**
* Retrieve a Retweet by ID
*/
// Retrieve a Retweet by ID
func (p Profile) GetRetweetById(id scraper.TweetID) (scraper.Retweet, error) {
var r scraper.Retweet
err := p.DB.Get(&r, `

@@ -12,9 +12,7 @@ type SpaceParticipant struct {
SpaceID scraper.SpaceID `db:"space_id"`
}

/**
* Save a Space
*/
// Save a Space
func (p Profile) SaveSpace(s scraper.Space) error {
_, err := p.DB.NamedExec(`
insert into spaces (id, created_by_id, short_url, state, title, created_at, started_at, ended_at, updated_at,
@@ -53,9 +51,7 @@ func (p Profile) SaveSpace(s scraper.Space) error {
return nil
}

/**
* Get a Space by ID
*/
// Get a Space by ID
func (p Profile) GetSpaceById(id scraper.SpaceID) (space scraper.Space, err error) {
err = p.DB.Get(&space,
`select id, created_by_id, short_url, state, title, created_at, started_at, ended_at, updated_at, is_available_for_replay,

@@ -5,11 +5,10 @@ import (
"errors"
"fmt"

"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

func (p Profile) SaveTweet(t scraper.Tweet) error {
func (p Profile) SaveTweet(t Tweet) error {
db := p.DB

tx := db.MustBegin()
@@ -104,7 +103,7 @@ func (p Profile) SaveTweet(t scraper.Tweet) error {
return nil
}

func (p Profile) IsTweetInDatabase(id scraper.TweetID) bool {
func (p Profile) IsTweetInDatabase(id TweetID) bool {
db := p.DB

var dummy string
@@ -119,10 +118,10 @@ func (p Profile) IsTweetInDatabase(id scraper.TweetID) bool {
return true
}

func (p Profile) GetTweetById(id scraper.TweetID) (scraper.Tweet, error) {
func (p Profile) GetTweetById(id TweetID) (Tweet, error) {
db := p.DB

var t scraper.Tweet
var t Tweet
err := db.Get(&t, `
select id, user_id, text, posted_at, num_likes, num_retweets, num_replies, num_quote_tweets, in_reply_to_id, quoted_tweet_id,
mentions, reply_mentions, hashtags, ifnull(space_id, '') space_id, ifnull(tombstone_types.short_name, "") tombstone_type,
@@ -133,10 +132,10 @@ func (p Profile) GetTweetById(id scraper.TweetID) (scraper.Tweet, error) {
`, id)

if err != nil {
return scraper.Tweet{}, fmt.Errorf("Error executing GetTweetByID(%d):\n %w", id, err)
return Tweet{}, fmt.Errorf("Error executing GetTweetByID(%d):\n %w", id, err)
}

t.Spaces = []scraper.Space{}
t.Spaces = []Space{}
if t.SpaceID != "" {
space, err := p.GetSpaceById(t.SpaceID)
if err != nil {
@@ -172,10 +171,8 @@ func (p Profile) GetTweetById(id scraper.TweetID) (scraper.Tweet, error) {
return t, nil
}

/**
* Populate the `User` field on a tweet with an actual User
*/
func (p Profile) LoadUserFor(t *scraper.Tweet) error {
// Populate the `User` field on a tweet with an actual User
func (p Profile) LoadUserFor(t *Tweet) error {
if t.User != nil {
// Already there, no need to load it
return nil
@@ -189,10 +186,8 @@ func (p Profile) LoadUserFor(t *scraper.Tweet) error {
return nil
}

/**
* Return `false` if the tweet is in the DB and has had its content downloaded, `false` otherwise
*/
func (p Profile) CheckTweetContentDownloadNeeded(tweet scraper.Tweet) bool {
// Return `false` if the tweet is in the DB and has had its content downloaded, `false` otherwise
func (p Profile) CheckTweetContentDownloadNeeded(tweet Tweet) bool {
row := p.DB.QueryRow(`select is_content_downloaded from tweets where id = ?`, tweet.ID)

var is_content_downloaded bool

@@ -11,9 +11,7 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Create a Tweet, save it, reload it, and make sure it comes back the same
*/
// Create a Tweet, save it, reload it, and make sure it comes back the same
func TestSaveAndLoadTweet(t *testing.T) {
profile_path := "test_profiles/TestTweetQueries"
profile := create_or_load_profile(profile_path)
@@ -34,9 +32,7 @@ func TestSaveAndLoadTweet(t *testing.T) {
}
}

/**
* Same as above, but with a tombstone
*/
// Same as above, but with a tombstone
func TestSaveAndLoadTombstone(t *testing.T) {
profile_path := "test_profiles/TestTweetQueries"
profile := create_or_load_profile(profile_path)
@@ -56,14 +52,12 @@ func TestSaveAndLoadTombstone(t *testing.T) {
}
}

/**
* Saving a tweet that already exists shouldn't reduce its backed-up status.
* i.e., content which is already saved shouldn't be marked un-saved if it's removed from Twitter.
* After all, that's the whole point of archiving.
*
* - is_stub should only go from "yes" to "no"
* - is_content_downloaded should only go from "no" to "yes"
*/
// Saving a tweet that already exists shouldn't reduce its backed-up status.
// i.e., content which is already saved shouldn't be marked un-saved if it's removed from Twitter.
// After all, that's the whole point of archiving.
//
// - is_stub should only go from "yes" to "no"
// - is_content_downloaded should only go from "no" to "yes"
func TestNoWorseningTweet(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
@@ -117,9 +111,7 @@ func TestNoWorseningTweet(t *testing.T) {
assert.Equal(13, new_tweet.NumReplies)
}

/**
* The tweet was a tombstone and is now available; it should be updated
*/
// The tweet was a tombstone and is now available; it should be updated
func TestUntombstoningTweet(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
@@ -182,10 +174,8 @@ func TestUpgradingExpandingTweet(t *testing.T) {
assert.Equal(new_tweet.Text, "Some long but cut-off text, but now it no longer is cut off!", "Should have extended the text")
}

/**
* The "unavailable" tombstone type is not reliable, you should be able to update away from it but
* not toward it
*/
// The "unavailable" tombstone type is not reliable, you should be able to update away from it but
// not toward it
func TestChangingTombstoningTweet(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
@@ -273,9 +263,7 @@ func TestModifyTweet(t *testing.T) {
assert.Equal(new_tweet.TombstoneType, "deleted")
}

/**
* Should correctly report whether the User exists in the database
*/
// Should correctly report whether the User exists in the database
func TestIsTweetInDatabase(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestTweetQueries"
@@ -293,9 +281,7 @@ func TestIsTweetInDatabase(t *testing.T) {
assert.True(t, exists)
}

/**
* Should correctly populate the `User` field on a Tweet
*/
// Should correctly populate the `User` field on a Tweet
func TestLoadUserForTweet(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestTweetQueries"
@@ -313,9 +299,7 @@ func TestLoadUserForTweet(t *testing.T) {
require.NotNil(tweet.User, "Did not load a user. It is still nil.")
}

/**
* Test all the combinations for whether a tweet needs its content downloaded
*/
// Test all the combinations for whether a tweet needs its content downloaded
func TestCheckTweetContentDownloadNeeded(t *testing.T) {
assert := assert.New(t)
profile_path := "test_profiles/TestTweetQueries"

@@ -6,10 +6,8 @@ import (
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Convenience function that saves all the objects in a TweetTrove.
* Panics if anything goes wrong.
*/
// Convenience function that saves all the objects in a TweetTrove.
// Panics if anything goes wrong.
func (p Profile) SaveTweetTrove(trove TweetTrove) {
for i, u := range trove.Users {
err := p.SaveUser(&u)

@@ -9,13 +9,11 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Save the given User to the database.
* If the User is already in the database, it will update most of its attributes (follower count, etc)
*
* args:
* - u: the User
*/
// Save the given User to the database.
// If the User is already in the database, it will update most of its attributes (follower count, etc)
//
// args:
// - u: the User
func (p Profile) SaveUser(u *scraper.User) error {
if u.IsNeedingFakeID {
err := p.DB.QueryRow("select id from users where lower(handle) = lower(?)", u.Handle).Scan(&u.ID)
@@ -67,15 +65,13 @@ func (p Profile) SaveUser(u *scraper.User) error {
return nil
}

/**
* Check if the database has a User with the given user handle.
*
* args:
* - handle: the user handle to search for
*
* returns:
* - true if there is such a User in the database, false otherwise
*/
// Check if the database has a User with the given user handle.
//
// args:
// - handle: the user handle to search for
//
// returns:
// - true if there is such a User in the database, false otherwise
func (p Profile) UserExists(handle scraper.UserHandle) bool {
db := p.DB

@@ -91,15 +87,13 @@ func (p Profile) UserExists(handle scraper.UserHandle) bool {
return true
}

/**
* Retrieve a User from the database, by handle.
*
* args:
* - handle: the user handle to search for
*
* returns:
* - the User, if it exists
*/
// Retrieve a User from the database, by handle.
//
// args:
// - handle: the user handle to search for
//
// returns:
// - the User, if it exists
func (p Profile) GetUserByHandle(handle scraper.UserHandle) (scraper.User, error) {
db := p.DB

@@ -118,15 +112,13 @@ func (p Profile) GetUserByHandle(handle scraper.UserHandle) (scraper.User, error
return ret, nil
}

/**
* Retrieve a User from the database, by user ID.
*
* args:
* - id: the user ID to search for
*
* returns:
* - the User, if it exists
*/
// Retrieve a User from the database, by user ID.
//
// args:
// - id: the user ID to search for
//
// returns:
// - the User, if it exists
func (p Profile) GetUserByID(id scraper.UserID) (scraper.User, error) {
db := p.DB

@@ -148,20 +140,18 @@ func (p Profile) GetUserByID(id scraper.UserID) (scraper.User, error) {
return ret, nil
}

/**
* Returns `true` if content download is needed, `false` otherwise
*
* If the user is banned, returns false because downloading will be impossible.
*
* If:
* - the user isn't in the DB at all (first time scraping), OR
* - `is_content_downloaded` is false in the DB, OR
* - the banner / profile image URL has changed from what the DB has
* then it needs to be downloaded.
*
* The `user` object will always have `is_content_downloaded` = false on every scrape. This is
* why the No Worsening Principle is needed.
*/
// Returns `true` if content download is needed, `false` otherwise
//
// If the user is banned, returns false because downloading will be impossible.
//
// If:
// - the user isn't in the DB at all (first time scraping), OR
// - `is_content_downloaded` is false in the DB, OR
// - the banner / profile image URL has changed from what the DB has
// then it needs to be downloaded.
//
// The `user` object will always have `is_content_downloaded` = false on every scrape. This is
// why the No Worsening Principle is needed.
func (p Profile) CheckUserContentDownloadNeeded(user scraper.User) bool {
row := p.DB.QueryRow(`select is_content_downloaded, profile_image_url, banner_image_url from users where id = ?`, user.ID)

@@ -189,9 +179,7 @@ func (p Profile) CheckUserContentDownloadNeeded(user scraper.User) bool {
return !file_exists(profile_path)
}

/**
* Follow / unfollow a user. Update the given User object's IsFollowed field.
*/
// Follow / unfollow a user. Update the given User object's IsFollowed field.
func (p Profile) SetUserFollowed(user *scraper.User, is_followed bool) {
result, err := p.DB.Exec("update users set is_followed = ? where id = ?", is_followed, user.ID)
if err != nil {
@@ -254,16 +242,12 @@ func (p Profile) IsFollowing(user scraper.User) bool {
return ret
}

/**
* Utility function to compute the path to save banner image to
*/
// Utility function to compute the path to save banner image to
func (p Profile) get_banner_image_output_path(u scraper.User) string {
return path.Join(p.ProfileDir, "profile_images", u.BannerImageLocalPath)
}

/**
* Utility function to compute the path to save profile image to
*/
// Utility function to compute the path to save profile image to
func (p Profile) get_profile_image_output_path(u scraper.User) string {
if u.ProfileImageUrl == "" {
return path.Join(p.ProfileDir, "profile_images", path.Base(scraper.DEFAULT_PROFILE_IMAGE_URL))

@@ -14,9 +14,7 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Create a user, save it, reload it, and make sure it comes back the same
*/
// Create a user, save it, reload it, and make sure it comes back the same
func TestSaveAndLoadUser(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestUserQueries"
@@ -44,9 +42,6 @@ func TestSaveAndLoadUser(t *testing.T) {
}
}

/**
*
*/
func TestModifyUser(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
@@ -117,9 +112,7 @@ func TestHandleIsCaseInsensitive(t *testing.T) {
}
}

/**
* Should correctly report whether the user exists in the database
*/
// Should correctly report whether the user exists in the database
func TestUserExists(t *testing.T) {
require := require.New(t)
profile_path := "test_profiles/TestUserQueries"
@@ -137,9 +130,7 @@ func TestUserExists(t *testing.T) {
require.True(exists)
}

/**
* Test scenarios relating to user content downloading
*/
// Test scenarios relating to user content downloading
func TestCheckUserContentDownloadNeeded(t *testing.T) {
assert := assert.New(t)
profile_path := "test_profiles/TestUserQueries"
@@ -177,14 +168,12 @@ func TestCheckUserContentDownloadNeeded(t *testing.T) {
assert.True(profile.CheckUserContentDownloadNeeded(user))
}

/**
* Make sure following works
*
* - users are unfollowed by default
* - following a user makes it save as is_followed
* - using regular save method doesn't un-follow
* - unfollowing a user makes it save as no longer is_followed
*/
// Make sure following works
//
// - users are unfollowed by default
// - following a user makes it save as is_followed
// - using regular save method doesn't un-follow
// - unfollowing a user makes it save as no longer is_followed
func TestFollowUnfollowUser(t *testing.T) {
assert := assert.New(t)

@@ -222,9 +211,7 @@ func TestFollowUnfollowUser(t *testing.T) {
assert.False(user_reloaded.IsFollowed)
}

/**
* Should correctly report whether a User is followed or not, according to the DB (not the in-memory objects)
*/
// Should correctly report whether a User is followed or not, according to the DB (not the in-memory objects)
func TestIsFollowingUser(t *testing.T) {
assert := assert.New(t)

@@ -256,10 +243,8 @@ func TestIsFollowingUser(t *testing.T) {
assert.False(profile.IsFollowing(user))
}

/**
* Should create a new Unknown User from the given handle.
* The Unknown User should work consistently with other Users.
*/
// Should create a new Unknown User from the given handle.
// The Unknown User should work consistently with other Users.
func TestCreateUnknownUserWithHandle(t *testing.T) {
assert := assert.New(t)

@@ -287,9 +272,7 @@ func TestCreateUnknownUserWithHandle(t *testing.T) {
assert.Equal(next_id+2, profile.NextFakeUserID())
}

/**
* Should update the unknown User's UserID with the correct ID if it already exists
*/
// Should update the unknown User's UserID with the correct ID if it already exists
func TestCreateUnknownUserWithHandleThatAlreadyExists(t *testing.T) {
assert := assert.New(t)

@@ -31,9 +31,7 @@ func file_exists(path string) bool {
}
}

/**
* https://stackoverflow.com/questions/56616196/how-to-convert-camel-case-string-to-snake-case#56616250
*/
// https://stackoverflow.com/questions/56616196/how-to-convert-camel-case-string-to-snake-case#56616250
func ToSnakeCase(str string) string {
snake := regexp.MustCompile("(.)_?([A-Z][a-z]+)").ReplaceAllString(str, "${1}_${2}")
snake = regexp.MustCompile("([a-z0-9])_?([A-Z])").ReplaceAllString(snake, "${1}_${2}")

@@ -9,9 +9,7 @@ import (
"gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)

/**
* Load a test profile, or create it if it doesn't exist.
*/
// Load a test profile, or create it if it doesn't exist.
func create_or_load_profile(profile_path string) persistence.Profile {
var profile persistence.Profile
var err error
@@ -40,9 +38,7 @@ func create_or_load_profile(profile_path string) persistence.Profile {
return profile
}

/**
* Create a stable user with a fixed ID and handle
*/
// Create a stable user with a fixed ID and handle
func create_stable_user() scraper.User {
return scraper.User{
ID: scraper.UserID(-1),
@@ -64,9 +60,7 @@ func create_stable_user() scraper.User {
}
}

/**
* Create a semi-stable Image based on the given ID
*/
// Create a semi-stable Image based on the given ID
func create_image_from_id(id int) scraper.Image {
filename := fmt.Sprintf("image%d.jpg", id)
return scraper.Image{
@@ -80,9 +74,7 @@ func create_image_from_id(id int) scraper.Image {
}
}

/**
* Create a semi-stable Video based on the given ID
*/
// Create a semi-stable Video based on the given ID
func create_video_from_id(id int) scraper.Video {
filename := fmt.Sprintf("video%d.jpg", id)
return scraper.Video{
@@ -101,9 +93,7 @@ func create_video_from_id(id int) scraper.Video {
}
}

/**
* Create a semi-stable Url based on the given ID
*/
// Create a semi-stable Url based on the given ID
func create_url_from_id(id int) scraper.Url {
s := fmt.Sprint(id)
return scraper.Url{
@@ -124,9 +114,7 @@ func create_url_from_id(id int) scraper.Url {
}
}

/**
* Create a semi-stable Poll based on the given ID
*/
// Create a semi-stable Poll based on the given ID
func create_poll_from_id(id int) scraper.Poll {
s := fmt.Sprint(id)
return scraper.Poll{
@@ -143,9 +131,7 @@ func create_poll_from_id(id int) scraper.Poll {
}
}

/**
* Create a stable tweet with a fixed ID and content
*/
// Create a stable tweet with a fixed ID and content
func create_stable_tweet() scraper.Tweet {
tweet_id := scraper.TweetID(-1)
return scraper.Tweet{
@@ -180,9 +166,7 @@ func create_stable_tweet() scraper.Tweet {
}
}

/**
* Create a stable retweet with a fixed ID and parameters
*/
// Create a stable retweet with a fixed ID and parameters
func create_stable_retweet() scraper.Retweet {
retweet_id := scraper.TweetID(-1)
return scraper.Retweet{
@@ -193,9 +177,7 @@ func create_stable_retweet() scraper.Retweet {
}
}

/**
* Create a new user with a random ID and handle
*/
// Create a new user with a random ID and handle
func create_dummy_user() scraper.User {
rand.Seed(time.Now().UnixNano())
userID := rand.Int()
@@ -220,9 +202,7 @@ func create_dummy_user() scraper.User {
}
}

/**
* Create a new tweet with a random ID and content
*/
// Create a new tweet with a random ID and content
func create_dummy_tweet() scraper.Tweet {
rand.Seed(time.Now().UnixNano())
tweet_id := scraper.TweetID(rand.Int())
@@ -266,9 +246,7 @@ func create_dummy_tweet() scraper.Tweet {
}
}

/**
* Create a random tombstone
*/
// Create a random tombstone
func create_dummy_tombstone() scraper.Tweet {
rand.Seed(time.Now().UnixNano())
tweet_id := scraper.TweetID(rand.Int())
@@ -285,9 +263,7 @@ func create_dummy_tombstone() scraper.Tweet {
}
}

/**
* Create a new retweet with a random ID for a given TweetID
*/
// Create a new retweet with a random ID for a given TweetID
func create_dummy_retweet(tweet_id scraper.TweetID) scraper.Retweet {
rand.Seed(time.Now().UnixNano())
retweet_id := scraper.TweetID(rand.Int())
@@ -300,9 +276,7 @@ func create_dummy_retweet(tweet_id scraper.TweetID) scraper.Retweet {
}
}

/**
* Create a semi-stable Space given an ID
*/
// Create a semi-stable Space given an ID
func create_space_from_id(id int) scraper.Space {
return scraper.Space{
ID: scraper.SpaceID(fmt.Sprintf("some_id_%d", id)),