Add 'gofmt' linter
parent 223734d001
commit d1d80a91cd
@@ -27,6 +27,7 @@ linters:
 - wrapcheck
 - lll
 - godox
+- gofmt
 - errorlint
 - nolintlint
 
@@ -203,9 +204,9 @@ linters-settings:
 keywords: # default keywords are TODO, BUG, and FIXME, these can be overwritten by this setting
 - XXX
 
-# gofmt:
-# # simplify code: gofmt with `-s` option, true by default
-# simplify: true
+gofmt:
+  # simplify code: gofmt with `-s` option, true by default
+  simplify: true
 
 # gofumpt:
 # # Select the Go version to target. The default is `1.15`.
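For reference, `simplify: true` turns on gofmt's `-s` rewrites. A minimal, hypothetical Go snippet (not from this repo) showing the kind of code `-s` simplifies; once the linter is enabled, a plain `golangci-lint run` should report such lines as gofmt issues:

package main

import "fmt"

func main() {
	x := []int{1, 2, 3}

	y := x[0:len(x)] // gofmt -s rewrites this to: y := x[0:]

	for _ = range x { // gofmt -s rewrites this to: for range x {
	}

	fmt.Println(y)
}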
@@ -6,9 +6,9 @@ import (
 "fmt"
 "math/rand"
 
+"github.com/go-test/deep"
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
-"github.com/go-test/deep"
 
 "offline_twitter/scraper"
 )
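This import hunk and the similar ones in the files below are the bulk of the mechanical fixes: gofmt sorts import specs alphabetically within each blank-line-separated group, so the standard-library, third-party, and local groups each end up in order. A hypothetical test-file import block in the shape these files settle into (illustrative only, not an exact file from this repo):

package scraper_test

import (
	"encoding/json"
	"os"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"offline_twitter/scraper"
)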
@@ -3,9 +3,9 @@ package persistence
 import (
 "errors"
 "fmt"
+"os"
 "regexp"
 "strings"
-"os"
 )
 
 var NotInDatabase = errors.New("Not in database")
@@ -7,14 +7,13 @@ import (
 "offline_twitter/terminal_utils"
 )
 
-
 const ENGINE_DATABASE_VERSION = 11
 
-
 type VersionMismatchError struct {
 EngineVersion int
 DatabaseVersion int
 }
+
 func (e VersionMismatchError) Error() string {
 return fmt.Sprintf(
 `This profile was created with database schema version %d, which is newer than this application's database schema version, %d.
@@ -23,7 +22,6 @@ Please upgrade this application to a newer version to use this profile. Or down
 )
 }
 
-
 /**
 * The Nth entry is the migration that moves you from version N to version N+1.
 * `len(MIGRATIONS)` should always equal `ENGINE_DATABASE_VERSION`.
@@ -2,12 +2,13 @@ package persistence_test
 
 import (
 "testing"
 
 "os"
 
 "github.com/stretchr/testify/require"
-"offline_twitter/scraper"
+
 "offline_twitter/persistence"
+"offline_twitter/scraper"
 )
 
 func TestVersionUpgrade(t *testing.T) {
@@ -1,16 +1,15 @@
 package scraper
 
 import (
+"encoding/json"
 "fmt"
 "html"
-"time"
-"strings"
-"encoding/json"
-"strconv"
 "sort"
+"strconv"
+"strings"
+"time"
 )
 
-
 type APIMedia struct {
 ID int64 `json:"id_str,string"`
 MediaURLHttps string `json:"media_url_https"`
@@ -26,6 +25,7 @@ type SortableVariants []struct {
 Bitrate int `json:"bitrate,omitempty"`
 URL string `json:"url"`
 }
+
 func (v SortableVariants) Len() int { return len(v) }
 func (v SortableVariants) Swap(i, j int) { v[i], v[j] = v[j], v[i] }
 func (v SortableVariants) Less(i, j int) bool { return v[i].Bitrate > v[j].Bitrate }
@@ -183,7 +183,7 @@ func (t *APITweet) NormalizeContent() {
 t.RetweetedStatusID = int64(id)
 }
 
-if (len(t.DisplayTextRange) == 2) {
+if len(t.DisplayTextRange) == 2 {
 t.Entities.ReplyMentions = strings.TrimSpace(string([]rune(t.FullText)[0:t.DisplayTextRange[0]]))
 t.FullText = string([]rune(t.FullText)[t.DisplayTextRange[0]:t.DisplayTextRange[1]])
 }
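The only change in this hunk is dropping the redundant parentheses around the `if` condition, which gofmt normalizes away (the `range(t.Urls)` change in a later hunk is the same cleanup). A tiny hypothetical example:

package main

import "fmt"

func main() {
	done := true
	if (done) { // gofmt reformats this to: if done {
		fmt.Println("done")
	}
}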
@@ -217,7 +217,6 @@ func (t APITweet) String() string {
 return string(data)
 }
 
-
 type APIUser struct {
 CreatedAt string `json:"created_at"`
 Description string `json:"description"`
@@ -246,7 +245,6 @@ type APIUser struct {
 DoesntExist bool
 }
 
-
 type UserResponse struct {
 Data struct {
 User struct {
@@ -260,6 +258,7 @@ type UserResponse struct {
 Code int `json:"code"`
 } `json:"errors"`
 }
+
 func (u UserResponse) ConvertToAPIUser() APIUser {
 ret := u.Data.User.Legacy
 ret.ID = u.Data.User.ID
@@ -303,10 +302,13 @@ type Entry struct {
 } `json:"operation"`
 } `json:"content"`
 }
+
 func (e Entry) GetTombstoneText() string {
 return e.Content.Item.Content.Tombstone.TombstoneInfo.RichText.Text
 }
+
 type SortableEntries []Entry
+
 func (e SortableEntries) Len() int { return len(e) }
 func (e SortableEntries) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
 func (e SortableEntries) Less(i, j int) bool { return e[i].SortIndex > e[j].SortIndex }
@@ -338,6 +340,7 @@ var tombstone_types = map[string]string{
 "Age-restricted adult content. This content might not be appropriate for people under 18 years old. To view this media, " +
 "you’ll need to log in to Twitter. Learn more": "age-restricted",
 }
+
 /**
 * Insert tweets into GlobalObjects for each tombstone. Returns a list of users that need to
 * be fetched for tombstones.
@@ -450,7 +453,6 @@ func (t *TweetResponse) IsEndOfFeed() bool {
 return true
 }
 
-
 func idstr_to_int(idstr string) int64 {
 id, err := strconv.Atoi(idstr)
 if err != nil {
@@ -1,9 +1,9 @@
 package scraper_test
 
 import (
-"testing"
-"os"
 "encoding/json"
+"os"
+"testing"
 
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
@@ -11,7 +11,6 @@ import (
 . "offline_twitter/scraper"
 )
 
-
 func TestNormalizeContent(t *testing.T) {
 assert := assert.New(t)
 test_cases := []struct {
@@ -60,7 +59,6 @@ func TestNormalizeContent(t *testing.T) {
 }
 }
 
-
 func TestUserProfileToAPIUser(t *testing.T) {
 assert := assert.New(t)
 data, err := os.ReadFile("test_responses/michael_malice_user_profile.json")
@@ -76,7 +74,6 @@ func TestUserProfileToAPIUser(t *testing.T) {
 assert.Equal(user_resp.Data.User.Legacy.FollowersCount, result.FollowersCount)
 }
 
-
 func TestGetCursor(t *testing.T) {
 assert := assert.New(t)
 data, err := os.ReadFile("test_responses/midriffs_anarchist_cookbook.json")
@@ -91,7 +88,6 @@ func TestGetCursor(t *testing.T) {
 tweet_resp.GetCursor())
 }
 
-
 func TestIsEndOfFeed(t *testing.T) {
 assert := assert.New(t)
 test_cases := []struct {
@@ -113,7 +109,6 @@ func TestIsEndOfFeed(t *testing.T) {
 }
 }
 
-
 func TestHandleTombstonesHidden(t *testing.T) {
 assert := assert.New(t)
 data, err := os.ReadFile("test_responses/tombstones/tombstone_hidden_1.json")
@@ -37,6 +37,7 @@ type APIV2Card struct {
 Url string `json:"url"`
 } `json:"legacy"`
 }
+
 func (card APIV2Card) ParseAsUrl() Url {
 values := make(map[string]CardValue)
 for _, obj := range card.Legacy.BindingValues {
@@ -121,6 +122,7 @@ type APIV2UserResult struct {
 } `json:"result"`
 } `json:"user_results"`
 }
+
 func (u APIV2UserResult) ToUser() User {
 user, err := ParseSingleUser(u.UserResults.Result.Legacy)
 if err != nil {
@@ -149,6 +151,7 @@ type APIV2Result struct {
 Tweet _Result `json:"tweet"`
 } `json:"result"`
 }
+
 func (api_result APIV2Result) ToTweetTrove(ignore_null_entries bool) TweetTrove {
 ret := NewTweetTrove()
 
@@ -245,6 +248,7 @@ type APIV2Tweet struct {
 RetweetedStatusResult *APIV2Result `json:"retweeted_status_result"`
 APITweet
 }
+
 func (api_v2_tweet APIV2Tweet) ToTweetTrove() TweetTrove {
 ret := NewTweetTrove()
 
@@ -253,7 +257,6 @@ func (api_v2_tweet APIV2Tweet) ToTweetTrove() TweetTrove {
 orig_tweet_trove := api_v2_tweet.RetweetedStatusResult.ToTweetTrove(false)
 ret.MergeWith(orig_tweet_trove)
 
-
 retweet := Retweet{}
 var err error
 retweet.RetweetID = TweetID(api_v2_tweet.ID)
@@ -289,7 +292,6 @@ type APIV2Entry struct {
 EntryType string `json:"entryType"`
 Value string `json:"value"`
 CursorType string `json:"cursorType"`
-
 } `json:"content"`
 }
 
@@ -363,7 +365,6 @@ func (api_response APIV2Response) ToTweetTrove() (TweetTrove, error) {
 return ret, nil
 }
 
-
 func get_graphql_user_timeline_url(user_id UserID, cursor string) string {
 if cursor != "" {
 return "https://twitter.com/i/api/graphql/CwLU7qTfeu0doqhSr6tW4A/UserTweetsAndReplies?variables=%7B%22userId%22%3A%22" + fmt.Sprint(user_id) + "%22%2C%22count%22%3A40%2C%22cursor%22%3A%22" + url.QueryEscape(cursor) + "%22%2C%22includePromotedContent%22%3Atrue%2C%22withCommunity%22%3Atrue%2C%22withSuperFollowsUserFields%22%3Atrue%2C%22withBirdwatchPivots%22%3Afalse%2C%22withDownvotePerspective%22%3Afalse%2C%22withReactionsMetadata%22%3Afalse%2C%22withReactionsPerspective%22%3Afalse%2C%22withSuperFollowsTweetFields%22%3Atrue%2C%22withVoice%22%3Atrue%2C%22withV2Timeline%22%3Afalse%2C%22__fs_interactive_text%22%3Afalse%2C%22__fs_responsive_web_uc_gql_enabled%22%3Afalse%2C%22__fs_dont_mention_me_view_api_enabled%22%3Afalse%7D" // nolint:lll // It's a URL, come on
|
@ -1,10 +1,10 @@
|
|||||||
package scraper_test
|
package scraper_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"testing"
|
|
||||||
"os"
|
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
@ -200,7 +200,6 @@ func TestAPIV2ParseRetweet(t *testing.T) {
|
|||||||
assert.Equal(UserID(44067298), retweeting_user.ID)
|
assert.Equal(UserID(44067298), retweeting_user.ID)
|
||||||
assert.Equal(UserHandle("michaelmalice"), retweeting_user.Handle)
|
assert.Equal(UserHandle("michaelmalice"), retweeting_user.Handle)
|
||||||
|
|
||||||
|
|
||||||
// Should be 1 retweet
|
// Should be 1 retweet
|
||||||
assert.Equal(1, len(trove.Retweets))
|
assert.Equal(1, len(trove.Retweets))
|
||||||
retweet, ok := trove.Retweets[1485699748514476037]
|
retweet, ok := trove.Retweets[1485699748514476037]
|
||||||
@ -270,7 +269,6 @@ func TestAPIV2ParseRetweetedQuoteTweet(t *testing.T) {
|
|||||||
assert.Equal(UserID(599817378), retweet.RetweetedByID)
|
assert.Equal(UserID(599817378), retweet.RetweetedByID)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Parse tweet with quoted tombstone
|
* Parse tweet with quoted tombstone
|
||||||
*/
|
*/
|
||||||
@ -306,7 +304,6 @@ func TestAPIV2ParseTweetWithQuotedTombstone(t *testing.T) {
|
|||||||
assert.Equal(0, len(trove.Retweets))
|
assert.Equal(0, len(trove.Retweets))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Parse a tweet with a link
|
* Parse a tweet with a link
|
||||||
*/
|
*/
|
||||||
@ -442,7 +439,6 @@ func TestAPIV2ParseTweetWithPoll(t *testing.T) {
|
|||||||
assert.Equal(1440*60, poll.VotingDuration)
|
assert.Equal(1440*60, poll.VotingDuration)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
func TestParseAPIV2UserFeed(t *testing.T) {
|
func TestParseAPIV2UserFeed(t *testing.T) {
|
||||||
data, err := os.ReadFile("test_responses/api_v2/user_feed_apiv2.json")
|
data, err := os.ReadFile("test_responses/api_v2/user_feed_apiv2.json")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -495,7 +491,6 @@ func TestParseAPIV2UserFeed(t *testing.T) {
|
|||||||
fmt.Printf("%d Users, %d Tweets, %d Retweets\n", len(tweet_trove.Users), len(tweet_trove.Tweets), len(tweet_trove.Retweets))
|
fmt.Printf("%d Users, %d Tweets, %d Retweets\n", len(tweet_trove.Users), len(tweet_trove.Tweets), len(tweet_trove.Retweets))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Should correctly identify an "empty" response
|
* Should correctly identify an "empty" response
|
||||||
*/
|
*/
|
||||||
@ -568,7 +563,6 @@ func TestAPIV2TombstoneEntry(t *testing.T) {
|
|||||||
assert.Len(trove.Retweets, 0)
|
assert.Len(trove.Retweets, 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
func TestTweetWithWarning(t *testing.T) {
|
func TestTweetWithWarning(t *testing.T) {
|
||||||
assert := assert.New(t)
|
assert := assert.New(t)
|
||||||
data, err := os.ReadFile("test_responses/api_v2/tweet_with_warning.json")
|
data, err := os.ReadFile("test_responses/api_v2/tweet_with_warning.json")
|
||||||
|
@@ -1,9 +1,9 @@
 package scraper_test
 
 import (
-"testing"
-"os"
 "encoding/json"
+"os"
+"testing"
 
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
@@ -2,11 +2,10 @@ package scraper
 
 import (
 "fmt"
-"time"
 "net/http"
+"time"
 )
 
-
 /**
 * Return the expanded version of a short URL. Input must be a real short URL.
 */
@@ -11,7 +11,6 @@ import (
 . "offline_twitter/scraper"
 )
 
-
 func TestExpandShortUrl(t *testing.T) {
 redirecting_to := "redirect target"
 srvr := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
@@ -1,9 +1,9 @@
 package scraper
 
 import (
-"strings"
-"strconv"
 "net/url"
+"strconv"
+"strings"
 )
 
 type PollID int64
@@ -1,9 +1,9 @@
 package scraper_test
 
 import (
-"testing"
-"os"
 "encoding/json"
+"os"
+"testing"
 
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
@@ -1,9 +1,9 @@
 package scraper
 
 import (
-"time"
 "fmt"
 "strings"
+"time"
 
 "offline_twitter/terminal_utils"
 )
@@ -42,7 +42,6 @@ type Tweet struct {
 LastScrapedAt Timestamp
 }
 
-
 func (t Tweet) String() string {
 var author string
 if t.User != nil {
@@ -71,7 +70,7 @@ Replies: %d RT: %d QT: %d Likes: %d
 }
 if len(t.Urls) > 0 {
 ret += "urls: [\n"
-for _, url := range(t.Urls) {
+for _, url := range t.Urls {
 ret += "  " + url.Text + "\n"
 }
 ret += "]"
@@ -151,7 +150,6 @@ func ParseSingleTweet(apiTweet APITweet) (ret Tweet, err error) {
 }
 }
 
-
 // Process videos
 for _, entity := range apiTweet.ExtendedEntities.Media {
 if entity.Type != "video" && entity.Type != "animated_gif" {
@@ -181,7 +179,6 @@ func ParseSingleTweet(apiTweet APITweet) (ret Tweet, err error) {
 return
 }
 
-
 /**
 * Get a single tweet with no replies from the API.
 *
@@ -206,7 +203,6 @@ func GetTweet(id TweetID) (Tweet, error) {
 return ParseSingleTweet(single_tweet)
 }
 
-
 /**
 * Return a list of tweets, including the original and the rest of its thread,
 * along with a list of associated users.
@@ -28,7 +28,6 @@ func load_tweet_from_file(filename string) Tweet{
 return tweet
 }
 
-
 func TestParseSingleTweet(t *testing.T) {
 assert := assert.New(t)
 tweet := load_tweet_from_file("test_responses/single_tweets/tweet_with_unicode_chars.json")
@@ -171,7 +170,6 @@ func TestTweetWithPoll(t *testing.T) {
 assert.Equal(int64(1638331935), p.LastUpdatedAt.Unix())
 }
 
-
 func TestParseTweetResponse(t *testing.T) {
 assert := assert.New(t)
 data, err := os.ReadFile("test_responses/michael_malice_feed.json")
@@ -2,9 +2,9 @@ package scraper
 
 import (
 "fmt"
+"net/url"
 "path"
 "regexp"
-"net/url"
 )
 
 type Url struct {
@@ -1,9 +1,9 @@
 package scraper_test
 
 import (
-"testing"
-"os"
 "encoding/json"
+"os"
+"testing"
 
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
@@ -2,9 +2,9 @@ package scraper
 
 import (
 "fmt"
-"strings"
-"regexp"
 "path"
+"regexp"
+"strings"
 
 "offline_twitter/terminal_utils"
 )
@@ -35,6 +35,7 @@ type User struct {
 IsPrivate bool
 IsVerified bool
 IsBanned bool
+IsDeleted bool
 ProfileImageUrl string
 ProfileImageLocalPath string
 BannerImageUrl string
@@ -83,8 +84,6 @@ Joined %s
 return ret
 }
 
-
-
 /**
 * Unknown Users with handles are only created by direct GetUser calls (either `twitter fetch_user`
 * subcommand or as part of tombstone user fetching.)
@@ -141,7 +140,6 @@ func ParseSingleUser(apiUser APIUser) (ret User, err error) {
 ret.IsVerified = apiUser.Verified
 ret.ProfileImageUrl = apiUser.ProfileImageURLHTTPS
 
-
 if regexp.MustCompile(`_normal\.\w{2,4}`).MatchString(ret.ProfileImageUrl) {
 ret.ProfileImageUrl = strings.ReplaceAll(ret.ProfileImageUrl, "_normal.", ".")
 }
@@ -1,8 +1,8 @@
 package scraper
 
 import (
-"fmt"
 "errors"
+"fmt"
 )
 
 /**
@@ -33,7 +33,6 @@ func GetUserFeedFor(user_id UserID, min_tweets int) (trove TweetTrove, err error
 return ParseTweetResponse(tweet_response)
 }
 
-
 func GetUserFeedGraphqlFor(user_id UserID, min_tweets int) (trove TweetTrove, err error) {
 api := API{}
 api_response, err := api.GetGraphqlFeedFor(user_id, "")
@@ -1,10 +1,10 @@
 package scraper_test
 
 import (
-"testing"
 "encoding/json"
-"os"
 "net/http"
+"os"
+"testing"
 
 "github.com/jarcoal/httpmock"
 "github.com/stretchr/testify/assert"
@@ -2,8 +2,8 @@ package scraper
 
 import (
 "fmt"
-"sort"
 "path"
+"sort"
 )
 
 type VideoID int64
@@ -1,9 +1,9 @@
 package scraper_test
 
 import (
-"testing"
-"os"
 "encoding/json"
+"os"
+"testing"
 
 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"