Add sample notification data and a compound query to retrieve notifications and associated data

Alessio 2024-08-31 22:59:23 -07:00
parent 0c620621a6
commit f554846355
5 changed files with 250 additions and 15 deletions

View File

@@ -254,7 +254,7 @@ func TestTimeline(t *testing.T) {
assert.Equal(title_node.FirstChild.Data, "Timeline | Offline Twitter")
tweet_nodes := cascadia.QueryAll(root, selector(".timeline > .tweet"))
-assert.Len(tweet_nodes, 19)
+assert.Len(tweet_nodes, 20)
}
func TestTimelineWithCursor(t *testing.T) {

View File

@@ -5,6 +5,7 @@ import (
"fmt"
"strings"
"github.com/jmoiron/sqlx"
. "gitlab.com/offline-twitter/twitter_offline_engine/pkg/scraper"
)
@@ -13,6 +14,7 @@ var (
ErrNotInDB = errors.New("not in database")
)
// TODO: make this a SQL view?
const TWEETS_ALL_SQL_FIELDS = `
tweets.id id, tweets.user_id, text, posted_at, num_likes, num_retweets, num_replies, num_quote_tweets, in_reply_to_id,
quoted_tweet_id, mentions, reply_mentions, hashtags, ifnull(space_id, '') space_id,
@@ -20,6 +22,11 @@ const TWEETS_ALL_SQL_FIELDS = `
case when likes.user_id is null then 0 else 1 end is_liked_by_current_user,
is_expandable, is_stub, is_content_downloaded, is_conversation_scraped, last_scraped_at`
// Given a TweetTrove, fetch its:
// - quoted tweets
// - spaces
// - users
// - images, videos, urls, polls
func (p Profile) fill_content(trove *TweetTrove, current_user_id UserID) {
if len(trove.Tweets) == 0 {
// Empty trove, nothing to fetch
@@ -50,6 +57,7 @@ func (p Profile) fill_content(trove *TweetTrove, current_user_id UserID) {
}
}
// Fetch spaces
space_ids := []interface{}{}
for _, t := range trove.Tweets {
if t.SpaceID != "" {
@@ -79,6 +87,7 @@ func (p Profile) fill_content(trove *TweetTrove, current_user_id UserID) {
}
}
// Assemble list of users fetched in previous operations
in_clause := "" in_clause := ""
user_ids := []interface{}{} user_ids := []interface{}{}
tweet_ids := []interface{}{} tweet_ids := []interface{}{}
@ -98,6 +107,16 @@ func (p Profile) fill_content(trove *TweetTrove, current_user_id UserID) {
user_ids = append(user_ids, p) user_ids = append(user_ids, p)
} }
} }
for _, n := range trove.Notifications {
// Primary user
if n.ActionUserID != UserID(0) {
user_ids = append(user_ids, n.ActionUserID)
}
// Other users, if there are any
for _, u_id := range n.UserIDs {
user_ids = append(user_ids, u_id)
}
}
// Get all the users
if len(user_ids) > 0 { // It could be a search with no results, end of feed, etc-- strings.Repeat will fail!
@@ -120,7 +139,6 @@ func (p Profile) fill_content(trove *TweetTrove, current_user_id UserID) {
var images []Image
imgquery := `
select id, tweet_id, width, height, remote_url, local_filename, is_downloaded from images where tweet_id in (` + in_clause + `)`
// fmt.Printf("%s\n", imgquery) // TODO: SQL logger
err := p.DB.Select(&images, imgquery, tweet_ids...)
if err != nil {
panic(err)
@@ -357,8 +375,9 @@ func (p Profile) GetTweetDetail(id TweetID, current_user_id UserID) (TweetDetail
// TODO: compound-query-structs
type FeedItem struct {
TweetID
RetweetID TweetID
-QuoteNestingLevel int
+NotificationID
+QuoteNestingLevel int // Defines the current nesting level (not available remaining levels)
}
type Feed struct {
Items []FeedItem
@@ -383,3 +402,114 @@ func NewFeed() Feed {
TweetTrove: NewTweetTrove(),
}
}
func (p Profile) GetNotificationsForUser(u_id UserID, cursor int64) Feed {
// Get the notifications
var notifications []Notification
err := p.DB.Select(&notifications,
`select id, type, sent_at, sort_index, user_id, ifnull(action_user_id, 0) action_user_id,
ifnull(action_tweet_id, 0) action_tweet_id, ifnull(action_retweet_id, 0) action_retweet_id, has_detail, last_scraped_at
from notifications
where (sort_index < ? or ?)
and user_id = ?
order by sort_index desc
`, cursor, cursor == 0, u_id)
if err != nil {
panic(err)
}
// Get the user_ids list for each notification. Unlike tweet+retweet_ids, users are needed to render
// the notification properly.
for i := range notifications {
err = p.DB.Select(&notifications[i].UserIDs, `select user_id from notification_users where notification_id = ?`, notifications[i].ID)
if err != nil {
panic(err)
}
}
// Collect tweet and retweet IDs
retweet_ids := []TweetID{}
tweet_ids := []TweetID{}
for _, n := range notifications {
if n.ActionRetweetID != TweetID(0) {
retweet_ids = append(retweet_ids, n.ActionRetweetID)
}
if n.ActionTweetID != TweetID(0) {
tweet_ids = append(tweet_ids, n.ActionTweetID)
}
}
// TODO: can this go in `fill_content`?
// Get retweets if there are any
var retweets []Retweet
if len(retweet_ids) != 0 {
sql_str, vals, err := sqlx.In(`
select retweet_id, tweet_id, retweeted_by, retweeted_at
from retweets
where retweet_id in (?)
`, retweet_ids)
if err != nil {
panic(err)
}
err = p.DB.Select(&retweets, sql_str, vals...)
if err != nil {
panic(err)
}
// Collect more tweet IDs, from retweets
for _, r := range retweets {
tweet_ids = append(tweet_ids, r.TweetID)
}
}
// Get tweets, if there are any
var tweets []Tweet
if len(tweet_ids) != 0 {
sql_str, vals, err := sqlx.In(`select `+TWEETS_ALL_SQL_FIELDS+`
from tweets
left join tombstone_types on tweets.tombstone_type = tombstone_types.rowid
left join likes on tweets.id = likes.tweet_id and likes.user_id = -1
where id in (?)`, tweet_ids)
if err != nil {
panic(err)
}
err = p.DB.Select(&tweets, sql_str, vals...)
if err != nil {
panic(err)
}
}
ret := NewFeed()
for _, t := range tweets {
ret.TweetTrove.Tweets[t.ID] = t
}
for _, r := range retweets {
ret.TweetTrove.Retweets[r.RetweetID] = r
}
for _, n := range notifications {
// Add to tweet trove
ret.TweetTrove.Notifications[n.ID] = n
// Construct feed item
feed_item := FeedItem{
NotificationID: n.ID,
RetweetID: n.ActionRetweetID, // might be 0
TweetID: n.ActionTweetID, // might be 0
}
r, is_ok := ret.TweetTrove.Retweets[n.ActionRetweetID]
if is_ok {
// If the action has a retweet, fill the FeedItem.TweetID from the retweet
feed_item.TweetID = r.TweetID
}
ret.Items = append(ret.Items, feed_item)
}
// TODO: proper user id
p.fill_content(&ret.TweetTrove, UserID(0))
// TODO:
// ret.CursorBottom = ??
return ret
}
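For reference: the Notification struct that the query above scans into lives in pkg/scraper and is not shown in this diff. Judging from the columns selected here and the schema in the sample data below, it presumably looks roughly like the following sketch. ID, ActionUserID, ActionTweetID, ActionRetweetID, and UserIDs appear in this diff; the remaining field names, their types, and the db tags are assumptions.

// Hypothetical sketch of the scan target for the notifications query above.
// Only ID, ActionUserID, ActionTweetID, ActionRetweetID and UserIDs are taken
// from this diff; everything else is inferred from the selected columns and
// may not match the real definition in pkg/scraper.
type Notification struct {
	ID              NotificationID `db:"id"`
	Type            int            `db:"type"`
	SentAt          int64          `db:"sent_at"`    // epoch milliseconds
	SortIndex       int64          `db:"sort_index"` // doubles as the pagination cursor
	UserID          UserID         `db:"user_id"`    // user who received the notification
	ActionUserID    UserID         `db:"action_user_id"`  // user who triggered it (0 if none)
	ActionTweetID   TweetID        `db:"action_tweet_id"` // tweet it refers to (0 if none)
	ActionRetweetID TweetID        `db:"action_retweet_id"`
	HasDetail       bool           `db:"has_detail"`
	LastScrapedAt   int64          `db:"last_scraped_at"`

	// Filled in by the follow-up query against notification_users
	UserIDs []UserID
}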

View File

@@ -317,3 +317,39 @@ func TestTweetDetailWithThread(t *testing.T) {
require.Len(tweet_detail.ReplyChains, 1) // Should not include the Thread replies
assert.Equal(tweet_detail.ReplyChains[0][0], TweetID(1698792233619562866))
}
func TestNotificationsFeed(t *testing.T) {
require := require.New(t)
assert := assert.New(t)
profile, err := persistence.LoadProfile("../../sample_data/profile")
require.NoError(err)
feed := profile.GetNotificationsForUser(UserID(1488963321701171204), 12345678912345)
assert.Len(feed.TweetTrove.Notifications, 6)
assert.Len(feed.TweetTrove.Tweets, 3)
assert.Len(feed.TweetTrove.Retweets, 1)
assert.Len(feed.TweetTrove.Users, 6)
// Check that Users were retrieved on the notification with detail
notif, is_ok := feed.TweetTrove.Notifications["FKncQJGVgAQAAAABSQ3bEaTgXL8f40e77r4"]
assert.True(is_ok)
assert.Len(notif.UserIDs, 3)
// Ensure they're also in the TweetTrove
for _, u_id := range notif.UserIDs {
_, is_ok := feed.TweetTrove.Users[u_id]
assert.True(is_ok)
}
assert.Len(feed.Items, 6)
assert.Equal(feed.Items[0].NotificationID, NotificationID("FDzeDIfVUAIAAAABiJONcqaBFAzeN-n-Luw"))
assert.Equal(feed.Items[0].RetweetID, TweetID(1490135787124232223))
assert.Equal(feed.Items[1].NotificationID, NotificationID("FDzeDIfVUAIAAvsBiJONcqYgiLgXOolO9t0"))
assert.Equal(feed.Items[1].TweetID, TweetID(1826778617705115869))
assert.Equal(feed.Items[2].NotificationID, NotificationID("FKncQJGVgAQAAAABSQ3bEaTgXL8VBxefepo"))
assert.Equal(feed.Items[2].TweetID, TweetID(1826778617705115868))
assert.Equal(feed.Items[3].NotificationID, NotificationID("FKncQJGVgAQAAAABSQ3bEaTgXL_S11Ev36g"))
assert.Equal(feed.Items[4].NotificationID, NotificationID("FKncQJGVgAQAAAABSQ3bEaTgXL-G8wObqVY"))
assert.Equal(feed.Items[5].NotificationID, NotificationID("FKncQJGVgAQAAAABSQ3bEaTgXL8f40e77r4"))
assert.Equal(feed.Items[5].TweetID, TweetID(1826778617705115868))
}
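Outside of the test above, a caller would presumably consume the returned Feed by resolving each FeedItem against the trove maps. A minimal sketch (not part of this commit), written in the same style as the test, i.e. with the scraper package dot-imported and "fmt" imported; a cursor of 0 means "no cursor" per the query above, and the Handle and Text field names are inferred from the users/tweets schema rather than taken from this diff:

// Minimal consumption sketch; assumes User.Handle and Tweet.Text exist.
func printNotifications(profile persistence.Profile, u_id UserID) {
	feed := profile.GetNotificationsForUser(u_id, 0) // cursor 0 = start from the newest
	for _, item := range feed.Items {
		n := feed.TweetTrove.Notifications[item.NotificationID]
		if u, is_ok := feed.TweetTrove.Users[n.ActionUserID]; is_ok {
			fmt.Printf("%s: triggered by @%s\n", n.ID, u.Handle)
		}
		if t, is_ok := feed.TweetTrove.Tweets[item.TweetID]; is_ok {
			fmt.Printf("    on tweet: %q\n", t.Text)
		}
	}
}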

View File

@@ -100,18 +100,19 @@ func TestTimeline(t *testing.T) {
require.NoError(err)
c := persistence.NewTimelineCursor()
-c.PageSize = 5
+c.PageSize = 6
feed, err := profile.NextPage(c, UserID(0))
require.NoError(err)
-assert.Len(feed.Items, 5)
+assert.Len(feed.Items, 6)
assert.Len(feed.Retweets, 4)
-assert.Equal(feed.Items[0].RetweetID, TweetID(1490135787144237058))
-assert.Equal(feed.Items[1].RetweetID, TweetID(1490135787124232222))
-assert.Equal(feed.Items[2].RetweetID, TweetID(1490119308692766723))
-assert.Equal(feed.Items[3].RetweetID, TweetID(1490100255987171332))
-assert.Equal(feed.Items[4].TweetID, TweetID(1453461248142495744))
+assert.Equal(feed.Items[0].TweetID, TweetID(1826778617705115868))
+assert.Equal(feed.Items[1].RetweetID, TweetID(1490135787144237058))
+assert.Equal(feed.Items[2].RetweetID, TweetID(1490135787124232223))
+assert.Equal(feed.Items[3].RetweetID, TweetID(1490119308692766723))
+assert.Equal(feed.Items[4].RetweetID, TweetID(1490100255987171332))
+assert.Equal(feed.Items[5].TweetID, TweetID(1453461248142495744))
next_cursor := feed.CursorBottom
assert.Equal(next_cursor.CursorPosition, persistence.CURSOR_MIDDLE)
@@ -121,6 +122,7 @@ func TestTimeline(t *testing.T) {
assert.Equal(next_cursor.CursorValue, 1635367140000)
next_cursor.CursorValue = 1631935323000 // Scroll down a bit, kind of randomly
next_cursor.PageSize = 5
feed, err = profile.NextPage(next_cursor, UserID(0))
require.NoError(err)

View File

@@ -53,7 +53,9 @@ INSERT INTO users VALUES
(160242,534463724,'iko','ilyakooo0',replace('Code poet.\n~racfer-hattes','\n',char(10)),473,173,'','http://iko.soy',1332519666000,0,0,0,'https://pbs.twimg.com/profile_images/1671427114438909952/8v8raTeb.jpg','ilyakooo0_profile_8v8raTeb.jpg','','',0,0,0,0,0),
(169994,1689006330235760640,'sol🏴','sol_plunder','',165,134,'','',1691525490000,0,0,0,'https://pbs.twimg.com/profile_images/1689006644905033728/T1uO4Jvt.jpg','sol_plunder_profile_T1uO4Jvt.jpg','','',1704554384930058537,0,0,0,0),
(1680,1458284524761075714,'wispem-wantex','wispem_wantex',replace('~wispem-wantex\n\nCurrently looking for work (DMs open)','\n',char(10)),136,483,'on my computer','https://offline-twitter.com/',1636517116000,0,0,0,'https://pbs.twimg.com/profile_images/1462880679687954433/dXJN4Bo4.jpg','wispem_wantex_profile_dXJN4Bo4.jpg','','',1695221528617468324,1,0,0,0),
-(27398,1488963321701171204,'Offline Twatter','Offline_Twatter',replace('Offline Twitter is an open source twitter client and tweet-archiving app all in one. Try it out!\n\nSource code: https://t.co/2PMumKSxFO','\n',char(10)),4,2,'','https://offline-twitter.com',1643831522000,0,0,0,'https://pbs.twimg.com/profile_images/1507883049853210626/TytFbk_3.jpg','Offline_Twatter_profile_TytFbk_3.jpg','','',1507883724615999488,1,1,0,0);
+(27398,1488963321701171204,'Offline Twatter','Offline_Twatter',replace('Offline Twitter is an open source twitter client and tweet-archiving app all in one. Try it out!\n\nSource code: https://t.co/2PMumKSxFO','\n',char(10)),4,2,'','https://offline-twitter.com',1643831522000,0,0,0,'https://pbs.twimg.com/profile_images/1507883049853210626/TytFbk_3.jpg','Offline_Twatter_profile_TytFbk_3.jpg','','',1507883724615999488,1,1,0,0),
(175560,249206992,'ludwig','ludwigABAP','Gods chosen principal engineer. What is impossible for you is not impossible for me.',984,17966,'','https://ludwigabap.bearblog.dev/',1297180819000,0,0,0,'https://pbs.twimg.com/profile_images/1753215006697459712/n76_qnTj.jpg','ludwigABAP_profile_n76_qnTj.jpg','https://pbs.twimg.com/profile_banners/249206992/1706835247','ludwigABAP_banner_1706835247.jpg',0,0,0,0,0);
create table lists(rowid integer primary key,
is_online boolean not null default 0,
@@ -192,7 +194,9 @@ INSERT INTO tweets VALUES
(1409940,1698797388914151523,1458284524761075714,replace('This looks quite neat, but "comptime" is cool because it was designed to do stuff like declaring arrays where the size is the result of a function call, e.g.\n\nvar my_array: [fibonacci(10)]u32;\n\n...yet being able to create DSLs just emerged from this very simple concept','\n',char(10)),1693859834000,2,0,1,0,1698792233619562866,0,'ilyakooo0','ilyakooo0','',NULL,NULL,0,1,0,0,0),
(1409953,1698802806096846909,1689006330235760640,replace('Just poking around at some examples and explanation videos, It does seem very similar to Template Haskell, though maybe a bit more ergonomic.\n\nIs there something missing from this mental model?','\n',char(10)),1693861125000,3,0,1,0,1698797388914151523,0,'wispem_wantex,ilyakooo0','wispem_wantex,ilyakooo0','',NULL,NULL,0,1,0,0,0),
(1411566,1698848086880133147,1458284524761075714,'I have basically no experience with one and literally no experience with the other, and additionally I''ve never even used Haskell. So unfortunately I''m not really in a position to say.',1693871921000,1,0,1,0,1698802806096846909,0,'sol_plunder,ilyakooo0','sol_plunder,ilyakooo0','',NULL,NULL, 0,1,0,0,0),
-(1169437,1665509126737129472,1458284524761075714,replace('Btw, to the extent this has happened, it''s partly thanks to the Golden One (@TheGloriousLion) who invented #fizeekfriday and the "post physique" rejoinder. Everyone should follow him if they don''t already.\n\nSince I forgot last week, and since it''s topical, here''s a leg poast','\n',char(10)),1685923294000,7,0,0,0,1665505986184900611,0,'TheGloriousLion','','fizeekfriday',NULL,NULL,0,1,0,0,0);
+(1169437,1665509126737129472,1458284524761075714,replace('Btw, to the extent this has happened, it''s partly thanks to the Golden One (@TheGloriousLion) who invented #fizeekfriday and the "post physique" rejoinder. Everyone should follow him if they don''t already.\n\nSince I forgot last week, and since it''s topical, here''s a leg poast','\n',char(10)),1685923294000,7,0,0,0,1665505986184900611,0,'TheGloriousLion','','fizeekfriday',NULL,NULL,0,1,0,0,0),
(2857438,1826778617705115868,1488963321701171204,'Conversations are trees, not sequences. They branch. They don''t flow in a perfectly linear way.',1724372937000,4,1,0,0,0,0,'','','',NULL,NULL,0,1,0,0,0),
(2857439,1826778617705115869,1178839081222115328,'Real tweet that is definitely real',1724372938000,4,1,0,0,1826778617705115868,0,'Offline_Twatter','Offline_Twatter','',NULL,NULL,0,1,0,0,0);
CREATE TABLE retweets(rowid integer primary key,
retweet_id integer not null unique,
@@ -211,7 +215,7 @@ INSERT INTO retweets VALUES
(52,1490135787144237058,1490120332484972549,358545917,1644111031000),
(42,1490119308692766723,1490116725395927042,358545917,1644107102000),
(59,1490100255987171332,1489944024278523906,358545917,1644102560000),
-(1000,1490135787124232222,1343715029707796489,1304281147074064385,1644111021000); -- This is fake
+(1000,1490135787124232223,1698762413393236329,1488963321701171204,1644111022000); -- This is fake
create table urls (rowid integer primary key,
tweet_id integer not null,
@@ -526,12 +530,75 @@ insert into follows values
(3, 1488963321701171204, 1240784920831762433);
create table notification_types (rowid integer primary key,
name text not null unique
);
insert into notification_types(rowid, name) values
(1, 'like'),
(2, 'retweet'),
(3, 'quote-tweet'),
(4, 'reply'),
(5, 'follow'),
(6, 'mention'),
(7, 'user is LIVE'),
(8, 'poll ended'),
(9, 'login'),
(10, 'community pinned post'),
(11, 'new recommended post');
create table notifications (rowid integer primary key,
id text unique,
type integer not null,
sent_at integer not null,
sort_index integer not null,
user_id integer not null, -- user who received the notification
action_user_id integer references users(id), -- user who triggered the notification
action_tweet_id integer references tweets(id), -- tweet associated with the notification
action_retweet_id integer references retweets(retweet_id),
has_detail boolean not null default 0,
last_scraped_at not null default 0,
foreign key(type) references notification_types(rowid)
foreign key(user_id) references users(id)
);
INSERT INTO notifications VALUES
(1,'FKncQJGVgAQAAAABSQ3bEaTgXL8f40e77r4',1,1723494244885,1723494244885,1488963321701171204,249206992,1826778617705115868,NULL,1,1725067356270),
(2,'FKncQJGVgAQAAAABSQ3bEaTgXL-G8wObqVY',9,1724112169072,1724112169072,1488963321701171204,NULL,NULL,NULL,0,-62135596800000),
(3,'FKncQJGVgAQAAAABSQ3bEaTgXL_S11Ev36g',5,1722251072880,1724251072880,1488963321701171204,1032468021485293568,NULL,NULL,0,-62135596800000),
(4,'FKncQJGVgAQAAAABSQ3bEaTgXL8VBxefepo',2,1724372973735,1724372973735,1488963321701171204,1178839081222115328,1826778617705115868,NULL,0,-62135596800000),
(5,'FDzeDIfVUAIAAvsBiJONcqYgiLgXOolO9t0',6,-62135596800000,1725055975543,1488963321701171204,1178839081222115328,1826778617705115869,NULL,0,-62135596800000),
(6,'FDzeDIfVUAIAAAABiJONcqaBFAzeN-n-Luw',1,1724604756351,1726604756351,1488963321701171204,1178839081222115328,NULL,1490135787124232223,0,-62135596800000);
create table notification_tweets (rowid integer primary key,
notification_id not null references notifications(id),
tweet_id not null references tweets(id),
unique(notification_id, tweet_id)
);
create table notification_retweets (rowid integer primary key,
notification_id not null references notifications(id),
retweet_id not null references retweets(retweet_id),
unique(notification_id, retweet_id)
);
create table notification_users (rowid integer primary key,
notification_id not null references notifications(id),
user_id not null references users(id),
unique(notification_id, user_id)
);
INSERT INTO notification_users VALUES
(1,'FKncQJGVgAQAAAABSQ3bEaTgXL8f40e77r4',249206992),
(2,'FKncQJGVgAQAAAABSQ3bEaTgXL8f40e77r4',1304281147074064385),
(3,'FKncQJGVgAQAAAABSQ3bEaTgXL8f40e77r4',1178839081222115328);
create table fake_user_sequence(latest_fake_id integer not null);
insert into fake_user_sequence values(0x4000000000000000);
create table database_version(rowid integer primary key,
version_number integer not null unique
);
-insert into database_version(version_number) values (30);
+insert into database_version(version_number) values (31);
COMMIT;