Mirror of https://gitea.invidious.io/iv-org/invidious-copy-2023-06-08.git (synced 2024-08-15 00:53:38 +00:00)
Automatically migrate database

parent b8c87632e6
commit 5dc45c35e6

5 changed files with 123 additions and 9 deletions
@@ -105,10 +105,16 @@ end
 Kemal::CLI.new ARGV

+# Check table integrity
+analyze_table(PG_DB, logger, "channel_videos", ChannelVideo)
+analyze_table(PG_DB, logger, "nonces", Nonce)
+analyze_table(PG_DB, logger, "session_ids", SessionId)
+analyze_table(PG_DB, logger, "users", User)
+analyze_table(PG_DB, logger, "videos", Video)
+
+# Start jobs
 refresh_channels(PG_DB, logger, config.channel_threads, config.full_refresh)

 refresh_feeds(PG_DB, logger, config.feed_threads)

 subscribe_to_feeds(PG_DB, logger, HMAC_KEY, config)

 statistics = {
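Note: analyze_table is defined later in this commit (the +498,92 hunk below). At startup its create path reduces to roughly the following sketch, shown here for the "nonces" table; only the surrounding constant names (PG_DB, logger) are taken from the call sites above:

    # Sketch of analyze_table's create path for one table: if
    # information_schema has no row for "nonces", replay the schema file
    # config/sql/nonces.sql on a raw connection and log the action.
    if !PG_DB.query_one?("SELECT true FROM information_schema.tables WHERE table_name = $1", "nonces", as: Bool)
      PG_DB.using_connection do |conn|
        conn.as(PG::Connection).exec_all(File.read("config/sql/nonces.sql"))
      end

      logger.write("CREATE TABLE nonces\n")
    end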
@@ -1,5 +1,20 @@
 require "./macros"

+struct Nonce
+  db_mapping({
+    nonce:  String,
+    expire: Time,
+  })
+end
+
+struct SessionId
+  db_mapping({
+    id:     String,
+    email:  String,
+    issued: String,
+  })
+end
+
 struct ConfigPreferences
   module StringToArray
     def self.to_yaml(value : Array(String), yaml : YAML::Nodes::Builder)
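The two new structs mirror existing tables so that analyze_table (below) can compare their field order against the live schema. Because db_mapping also expands to DB.mapping (see the macros hunk at the end of this diff), they deserialize straight from query results. A minimal usage sketch, with the connection string assumed:

    # from_rs is generated by DB.mapping and maps each row onto the
    # struct's fields in declaration order.
    require "db"
    require "pg"

    PG_DB = DB.open("postgres://postgres@localhost:5432/invidious")

    PG_DB.query("SELECT * FROM nonces") do |rs|
      Nonce.from_rs(rs).each do |nonce|
        puts "#{nonce.nonce} expires at #{nonce.expire}"
      end
    end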
@@ -483,3 +498,92 @@ def extract_shelf_items(nodeset, ucid = nil, author_name = nil)

   return items
 end
+
+def analyze_table(db, logger, table_name, struct_type = nil)
+  # Create table if it doesn't exist
+  if !db.query_one?("SELECT true FROM information_schema.tables WHERE table_name = $1", table_name, as: Bool)
+    db.using_connection do |conn|
+      conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
+    end
+
+    logger.write("CREATE TABLE #{table_name}\n")
+  end
+
+  if !struct_type
+    return
+  end
+
+  struct_array = struct_type.to_type_tuple
+  column_array = get_column_array(db, table_name)
+  column_types = File.read("config/sql/#{table_name}.sql").match(/CREATE TABLE public\.#{table_name}\n\((?<types>[\d\D]*?)\);/)
+    .try &.["types"].split(",").map { |line| line.strip }
+
+  if !column_types
+    return
+  end
+
+  struct_array.each_with_index do |name, i|
+    if name != column_array[i]?
+      if !column_array[i]?
+        new_column = column_types.select { |line| line.starts_with? name }[0]
+        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+        logger.write("ALTER TABLE #{table_name} ADD COLUMN #{new_column}\n")
+        next
+      end
+
+      # Column doesn't exist
+      if !column_array.includes? name
+        new_column = column_types.select { |line| line.starts_with? name }[0]
+        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+      end
+
+      # Column exists but in the wrong position, rotate
+      if struct_array.includes? column_array[i]
+        until name == column_array[i]
+          new_column = column_types.select { |line| line.starts_with? column_array[i] }[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
+
+          # There's a column we didn't expect
+          if !new_column
+            db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+            logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}\n")
+
+            column_array = get_column_array(db, table_name)
+            next
+          end
+
+          db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+          logger.write("ALTER TABLE #{table_name} ADD COLUMN #{new_column}\n")
+          db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+          logger.write("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}\n")
+          db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+          logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE\n")
+          db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+          logger.write("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}\n")
+
+          column_array = get_column_array(db, table_name)
+        end
+      else
+        db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+        logger.write("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE\n")
+      end
+    end
+  end
+end
+
+class PG::ResultSet
+  def field(index = @column_index)
+    @fields.not_nil![index]
+  end
+end
+
+def get_column_array(db, table_name)
+  column_array = [] of String
+  db.query("SELECT * FROM #{table_name} LIMIT 0") do |rs|
+    rs.column_count.times do |i|
+      column = rs.as(PG::ResultSet).field(i)
+      column_array << column.name
+    end
+  end
+
+  return column_array
+end
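The rotation branch is the subtle part: PostgreSQL cannot reorder columns in place, so the loop repeatedly moves whichever column sits at position i to the end of the table through a temporary *_new copy, until the expected column surfaces at index i. A hypothetical walk-through, with table name, column names, and column type assumed:

    # Suppose the struct expects (email, updated) but the live "users"
    # table has (updated, email). At i = 0 the loop executes and logs:
    #
    #   ALTER TABLE users ADD COLUMN updated_new timestamp with time zone
    #   UPDATE users SET updated_new=updated
    #   ALTER TABLE users DROP COLUMN updated CASCADE
    #   ALTER TABLE users RENAME COLUMN updated_new TO updated
    #
    # (the column type is taken from the assumed schema file). "updated"
    # is now the last column, "email" has shifted to index 0, the
    # re-read column order matches the struct, and the loop ends.
    analyze_table(PG_DB, logger, "users", User)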
@@ -66,7 +66,7 @@ def refresh_feeds(db, logger, max_threads = 1)
   spawn do
     begin
       db.query("SELECT * FROM #{view_name} LIMIT 1") do |rs|
-        # View doesn't contain same number of rows as ChannelVideo
+        # Drop view that doesn't contain same number of rows as ChannelVideo
         if ChannelVideo.from_rs(rs)[0]?.try &.to_a.size.try &.!= rs.column_count
           db.exec("DROP MATERIALIZED VIEW #{view_name}")
           raise "valid schema does not exist"
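The chained .try calls in that guard are dense; unfolded, they amount to the sketch below (to_a is generated by the db_mapping macro changed later in this diff):

    # If the first row deserializes into a different number of fields
    # than the view has columns, the view was built against an older
    # ChannelVideo schema: drop it so the surrounding job can rebuild it.
    row = ChannelVideo.from_rs(rs)[0]?
    if row && row.to_a.size != rs.column_count
      db.exec("DROP MATERIALIZED VIEW #{view_name}")
      raise "valid schema does not exist"
    end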
@@ -3,10 +3,14 @@ macro db_mapping(mapping)
   end

   def to_a
-    return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
+    return [ {{*mapping.keys.map { |id| "@#{id}".id }}} ]
   end

-  DB.mapping({{mapping}})
+  def self.to_type_tuple
+    return { {{*mapping.keys.map { |id| "#{id}" }}} }
+  end
+
+  DB.mapping( {{mapping}} )
 end

 macro json_mapping(mapping)
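The new to_type_tuple is what analyze_table's struct_type.to_type_tuple call relies on: it expands to a tuple of the mapping's field names as strings. A self-contained expansion sketch follows; the macro body is copied from this commit, while the scaffolding around it (macro name, stripped-down struct) is hypothetical so it runs without a database driver:

    # Expansion sketch for the generated class method.
    macro type_tuple_only(mapping)
      def self.to_type_tuple
        return { {{*mapping.keys.map { |id| "#{id}" }}} }
      end
    end

    struct Nonce
      type_tuple_only({nonce: String, expire: Time})
    end

    puts Nonce.to_type_tuple # => {"nonce", "expire"}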
@@ -14,11 +18,11 @@ macro json_mapping(mapping)
   end

   def to_a
-    return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
+    return [ {{*mapping.keys.map { |id| "@#{id}".id }}} ]
   end

-  JSON.mapping({{mapping}})
-  YAML.mapping({{mapping}})
+  JSON.mapping( {{mapping}} )
+  YAML.mapping( {{mapping}} )
 end

 macro yaml_mapping(mapping)
@@ -26,7 +30,7 @@ macro yaml_mapping(mapping)
   end

   def to_a
-    return [{{*mapping.keys.map { |id| "@#{id}".id }}}]
+    return [ {{*mapping.keys.map { |id| "@#{id}".id }}} ]
   end

   def to_tuple