Fijxu 2025-09-13 17:17:53 -03:00 committed by GitHub
commit a1c8a04a21
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
28 changed files with 207 additions and 181 deletions

View File

@ -290,7 +290,7 @@ https_only: false
## Logging Verbosity. This is overridden if "-l LEVEL" or ## Logging Verbosity. This is overridden if "-l LEVEL" or
## "--log-level=LEVEL" are passed on the command line. ## "--log-level=LEVEL" are passed on the command line.
## ##
## Accepted values: All, Trace, Debug, Info, Warn, Error, Fatal, Off ## Accepted values: Trace, Debug, Info, Notice, Warn, Error, Fatal, None
## Default: Info ## Default: Info
## ##
#log_level: Info #log_level: Info
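The accepted values now mirror Crystal's stdlib `Log::Severity` enum: the old `All` and `Off` members of the custom `LogLevel` enum are gone, with `None` taking over the role of `Off`. A minimal sketch of how these values behave, using only the stdlib:

```crystal
require "log"

# Enum parsing is case-insensitive, so "Info", "info" and "INFO" all work.
Log::Severity.parse("Info")   # => Info
Log::Severity.parse("notice") # => Notice

# Severities are ordered; None is the highest and disables all output,
# replacing the old LogLevel::Off.
Log::Severity::Warn > Log::Severity::Debug # => true
```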

View File

@ -10,7 +10,7 @@ shards:
backtracer: backtracer:
git: https://github.com/sija/backtracer.cr.git git: https://github.com/sija/backtracer.cr.git
version: 1.2.2 version: 1.2.4
db: db:
git: https://github.com/crystal-lang/crystal-db.git git: https://github.com/crystal-lang/crystal-db.git
@ -18,7 +18,7 @@ shards:
exception_page: exception_page:
git: https://github.com/crystal-loot/exception_page.git git: https://github.com/crystal-loot/exception_page.git
version: 0.4.1 version: 0.5.0
http_proxy: http_proxy:
git: https://github.com/mamantoha/http_proxy.git git: https://github.com/mamantoha/http_proxy.git
@ -26,7 +26,7 @@ shards:
kemal: kemal:
git: https://github.com/kemalcr/kemal.git git: https://github.com/kemalcr/kemal.git
version: 1.6.0 version: 1.7.2
pg: pg:
git: https://github.com/will/crystal-pg.git git: https://github.com/will/crystal-pg.git

View File

@ -17,7 +17,7 @@ dependencies:
version: ~> 0.21.0 version: ~> 0.21.0
kemal: kemal:
github: kemalcr/kemal github: kemalcr/kemal
version: ~> 1.6.0 version: ~> 1.7.2
protodec: protodec:
github: iv-org/protodec github: iv-org/protodec
version: ~> 0.1.5 version: ~> 0.1.5

View File

@ -8,7 +8,6 @@ require "spectator"
require "../src/invidious/exceptions" require "../src/invidious/exceptions"
require "../src/invidious/helpers/macros" require "../src/invidious/helpers/macros"
require "../src/invidious/helpers/logger"
require "../src/invidious/helpers/utils" require "../src/invidious/helpers/utils"
require "../src/invidious/videos" require "../src/invidious/videos"
@ -19,9 +18,6 @@ require "../src/invidious/helpers/serialized_yt_data"
require "../src/invidious/yt_backend/extractors" require "../src/invidious/yt_backend/extractors"
require "../src/invidious/yt_backend/extractors_utils" require "../src/invidious/yt_backend/extractors_utils"
OUTPUT = File.open(File::NULL, "w")
LOGGER = Invidious::LogHandler.new(OUTPUT, LogLevel::Off)
def load_mock(file) : Hash(String, JSON::Any) def load_mock(file) : Hash(String, JSON::Any)
file = File.join(__DIR__, "..", "mocks", file + ".json") file = File.join(__DIR__, "..", "mocks", file + ".json")
content = File.read(file) content = File.read(file)

View File

@ -15,7 +15,6 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
require "digest/md5" require "digest/md5"
require "file_utils"
# Require kemal, then our own overrides # Require kemal, then our own overrides
require "kemal" require "kemal"
@ -30,6 +29,7 @@ require "xml"
require "yaml" require "yaml"
require "compress/zip" require "compress/zip"
require "protodec/utils" require "protodec/utils"
require "log"
require "./invidious/database/*" require "./invidious/database/*"
require "./invidious/database/migrations/*" require "./invidious/database/migrations/*"
@ -128,8 +128,8 @@ Kemal.config.extra_options do |parser|
parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{CONFIG.output})") do |output| parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{CONFIG.output})") do |output|
CONFIG.output = output CONFIG.output = output
end end
parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{CONFIG.log_level})") do |log_level| parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{Log::Severity.values} (default: #{CONFIG.log_level})") do |log_level|
CONFIG.log_level = LogLevel.parse(log_level) CONFIG.log_level = Log::Severity.parse(log_level)
end end
parser.on("-k", "--colorize", "Colorize logs") do parser.on("-k", "--colorize", "Colorize logs") do
CONFIG.colorize_logs = true CONFIG.colorize_logs = true
@ -146,11 +146,23 @@ end
 Kemal::CLI.new ARGV

-if CONFIG.output.upcase != "STDOUT"
-  FileUtils.mkdir_p(File.dirname(CONFIG.output))
-end
-
-OUTPUT = CONFIG.output.upcase == "STDOUT" ? STDOUT : File.open(CONFIG.output, mode: "a")
-LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level, CONFIG.colorize_logs)
+Log.setup do |c|
+  colorize = CONFIG.colorize_logs
+  output = CONFIG.output
+  backend = Log::IOBackend.new(formatter: Invidious::Logger.formatter(colorize))
+
+  if output.upcase != "STDOUT"
+    Dir.mkdir_p(File.dirname(output))
+    io = File.open(output, "wb")
+    colorize = false
+    backend = Log::IOBackend.new(io, formatter: Invidious::Logger.formatter(colorize))
+    puts("File output enabled in config, logs will be saved in '#{output}'")
+  end
+
+  c.bind "*", CONFIG.log_level, backend
+  c.bind "db.*", :none, backend
+  c.bind "http.*", :none, backend
+end

 # Check table integrity
 Invidious::Database.check_integrity(CONFIG)
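The `Log.setup` block above replaces the hand-rolled `LOGGER` constant: one `Log::IOBackend` (pointed at STDOUT or a file) is bound to every source at the configured level, while the chatty `db.*` and `http.*` sources (presumably emitted by crystal-db and the HTTP client/server) are silenced with `:none`. A stripped-down sketch of the same wiring, with hard-coded values in place of `CONFIG`:

```crystal
require "log"

backend = Log::IOBackend.new(STDOUT)

Log.setup do |c|
  c.bind "*", :info, backend      # everything at Info and above
  c.bind "db.*", :none, backend   # silence database query logs
  c.bind "http.*", :none, backend # silence HTTP library logs
end

Log.for("invidious").info { "logging configured" }
```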
@ -244,11 +256,13 @@ add_handler FilteredCompressHandler.new
add_handler APIHandler.new add_handler APIHandler.new
add_handler AuthHandler.new add_handler AuthHandler.new
add_handler DenyFrame.new add_handler DenyFrame.new
# Turn off Kemal's default request logging; we use our own handler based on Kemal::RequestLogHandler
Kemal.config.logging = false
add_handler Invidious::RequestLogHandler.new
add_context_storage_type(Array(String)) add_context_storage_type(Array(String))
add_context_storage_type(Preferences) add_context_storage_type(Preferences)
add_context_storage_type(Invidious::User) add_context_storage_type(Invidious::User)
Kemal.config.logger = LOGGER
Kemal.config.app_name = "Invidious" Kemal.config.app_name = "Invidious"
# Use in kemal's production mode. # Use in kemal's production mode.

View File

@ -156,8 +156,8 @@ def get_channel(id) : InvidiousChannel
end end
def fetch_channel(ucid, pull_all_videos : Bool) def fetch_channel(ucid, pull_all_videos : Bool)
LOGGER.debug("fetch_channel: #{ucid}") Log.debug { "fetch_channel: #{ucid}" }
LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}") Log.trace { "fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}" }
namespaces = { namespaces = {
"yt" => "http://www.youtube.com/xml/schemas/2015", "yt" => "http://www.youtube.com/xml/schemas/2015",
@ -165,9 +165,9 @@ def fetch_channel(ucid, pull_all_videos : Bool)
"default" => "http://www.w3.org/2005/Atom", "default" => "http://www.w3.org/2005/Atom",
} }
LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed") Log.trace { "fetch_channel: #{ucid} : Downloading RSS feed" }
rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed") Log.trace { "fetch_channel: #{ucid} : Parsing RSS feed" }
rss = XML.parse(rss) rss = XML.parse(rss)
author = rss.xpath_node("//default:feed/default:title", namespaces) author = rss.xpath_node("//default:feed/default:title", namespaces)
@ -184,7 +184,7 @@ def fetch_channel(ucid, pull_all_videos : Bool)
auto_generated = true auto_generated = true
end end
LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}") Log.trace { "fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}" }
channel = InvidiousChannel.new({ channel = InvidiousChannel.new({
id: ucid, id: ucid,
@ -194,10 +194,10 @@ def fetch_channel(ucid, pull_all_videos : Bool)
subscribed: nil, subscribed: nil,
}) })
LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page") Log.trace { "fetch_channel: #{ucid} : Downloading channel videos page" }
videos, continuation = IV::Channel::Tabs.get_videos(channel) videos, continuation = IV::Channel::Tabs.get_videos(channel)
LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed") Log.trace { "fetch_channel: #{ucid} : Extracting videos from channel RSS feed" }
rss.xpath_nodes("//default:feed/default:entry", namespaces).each do |entry| rss.xpath_nodes("//default:feed/default:entry", namespaces).each do |entry|
video_id = entry.xpath_node("yt:videoId", namespaces).not_nil!.content video_id = entry.xpath_node("yt:videoId", namespaces).not_nil!.content
title = entry.xpath_node("default:title", namespaces).not_nil!.content title = entry.xpath_node("default:title", namespaces).not_nil!.content
@ -241,17 +241,17 @@ def fetch_channel(ucid, pull_all_videos : Bool)
views: views, views: views,
}) })
LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video") Log.trace { "fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video" }
# We don't include the 'premiere_timestamp' here because channel pages don't include them, # We don't include the 'premiere_timestamp' here because channel pages don't include them,
# meaning the above timestamp is always null # meaning the above timestamp is always null
was_insert = Invidious::Database::ChannelVideos.insert(video) was_insert = Invidious::Database::ChannelVideos.insert(video)
if was_insert if was_insert
LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions") Log.trace { "fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions" }
NOTIFICATION_CHANNEL.send(VideoNotification.from_video(video)) NOTIFICATION_CHANNEL.send(VideoNotification.from_video(video))
else else
LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated") Log.trace { "fetch_channel: #{ucid} : video #{video_id} : Updated" }
end end
end end
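Note the change from eager string arguments (`LOGGER.trace("...")`) to blocks (`Log.trace { "..." }`): the block only runs when that severity is enabled for the source, so the string interpolation costs nothing at quieter log levels. A small sketch, with a hypothetical source name:

```crystal
require "log"

Log = ::Log.for("channels") # hypothetical source for this sketch

ucid = "UC_x5XG1OV2P6uZZ5FSM9Ttw"

# Old API: the string was built eagerly, even if Trace was filtered out.
# Block form: the interpolation below only runs when Trace is enabled
# for the "channels" source.
Log.trace { "fetch_channel: #{ucid} : Downloading RSS feed" }
```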

View File

@ -75,6 +75,7 @@ end
class Config class Config
include YAML::Serializable include YAML::Serializable
CLog = ::Log.for(self)
class CompanionConfig class CompanionConfig
include YAML::Serializable include YAML::Serializable
@ -98,8 +99,8 @@ class Config
property feed_threads : Int32 = 1 property feed_threads : Int32 = 1
# Log file path or STDOUT # Log file path or STDOUT
property output : String = "STDOUT" property output : String = "STDOUT"
# Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr # Default log level, valid YAML values are ints and strings, see https://crystal-lang.org/api/1.17.1/Log/Severity.html#enum-members
property log_level : LogLevel = LogLevel::Info property log_level : Log::Severity = Log::Severity::Info
# Enables colors in logs. Useful for debugging purposes # Enables colors in logs. Useful for debugging purposes
property colorize_logs : Bool = false property colorize_logs : Bool = false
# Database configuration with separate parameters (username, hostname, etc) # Database configuration with separate parameters (username, hostname, etc)
@ -249,14 +250,14 @@ class Config
# Exit on fail # Exit on fail
if !success if !success
puts %(Config.{{ivar.id}} failed to parse #{env_value} as {{ivar.type}}) CLog.fatal { %({{ivar.id}} failed to parse #{env_value} as {{ivar.type}}) }
exit(1) exit(1)
end end
end end
# Warn when any config attribute is set to "CHANGE_ME!!" # Warn when any config attribute is set to "CHANGE_ME!!"
if config.{{ivar.id}} == "CHANGE_ME!!" if config.{{ivar.id}} == "CHANGE_ME!!"
puts "Config: The value of '#{ {{ivar.stringify}} }' needs to be changed!!" CLog.fatal { "The value of '#{ {{ivar.stringify}} }' needs to be changed!!" }
exit(1) exit(1)
end end
{% end %} {% end %}
@ -264,16 +265,16 @@ class Config
if config.invidious_companion.present? if config.invidious_companion.present?
# invidious_companion and signature_server can't work together # invidious_companion and signature_server can't work together
if config.signature_server if config.signature_server
puts "Config: You can not run inv_sig_helper and invidious_companion at the same time." CLog.fatal { "You can not run inv_sig_helper and invidious_companion at the same time." }
exit(1) exit(1)
elsif config.invidious_companion_key.empty? elsif config.invidious_companion_key.empty?
puts "Config: Please configure a key if you are using invidious companion." CLog.fatal { "Please configure a key if you are using invidious companion." }
exit(1) exit(1)
elsif config.invidious_companion_key == "CHANGE_ME!!" elsif config.invidious_companion_key == "CHANGE_ME!!"
puts "Config: The value of 'invidious_companion_key' needs to be changed!!" CLog.fatal { "The value of 'invidious_companion_key' needs to be changed!!" }
exit(1) exit(1)
elsif config.invidious_companion_key.size != 16 elsif config.invidious_companion_key.size != 16
puts "Config: The value of 'invidious_companion_key' needs to be a size of 16 characters." CLog.fatal { "The value of 'invidious_companion_key' needs to be a size of 16 characters." }
exit(1) exit(1)
end end
@ -285,15 +286,15 @@ class Config
end end
end end
elsif config.signature_server elsif config.signature_server
puts("WARNING: inv-sig-helper is deprecated. Please switch to Invidious companion: https://docs.invidious.io/companion-installation/") CLog.warn { "inv-sig-helper is deprecated. Please switch to Invidious companion: https://docs.invidious.io/companion-installation/" }
else else
puts("WARNING: Invidious companion is required to view and playback videos. For more information see https://docs.invidious.io/companion-installation/") CLog.warn { "Invidious companion is required to view and playback videos. For more information see https://docs.invidious.io/companion-installation/" }
end end
# HMAC_key is mandatory # HMAC_key is mandatory
# See: https://github.com/iv-org/invidious/issues/3854 # See: https://github.com/iv-org/invidious/issues/3854
if config.hmac_key.empty? if config.hmac_key.empty?
puts "Config: 'hmac_key' is required/can't be empty" CLog.fatal { "'hmac_key' is required/can't be empty" }
exit(1) exit(1)
end end
@ -309,7 +310,7 @@ class Config
path: db.dbname, path: db.dbname,
) )
else else
puts "Config: Either database_url or db.* is required" CLog.fatal { "Either database_url or db.* is required" }
exit(1) exit(1)
end end
end end
@ -317,17 +318,17 @@ class Config
# Check if the socket configuration is valid # Check if the socket configuration is valid
if sb = config.socket_binding if sb = config.socket_binding
if sb.path.ends_with?("/") || File.directory?(sb.path) if sb.path.ends_with?("/") || File.directory?(sb.path)
puts "Config: The socket path " + sb.path + " must not be a directory!" CLog.fatal { "The socket path " + sb.path + " must not be a directory!" }
exit(1) exit(1)
end end
d = File.dirname(sb.path) d = File.dirname(sb.path)
if !File.directory?(d) if !File.directory?(d)
puts "Config: Socket directory " + sb.path + " does not exist or is not a directory!" CLog.fatal { "Socket directory " + sb.path + " does not exist or is not a directory!" }
exit(1) exit(1)
end end
p = sb.permissions.to_i?(base: 8) p = sb.permissions.to_i?(base: 8)
if !p || p < 0 || p > 0o777 if !p || p < 0 || p > 0o777
puts "Config: Socket permissions must be an octal between 0 and 777!" CLog.fatal { "Socket permissions must be an octal between 0 and 777!" }
exit(1) exit(1)
end end
end end
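All of the `puts` calls in config validation become `CLog.fatal` entries on a class-scoped logger, dropping the manual `Config:` prefix since the source name now identifies the origin. One thing worth noting: `fatal` only logs at the Fatal severity, it does not terminate the process, so each call keeps its explicit `exit(1)`. A condensed sketch (the method name is illustrative):

```crystal
require "log"

class Config
  CLog = ::Log.for(self) # source derived from the class name

  def self.check_hmac_key(hmac_key : String)
    if hmac_key.empty?
      CLog.fatal { "'hmac_key' is required/can't be empty" }
      exit(1) # fatal() logs but does not abort on its own
    end
  end
end
```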

View File

@ -2,6 +2,7 @@ require "pg"
module Invidious::Database module Invidious::Database
extend self extend self
Log = ::Log.for(self)
# Checks table integrity # Checks table integrity
# #
@ -33,7 +34,7 @@ module Invidious::Database
return # TODO return # TODO
if !PG_DB.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool) if !PG_DB.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
LOGGER.info("check_enum: CREATE TYPE #{enum_name}") Log.info { "check_enum: CREATE TYPE #{enum_name}" }
PG_DB.using_connection do |conn| PG_DB.using_connection do |conn|
conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql")) conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
@ -46,7 +47,7 @@ module Invidious::Database
begin begin
PG_DB.exec("SELECT * FROM #{table_name} LIMIT 0") PG_DB.exec("SELECT * FROM #{table_name} LIMIT 0")
rescue ex rescue ex
LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}") Log.info { "check_table: CREATE TABLE #{table_name}" }
PG_DB.using_connection do |conn| PG_DB.using_connection do |conn|
conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql")) conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@ -66,7 +67,7 @@ module Invidious::Database
if name != column_array[i]? if name != column_array[i]?
if !column_array[i]? if !column_array[i]?
new_column = column_types.select(&.starts_with?(name))[0] new_column = column_types.select(&.starts_with?(name))[0]
LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}") Log.info { "check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}" }
PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}") PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
next next
end end
@ -84,29 +85,29 @@ module Invidious::Database
# There's a column we didn't expect # There's a column we didn't expect
if !new_column if !new_column
LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}") Log.info { "check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}" }
PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE") PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
column_array = get_column_array(PG_DB, table_name) column_array = get_column_array(PG_DB, table_name)
next next
end end
LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}") Log.info { "check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}" }
PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}") PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}") Log.info { "check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}" }
PG_DB.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}") PG_DB.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE") Log.info { "check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE" }
PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE") PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}") Log.info { "check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}" }
PG_DB.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}") PG_DB.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
column_array = get_column_array(PG_DB, table_name) column_array = get_column_array(PG_DB, table_name)
end end
else else
LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE") Log.info { "check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE" }
PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE") PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
end end
end end
@ -116,7 +117,7 @@ module Invidious::Database
column_array.each do |column| column_array.each do |column|
if !struct_array.includes? column if !struct_array.includes? column
LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE") Log.info { "check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE" }
PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE") PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
end end
end end
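With `Log = ::Log.for(self)` declared once at the top of `module Invidious::Database`, every `Log.info` call in the module (and in types nested under it, through normal constant lookup) is tagged with a source derived from the namespace, so messages no longer need manual prefixes. A sketch of the pattern:

```crystal
require "log"

module Invidious::Database
  Log = ::Log.for(self) # source should come out as "invidious.database"

  def self.check_enum(enum_name : String)
    Log.info { "check_enum: CREATE TYPE #{enum_name}" }
  end
end

Invidious::Database.check_enum("privacy")
```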

View File

@ -14,12 +14,12 @@ class Invidious::Database::Migrator
.each do |migration| .each do |migration|
next if versions.includes?(migration.version) next if versions.includes?(migration.version)
puts "Running migration: #{migration.class.name}" Log.info { "Running migration: #{migration.class.name}" }
migration.migrate migration.migrate
ran_migration = true ran_migration = true
end end
puts "No migrations to run." unless ran_migration Log.info { "No migrations to run." } unless ran_migration
end end
def pending_migrations? : Bool def pending_migrations? : Bool

View File

@ -98,7 +98,7 @@ def translate(locale : String?, key : String, text : String | Hash(String, Strin
# Log a warning if "key" doesn't exist in en-US locale and return # Log a warning if "key" doesn't exist in en-US locale and return
# that key as the text, so this is more or less transparent to the user. # that key as the text, so this is more or less transparent to the user.
if !LOCALES["en-US"].has_key?(key) if !LOCALES["en-US"].has_key?(key)
LOGGER.warn("i18n: Missing translation key \"#{key}\"") Log.warn { "Missing translation key \"#{key}\"" }
return key return key
end end
@ -165,7 +165,7 @@ def translate_count(locale : String, key : String, count : Int, format = NumberF
translation = translate_count("en-US", key, count) translation = translate_count("en-US", key, count)
else else
# Return key if we're already in english, as the translation is missing # Return key if we're already in english, as the translation is missing
LOGGER.warn("i18n: Missing translation key \"#{key}\"") Log.warn { "Missing translation key \"#{key}\"" }
return key return key
end end
end end

View File

@ -145,7 +145,7 @@ module I18next::Plurals
if version > 4 || version == 0 if version > 4 || version == 0
raise "Invalid i18next version: v#{version}." raise "Invalid i18next version: v#{version}."
elsif version == 4 elsif version == 4
# Logger.error("Unsupported i18next version: v4. Falling back to v3") # Log.error { "Unsupported i18next version: v4. Falling back to v3" }
@version = 3_u8 @version = 3_u8
else else
@version = version.to_u8 @version = version.to_u8

View File

@ -1,68 +1,71 @@
require "colorize" require "colorize"
enum LogLevel module Invidious::Logger
All = 0 extend self
Trace = 1
Debug = 2
Info = 3
Warn = 4
Error = 5
Fatal = 6
Off = 7
end
class Invidious::LogHandler < Kemal::BaseLogHandler def formatter(use_color : Bool = true)
def initialize(@io : IO = STDOUT, @level = LogLevel::Debug, use_color : Bool = true)
Colorize.enabled = use_color Colorize.enabled = use_color
Colorize.on_tty_only! Colorize.on_tty_only!
formatter = ::Log::Formatter.new do |entry, io|
message = entry.message
severity = entry.severity
data = entry.data
source = entry.source
timestamp = entry.timestamp
io << (use_color ? timestamp.colorize(:dark_gray) : timestamp) << " "
io << (use_color ? colorize_severity(severity) : severity.label) << " "
io << (use_color ? source.colorize(:dark_gray) : source) << ": " if !source.empty?
io << message
if !data.empty?
io << " "
data.each do |dat|
io << (use_color ? dat[0].to_s.colorize(:light_cyan) : dat[0].to_s)
io << "="
io << dat[1].to_s
end
end
end
return formatter
end end
private def colorize_severity(severity : Log::Severity)
case severity
in Log::Severity::Trace then severity.label.colorize(:cyan)
in Log::Severity::Info then severity.label.colorize(:green)
in Log::Severity::Notice then severity.label.colorize(:light_yellow)
in Log::Severity::Warn then severity.label.colorize(:yellow)
in Log::Severity::Error then severity.label.colorize(:red)
in Log::Severity::Fatal then severity.label.colorize(:red)
in Log::Severity::Debug then severity.label
in Log::Severity::None then severity.label
end
end
end
class Invidious::RequestLogHandler < Kemal::RequestLogHandler
Log = ::Log.for(Kemal)
def call(context : HTTP::Server::Context) def call(context : HTTP::Server::Context)
elapsed_time = Time.measure { call_next(context) } elapsed_time = Time.measure { call_next(context) }
elapsed_text = elapsed_text(elapsed_time) elapsed_text = elapsed_text(elapsed_time)
# Default: full path with parameters
requested_url = context.request.resource requested_url = context.request.resource
# Try not to log search queries passed as GET parameters during normal use # Try not to log search queries passed as GET parameters during normal use
# (They will still be logged if log level is 'Debug' or 'Trace') # (They will still be logged if log level is 'Debug' or 'Trace')
if @level > LogLevel::Debug && ( if CONFIG.log_level > ::Log::Severity::Debug && (
requested_url.downcase.includes?("search") || requested_url.downcase.includes?("q=") requested_url.downcase.includes?("search") || requested_url.downcase.includes?("q=")
) )
# Log only the path # Log only the path
requested_url = context.request.path requested_url = context.request.path
end end
info("#{context.response.status_code} #{context.request.method} #{requested_url} #{elapsed_text}") Log.info { "#{context.response.status_code} #{context.request.method} #{requested_url} #{elapsed_text}" }
context context
end end
def write(message : String)
@io << message
@io.flush
end
def color(level)
case level
when LogLevel::Trace then :cyan
when LogLevel::Debug then :green
when LogLevel::Info then :white
when LogLevel::Warn then :yellow
when LogLevel::Error then :red
when LogLevel::Fatal then :magenta
else :default
end
end
{% for level in %w(trace debug info warn error fatal) %}
def {{level.id}}(message : String)
if LogLevel::{{level.id.capitalize}} >= @level
puts("#{Time.utc} [{{level.id}}] #{message}".colorize(color(LogLevel::{{level.id.capitalize}})))
end
end
{% end %}
private def elapsed_text(elapsed) private def elapsed_text(elapsed)
millis = elapsed.total_milliseconds millis = elapsed.total_milliseconds
return "#{millis.round(2)}ms" if millis >= 1 return "#{millis.round(2)}ms" if millis >= 1

View File

@ -73,6 +73,8 @@ module Invidious::SigHelper
# ---------------------- # ----------------------
class Client class Client
Log = ::Log.for(self)
@mux : Multiplexor @mux : Multiplexor
def initialize(uri_or_path) def initialize(uri_or_path)
@ -156,8 +158,8 @@ module Invidious::SigHelper
slice = channel.receive slice = channel.receive
return yield slice return yield slice
rescue ex rescue ex
LOGGER.debug("SigHelper: Error when sending a request") Log.debug { "Error when sending a request" }
LOGGER.trace(ex.inspect_with_backtrace) Log.trace { ex.inspect_with_backtrace }
return nil return nil
end end
end end
@ -167,6 +169,8 @@ module Invidious::SigHelper
# --------------------- # ---------------------
class Multiplexor class Multiplexor
Log = ::Log.for(self)
alias TransactionID = UInt32 alias TransactionID = UInt32
record Transaction, channel = ::Channel(Bytes).new record Transaction, channel = ::Channel(Bytes).new
@ -185,22 +189,22 @@ module Invidious::SigHelper
def listen : Nil def listen : Nil
raise "Socket is closed" if @conn.closed? raise "Socket is closed" if @conn.closed?
LOGGER.debug("SigHelper: Multiplexor listening") Log.debug { "Multiplexor listening" }
spawn do spawn do
loop do loop do
begin begin
receive_data receive_data
rescue ex rescue ex
LOGGER.info("SigHelper: Connection to helper died with '#{ex.message}' trying to reconnect...") Log.info { "Connection to helper died with '#{ex.message}' trying to reconnect..." }
# We close the socket because for some reason it is not closed. # We close the socket because for some reason it is not closed.
@conn.close @conn.close
loop do loop do
begin begin
@conn = Connection.new(@uri_or_path) @conn = Connection.new(@uri_or_path)
LOGGER.info("SigHelper: Reconnected to SigHelper!") Log.info { "Reconnected to SigHelper!" }
rescue ex rescue ex
LOGGER.debug("SigHelper: Reconnection to helper unsuccessful with error '#{ex.message}'. Retrying") Log.debug { "Reconnection to helper unsuccessful with error '#{ex.message}'. Retrying" }
sleep 500.milliseconds sleep 500.milliseconds
next next
end end
@ -238,7 +242,7 @@ module Invidious::SigHelper
if transaction = @queue.delete(transaction_id) if transaction = @queue.delete(transaction_id)
# Remove transaction from queue and send data to the channel # Remove transaction from queue and send data to the channel
transaction.channel.send(slice) transaction.channel.send(slice)
LOGGER.trace("SigHelper: Transaction unqueued and data sent to channel") Log.trace { "Transaction unqueued and data sent to channel" }
else else
raise Exception.new("SigHelper: Received transaction was not in queue") raise Exception.new("SigHelper: Received transaction was not in queue")
end end
@ -251,7 +255,7 @@ module Invidious::SigHelper
transaction_id = @conn.read_bytes(UInt32, NetworkEndian) transaction_id = @conn.read_bytes(UInt32, NetworkEndian)
length = @conn.read_bytes(UInt32, NetworkEndian) length = @conn.read_bytes(UInt32, NetworkEndian)
LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} / length #{length}") Log.trace { "Recv transaction 0x#{transaction_id.to_s(base: 16)} / length #{length}" }
if length > 67_000 if length > 67_000
raise Exception.new("SigHelper: Packet longer than expected (#{length})") raise Exception.new("SigHelper: Packet longer than expected (#{length})")
@ -261,15 +265,15 @@ module Invidious::SigHelper
slice = Bytes.new(length) slice = Bytes.new(length)
@conn.read(slice) if length > 0 @conn.read(slice) if length > 0
LOGGER.trace("SigHelper: payload = #{slice}") Log.trace { "payload = #{slice}" }
LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} - Done") Log.trace { "Recv transaction 0x#{transaction_id.to_s(base: 16)} - Done" }
return transaction_id, slice return transaction_id, slice
end end
# Write a single packet to the socket # Write a single packet to the socket
private def write_packet(transaction_id : TransactionID, request : Request) private def write_packet(transaction_id : TransactionID, request : Request)
LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} / opcode #{request.opcode}") Log.trace { "Send transaction 0x#{transaction_id.to_s(base: 16)} / opcode #{request.opcode}" }
io = IO::Memory.new(1024) io = IO::Memory.new(1024)
io.write_bytes(request.opcode.to_u8, NetworkEndian) io.write_bytes(request.opcode.to_u8, NetworkEndian)
@ -282,11 +286,13 @@ module Invidious::SigHelper
@conn.send(io) @conn.send(io)
@conn.flush @conn.flush
LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} - Done") Log.trace { "Send transaction 0x#{transaction_id.to_s(base: 16)} - Done" }
end end
end end
class Connection class Connection
Log = ::Log.for(self)
@socket : UNIXSocket | TCPSocket @socket : UNIXSocket | TCPSocket
{% if flag?(:advanced_debug) %} {% if flag?(:advanced_debug) %}
@ -309,7 +315,7 @@ module Invidious::SigHelper
uri = URI.parse("tcp://#{host_or_path}") uri = URI.parse("tcp://#{host_or_path}")
@socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!) @socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!)
end end
LOGGER.info("SigHelper: Using helper at '#{host_or_path}'") Log.info { "Using helper at '#{host_or_path}'" }
{% if flag?(:advanced_debug) %} {% if flag?(:advanced_debug) %}
@io = IO::Hexdump.new(@socket, output: STDERR, read: true, write: true) @io = IO::Hexdump.new(@socket, output: STDERR, read: true, write: true)
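Because `Client`, `Multiplexor`, and `Connection` each get their own `Log = ::Log.for(self)`, the hard-coded `SigHelper:` prefixes could be dropped from every message: the source field now carries that context. A sketch of the effect:

```crystal
require "log"

module Invidious::SigHelper
  class Multiplexor
    Log = ::Log.for(self) # source roughly "invidious.sig_helper.multiplexor"

    def listen
      # No manual "SigHelper:" prefix; the source identifies the emitter.
      Log.debug { "Multiplexor listening" }
    end
  end
end
```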

View File

@ -2,6 +2,8 @@ require "http/params"
require "./sig_helper" require "./sig_helper"
class Invidious::DecryptFunction class Invidious::DecryptFunction
Log = ::Log.for(self).for("SignatureDecryption")
@last_update : Time = Time.utc - 42.days @last_update : Time = Time.utc - 42.days
def initialize(uri_or_path) def initialize(uri_or_path)
@ -18,7 +20,7 @@ class Invidious::DecryptFunction
update_time_elapsed = (@client.get_player_timestamp || 301).seconds update_time_elapsed = (@client.get_player_timestamp || 301).seconds
if update_time_elapsed > 5.minutes if update_time_elapsed > 5.minutes
LOGGER.debug("Signature: Player might be outdated, updating") Log.debug { "Player might be outdated, updating" }
@client.force_update @client.force_update
@last_update = Time.utc @last_update = Time.utc
end end
@ -28,8 +30,8 @@ class Invidious::DecryptFunction
self.check_update self.check_update
return @client.decrypt_n_param(n) return @client.decrypt_n_param(n)
rescue ex rescue ex
LOGGER.debug(ex.message || "Signature: Unknown error") Log.debug { ex.message || "Unknown error" }
LOGGER.trace(ex.inspect_with_backtrace) Log.trace { ex.inspect_with_backtrace }
return nil return nil
end end
@ -37,8 +39,8 @@ class Invidious::DecryptFunction
self.check_update self.check_update
return @client.decrypt_sig(str) return @client.decrypt_sig(str)
rescue ex rescue ex
LOGGER.debug(ex.message || "Signature: Unknown error") Log.debug { ex.message || "Unknown error" }
LOGGER.trace(ex.inspect_with_backtrace) Log.trace { ex.inspect_with_backtrace }
return nil return nil
end end
@ -46,8 +48,8 @@ class Invidious::DecryptFunction
self.check_update self.check_update
return @client.get_signature_timestamp return @client.get_signature_timestamp
rescue ex rescue ex
LOGGER.debug(ex.message || "Signature: Unknown error") Log.debug { ex.message || "Unknown error" }
LOGGER.trace(ex.inspect_with_backtrace) Log.trace { ex.inspect_with_backtrace }
return nil return nil
end end
end end
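Here the logger is built by chaining `.for`: `::Log.for(self)` derives a source from the class, and the second `.for("SignatureDecryption")` nests a child source under it, which is how the old `Signature:` message prefix survives the migration. A sketch:

```crystal
require "log"

class Invidious::DecryptFunction
  # Chained .for calls nest sources; the exact dotted name is illustrative.
  Log = ::Log.for(self).for("SignatureDecryption")

  def check_update
    Log.debug { "Player might be outdated, updating" }
  end
end
```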

View File

@ -6,6 +6,8 @@ abstract class Invidious::Jobs::BaseJob
# and to create the associated instance property. # and to create the associated instance property.
# #
macro inherited macro inherited
Log = ::Log.for(self)
macro finished macro finished
# This config structure can be expanded as required. # This config structure can be expanded as required.
struct Config struct Config
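Declaring `Log = ::Log.for(self)` inside `macro inherited` injects the constant into every job subclass at inheritance time, with `self` resolving to the subclass, so each job logs under its own source and the old `JobName:` message prefixes become redundant. A sketch of the mechanism:

```crystal
require "log"

abstract class BaseJob
  macro inherited
    # Expanded in each subclass; self is the subclass, not BaseJob.
    Log = ::Log.for(self)
  end
end

class CleanupJob < BaseJob
end

CleanupJob::Log.info { "running" } # tagged with a "cleanup_job"-style source
```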

View File

@ -5,7 +5,7 @@ class Invidious::Jobs::ClearExpiredItemsJob < Invidious::Jobs::BaseJob
loop do loop do
failed = false failed = false
LOGGER.info("jobs: running ClearExpiredItems job") Log.info { "running ClearExpiredItemsJob job" }
begin begin
Invidious::Database::Videos.delete_expired Invidious::Database::Videos.delete_expired
@ -16,10 +16,10 @@ class Invidious::Jobs::ClearExpiredItemsJob < Invidious::Jobs::BaseJob
# Retry earlier than scheduled on DB error # Retry earlier than scheduled on DB error
if failed if failed
LOGGER.info("jobs: ClearExpiredItems failed. Retrying in 10 minutes.") Log.info { "ClearExpiredItems failed. Retrying in 10 minutes." }
sleep 10.minutes sleep 10.minutes
else else
LOGGER.info("jobs: ClearExpiredItems done.") Log.info { "ClearExpiredItems done." }
sleep 1.hour sleep 1.hour
end end
end end

View File

@ -12,7 +12,7 @@ class Invidious::Jobs::InstanceListRefreshJob < Invidious::Jobs::BaseJob
def begin def begin
loop do loop do
refresh_instances refresh_instances
LOGGER.info("InstanceListRefreshJob: Done, sleeping for 30 minutes") Log.info { "Done, sleeping for 30 minutes" }
sleep 30.minute sleep 30.minute
Fiber.yield Fiber.yield
end end
@ -43,9 +43,9 @@ class Invidious::Jobs::InstanceListRefreshJob < Invidious::Jobs::BaseJob
filtered_instance_list << {info["region"].as_s, domain.as_s} filtered_instance_list << {info["region"].as_s, domain.as_s}
rescue ex rescue ex
if domain if domain
LOGGER.info("InstanceListRefreshJob: failed to parse information from '#{domain}' because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ") Log.info { "failed to parse information from '#{domain}' because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" " }
else else
LOGGER.info("InstanceListRefreshJob: failed to parse information from an instance because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ") Log.info { "failed to parse information from an instance because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" " }
end end
end end
end end

View File

@ -57,7 +57,7 @@ class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
spawn do spawn do
loop do loop do
begin begin
LOGGER.debug("NotificationJob: waking up") Log.debug { "waking up" }
cloned = {} of String => Set(VideoNotification) cloned = {} of String => Set(VideoNotification)
notify_mutex.synchronize do notify_mutex.synchronize do
cloned = to_notify.clone cloned = to_notify.clone
@ -69,7 +69,7 @@ class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
next next
end end
LOGGER.info("NotificationJob: updating channel #{channel_id} with #{notifications.size} notifications") Log.info { "updating channel #{channel_id} with #{notifications.size} notifications" }
if CONFIG.enable_user_notifications if CONFIG.enable_user_notifications
video_ids = notifications.map(&.video_id) video_ids = notifications.map(&.video_id)
Invidious::Database::Users.add_multiple_notifications(channel_id, video_ids) Invidious::Database::Users.add_multiple_notifications(channel_id, video_ids)
@ -89,9 +89,9 @@ class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
end end
end end
LOGGER.trace("NotificationJob: Done, sleeping") Log.trace { "Done, sleeping" }
rescue ex rescue ex
LOGGER.error("NotificationJob: #{ex.message}") Log.error { ex.message }
end end
sleep 1.minute sleep 1.minute
Fiber.yield Fiber.yield

View File

@ -12,37 +12,37 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
backoff = 2.minutes backoff = 2.minutes
loop do loop do
LOGGER.debug("RefreshChannelsJob: Refreshing all channels") Log.debug { "Refreshing all channels" }
PG_DB.query("SELECT id FROM channels ORDER BY updated") do |rs| PG_DB.query("SELECT id FROM channels ORDER BY updated") do |rs|
rs.each do rs.each do
id = rs.read(String) id = rs.read(String)
if active_fibers >= lim_fibers if active_fibers >= lim_fibers
LOGGER.trace("RefreshChannelsJob: Fiber limit reached, waiting...") Log.trace { "Fiber limit reached, waiting..." }
if active_channel.receive if active_channel.receive
LOGGER.trace("RefreshChannelsJob: Fiber limit ok, continuing") Log.trace { "Fiber limit ok, continuing" }
active_fibers -= 1 active_fibers -= 1
end end
end end
LOGGER.debug("RefreshChannelsJob: #{id} : Spawning fiber") Log.debug { "#{id} : Spawning fiber" }
active_fibers += 1 active_fibers += 1
spawn do spawn do
begin begin
LOGGER.trace("RefreshChannelsJob: #{id} fiber : Fetching channel") Log.trace { "#{id} fiber : Fetching channel" }
channel = fetch_channel(id, pull_all_videos: CONFIG.full_refresh) channel = fetch_channel(id, pull_all_videos: CONFIG.full_refresh)
lim_fibers = max_fibers lim_fibers = max_fibers
LOGGER.trace("RefreshChannelsJob: #{id} fiber : Updating DB") Log.trace { "#{id} fiber : Updating DB" }
Invidious::Database::Channels.update_author(id, channel.author) Invidious::Database::Channels.update_author(id, channel.author)
rescue ex rescue ex
LOGGER.error("RefreshChannelsJob: #{id} : #{ex.message}") Log.error { "#{id} : #{ex.message}" }
if ex.message == "Deleted or invalid channel" if ex.message == "Deleted or invalid channel"
Invidious::Database::Channels.update_mark_deleted(id) Invidious::Database::Channels.update_mark_deleted(id)
else else
lim_fibers = 1 lim_fibers = 1
LOGGER.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s") Log.error { "#{id} fiber : backing off for #{backoff}s" }
sleep backoff sleep backoff
if backoff < 1.days if backoff < 1.days
backoff += backoff backoff += backoff
@ -51,14 +51,14 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
end end
end end
ensure ensure
LOGGER.debug("RefreshChannelsJob: #{id} fiber : Done") Log.debug { "#{id} fiber : Done" }
active_channel.send(true) active_channel.send(true)
end end
end end
end end
end end
LOGGER.debug("RefreshChannelsJob: Done, sleeping for #{CONFIG.channel_refresh_interval}") Log.debug { "Done, sleeping for #{CONFIG.channel_refresh_interval}" }
sleep CONFIG.channel_refresh_interval sleep CONFIG.channel_refresh_interval
Fiber.yield Fiber.yield
end end

View File

@ -28,14 +28,14 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
column_array = Invidious::Database.get_column_array(db, view_name) column_array = Invidious::Database.get_column_array(db, view_name)
ChannelVideo.type_array.each_with_index do |name, i| ChannelVideo.type_array.each_with_index do |name, i|
if name != column_array[i]? if name != column_array[i]?
LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}") Log.info { "DROP MATERIALIZED VIEW #{view_name}" }
db.exec("DROP MATERIALIZED VIEW #{view_name}") db.exec("DROP MATERIALIZED VIEW #{view_name}")
raise "view does not exist" raise "view does not exist"
end end
end end
if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))" if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))"
LOGGER.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...") Log.info { "Materialized view #{view_name} is out-of-date, recreating..." }
db.exec("DROP MATERIALIZED VIEW #{view_name}") db.exec("DROP MATERIALIZED VIEW #{view_name}")
end end
@ -47,18 +47,18 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
legacy_view_name = "subscriptions_#{sha256(email)[0..7]}" legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"
db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0") db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
LOGGER.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}") Log.info { "RENAME MATERIALIZED VIEW #{legacy_view_name}" }
db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}") db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
rescue ex rescue ex
begin begin
# While iterating through, we may have an email stored from a deleted account # While iterating through, we may have an email stored from a deleted account
if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool) if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
LOGGER.info("RefreshFeedsJob: CREATE #{view_name}") Log.info { "CREATE #{view_name}" }
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}") db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}")
db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email) db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email)
end end
rescue ex rescue ex
LOGGER.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}") Log.error { "REFRESH #{email} : #{ex.message}" }
end end
end end
end end

View File

@ -32,10 +32,10 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
response = subscribe_pubsub(ucid, hmac_key) response = subscribe_pubsub(ucid, hmac_key)
if response.status_code >= 400 if response.status_code >= 400
LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{response.body}") Log.error { "#{ucid} : #{response.body}" }
end end
rescue ex rescue ex
LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}") Log.error { "#{ucid} : #{ex.message}" }
end end
active_channel.send(true) active_channel.send(true)

View File

@ -403,7 +403,7 @@ module Invidious::Routes::Feeds
signature = env.request.headers["X-Hub-Signature"].lchop("sha1=") signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body) if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
LOGGER.error("/feed/webhook/#{token} : Invalid signature") Log.error { "/feed/webhook/#{token} : Invalid signature" }
haltf env, status_code: 200 haltf env, status_code: 200
end end

View File

@ -54,10 +54,10 @@ module Invidious::Routes::Watch
begin begin
video = get_video(id, region: params.region) video = get_video(id, region: params.region)
rescue ex : NotFoundException rescue ex : NotFoundException
LOGGER.error("get_video not found: #{id} : #{ex.message}") Log.error { "get_video not found: #{id} : #{ex.message}" }
return error_template(404, ex) return error_template(404, ex)
rescue ex rescue ex
LOGGER.error("get_video: #{id} : #{ex.message}") Log.error { "get_video: #{id} : #{ex.message}" }
return error_template(500, ex) return error_template(500, ex)
end end

View File

@ -6,7 +6,7 @@ require "json"
# #
# TODO: "compactRadioRenderer" (Mix) and # TODO: "compactRadioRenderer" (Mix) and
# TODO: Use a proper struct/class instead of a hacky JSON object # TODO: Use a proper struct/class instead of a hacky JSON object
def parse_related_video(related : JSON::Any) : Hash(String, JSON::Any)? private def parse_related_video(related : JSON::Any) : Hash(String, JSON::Any)?
return nil if !related["videoId"]? return nil if !related["videoId"]?
# The compact renderer has video length in seconds, where the end # The compact renderer has video length in seconds, where the end
@ -34,7 +34,7 @@ def parse_related_video(related : JSON::Any) : Hash(String, JSON::Any)?
HelperExtractors.get_short_view_count(r).to_s HelperExtractors.get_short_view_count(r).to_s
end end
LOGGER.trace("parse_related_video: Found \"watchNextEndScreenRenderer\" container") Log.trace { "parse_related_video: Found \"watchNextEndScreenRenderer\" container" }
if published_time_text = related["publishedTimeText"]? if published_time_text = related["publishedTimeText"]?
decoded_time = decode_date(published_time_text["simpleText"].to_s) decoded_time = decode_date(published_time_text["simpleText"].to_s)
@ -113,7 +113,7 @@ def extract_video_info(video_id : String)
if !CONFIG.invidious_companion.present? if !CONFIG.invidious_companion.present?
if player_response.dig?("streamingData", "adaptiveFormats", 0, "url").nil? if player_response.dig?("streamingData", "adaptiveFormats", 0, "url").nil?
LOGGER.warn("Missing URLs for adaptive formats, falling back to other YT clients.") Log.warn { "Missing URLs for adaptive formats, falling back to other YT clients." }
players_fallback = {YoutubeAPI::ClientType::TvSimply, YoutubeAPI::ClientType::WebMobile} players_fallback = {YoutubeAPI::ClientType::TvSimply, YoutubeAPI::ClientType::WebMobile}
players_fallback.each do |player_fallback| players_fallback.each do |player_fallback|
@ -129,7 +129,7 @@ def extract_video_info(video_id : String)
break break
end end
rescue InfoException rescue InfoException
next LOGGER.warn("Failed to fetch streams with #{player_fallback}") next Log.warn { "Failed to fetch streams with #{player_fallback}" }
end end
end end
@ -168,11 +168,11 @@ def extract_video_info(video_id : String)
end end
def try_fetch_streaming_data(id : String, client_config : YoutubeAPI::ClientConfig) : Hash(String, JSON::Any)? def try_fetch_streaming_data(id : String, client_config : YoutubeAPI::ClientConfig) : Hash(String, JSON::Any)?
LOGGER.debug("try_fetch_streaming_data: [#{id}] Using #{client_config.client_type} client.") Log.debug { "try_fetch_streaming_data: [#{id}] Using #{client_config.client_type} client." }
response = YoutubeAPI.player(video_id: id, params: "2AMB", client_config: client_config) response = YoutubeAPI.player(video_id: id, params: "2AMB", client_config: client_config)
playability_status = response["playabilityStatus"]["status"] playability_status = response["playabilityStatus"]["status"]
LOGGER.debug("try_fetch_streaming_data: [#{id}] Got playabilityStatus == #{playability_status}.") Log.debug { "try_fetch_streaming_data: [#{id}] Got playabilityStatus == #{playability_status}." }
if id != response.dig?("videoDetails", "videoId") if id != response.dig?("videoDetails", "videoId")
# YouTube may return a different video player response than expected. # YouTube may return a different video player response than expected.
@ -269,7 +269,7 @@ def parse_video_info(video_id : String, player_response : Hash(String, JSON::Any
# Related videos # Related videos
LOGGER.debug("extract_video_info: parsing related videos...") Log.debug { "parse_video_info: parsing related videos..." }
related = [] of JSON::Any related = [] of JSON::Any
@ -336,8 +336,8 @@ def parse_video_info(video_id : String, player_response : Hash(String, JSON::Any
.try &.dig?("accessibility", "accessibilityData", "label") .try &.dig?("accessibility", "accessibilityData", "label")
likes = likes_txt.as_s.gsub(/\D/, "").to_i64? if likes_txt likes = likes_txt.as_s.gsub(/\D/, "").to_i64? if likes_txt
LOGGER.trace("extract_video_info: Found \"likes\" button. Button text is \"#{likes_txt}\"") Log.trace { "parse_video_info: Found \"likes\" button. Button text is \"#{likes_txt}\"" }
LOGGER.debug("extract_video_info: Likes count is #{likes}") if likes Log.debug { "parse_video_info: Likes count is #{likes}" } if likes
end end
end end
@ -485,7 +485,7 @@ private def convert_url(fmt)
url = URI.parse(cfr["url"]) url = URI.parse(cfr["url"])
params = url.query_params params = url.query_params
LOGGER.debug("convert_url: Decoding '#{cfr}'") Log.debug { "convert_url: Decoding '#{cfr}'" }
unsig = DECRYPT_FUNCTION.try &.decrypt_signature(cfr["s"]) unsig = DECRYPT_FUNCTION.try &.decrypt_signature(cfr["s"])
params[sp] = unsig if unsig params[sp] = unsig if unsig
@ -502,11 +502,11 @@ private def convert_url(fmt)
end end
url.query_params = params url.query_params = params
LOGGER.trace("convert_url: new url is '#{url}'") Log.trace { "convert_url: new url is '#{url}'" }
return url.to_s return url.to_s
rescue ex rescue ex
LOGGER.debug("convert_url: Error when parsing video URL") Log.debug { "convert_url: Error when parsing video URL" }
LOGGER.trace(ex.inspect_with_backtrace) Log.trace { ex.inspect_with_backtrace }
return "" return ""
end end

View File

@ -165,7 +165,7 @@ def get_ytimg_pool(subdomain)
if pool = YTIMG_POOLS[subdomain]? if pool = YTIMG_POOLS[subdomain]?
return pool return pool
else else
LOGGER.info("ytimg_pool: Creating a new HTTP pool for \"https://#{subdomain}.ytimg.com\"") Log.info { "get_ytimg_pool: Creating a new HTTP pool for \"https://#{subdomain}.ytimg.com\"" }
pool = YoutubeConnectionPool.new(URI.parse("https://#{subdomain}.ytimg.com"), capacity: CONFIG.pool_size) pool = YoutubeConnectionPool.new(URI.parse("https://#{subdomain}.ytimg.com"), capacity: CONFIG.pool_size)
YTIMG_POOLS[subdomain] = pool YTIMG_POOLS[subdomain] = pool

View File

@ -40,8 +40,8 @@ private module Parsers
begin begin
return parse_internal(*args) return parse_internal(*args)
rescue ex rescue ex
LOGGER.debug("#{{{@type.name}}}: Failed to render item.") Log.debug { "#{{{@type.name}}}: Failed to render item." }
LOGGER.debug("#{{{@type.name}}}: Got exception: #{ex.message}") Log.debug { "#{{{@type.name}}}: Got exception: #{ex.message}" }
ProblematicTimelineItem.new( ProblematicTimelineItem.new(
parse_exception: ex parse_exception: ex
) )
@ -1029,13 +1029,13 @@ def parse_item(item : JSON::Any, author_fallback : String? = "", author_id_fallb
# Each parser automatically validates the data given to see if the data is # Each parser automatically validates the data given to see if the data is
# applicable to itself. If not nil is returned and the next parser is attempted. # applicable to itself. If not nil is returned and the next parser is attempted.
ITEM_PARSERS.each do |parser| ITEM_PARSERS.each do |parser|
LOGGER.trace("parse_item: Attempting to parse item using \"#{parser.parser_name}\" (cycling...)") Log.trace { "parse_item: Attempting to parse item using \"#{parser.parser_name}\" (cycling...)" }
if result = parser.process(item, author_fallback) if result = parser.process(item, author_fallback)
LOGGER.debug("parse_item: Successfully parsed via #{parser.parser_name}") Log.debug { "parse_item: Successfully parsed via #{parser.parser_name}" }
return result return result
else else
LOGGER.trace("parse_item: Parser \"#{parser.parser_name}\" does not apply. Cycling to the next one...") Log.trace { "parse_item: Parser \"#{parser.parser_name}\" does not apply. Cycling to the next one..." }
end end
end end
end end
@ -1056,14 +1056,14 @@ def extract_items(initial_data : InitialData, &)
# This is identical to the parser cycling of parse_item(). # This is identical to the parser cycling of parse_item().
ITEM_CONTAINER_EXTRACTOR.each do |extractor| ITEM_CONTAINER_EXTRACTOR.each do |extractor|
LOGGER.trace("extract_items: Attempting to extract item container using \"#{extractor.extractor_name}\" (cycling...)") Log.trace { "extract_items: Attempting to extract item container using \"#{extractor.extractor_name}\" (cycling...)" }
if container = extractor.process(unpackaged_data) if container = extractor.process(unpackaged_data)
LOGGER.debug("extract_items: Successfully unpacked container with \"#{extractor.extractor_name}\"") Log.debug { "extract_items: Successfully unpacked container with \"#{extractor.extractor_name}\"" }
# Extract items in container # Extract items in container
container.each { |item| yield item } container.each { |item| yield item }
else else
LOGGER.trace("extract_items: Extractor \"#{extractor.extractor_name}\" does not apply. Cycling to the next one...") Log.trace { "extract_items: Extractor \"#{extractor.extractor_name}\" does not apply. Cycling to the next one..." }
end end
end end
end end

View File

@ -62,8 +62,8 @@ def has_verified_badge?(badges : JSON::Any?)
return false return false
rescue ex rescue ex
LOGGER.debug("Unable to parse owner badges. Got exception: #{ex.message}") Log.debug { "Unable to parse owner badges. Got exception: #{ex.message}" }
LOGGER.trace("Owner badges data: #{badges.to_json}") Log.trace { "Owner badges data: #{badges.to_json}" }
return false return false
end end

View File

@ -4,6 +4,7 @@
module YoutubeAPI module YoutubeAPI
extend self extend self
Log = ::Log.for(self)
# For Android versions, see https://en.wikipedia.org/wiki/Android_version_history # For Android versions, see https://en.wikipedia.org/wiki/Android_version_history
private ANDROID_APP_VERSION = "19.35.36" private ANDROID_APP_VERSION = "19.35.36"
@ -640,9 +641,9 @@ module YoutubeAPI
end end
# Logging # Logging
LOGGER.debug("YoutubeAPI: Using endpoint: \"#{endpoint}\"") Log.debug { "Using endpoint: \"#{endpoint}\"" }
LOGGER.trace("YoutubeAPI: ClientConfig: #{client_config}") Log.trace { "ClientConfig: #{client_config}" }
LOGGER.trace("YoutubeAPI: POST data: #{data}") Log.trace { "POST data: #{data}" }
# Send the POST request # Send the POST request
body = YT_POOL.client() do |client| body = YT_POOL.client() do |client|
@ -665,9 +666,9 @@ module YoutubeAPI
message = initial_data["error"]["message"].to_s.sub(/(\\n)+\^$/, "") message = initial_data["error"]["message"].to_s.sub(/(\\n)+\^$/, "")
# Logging # Logging
LOGGER.error("YoutubeAPI: Got error #{code} when requesting #{endpoint}") Log.error { "Got error #{code} when requesting #{endpoint}" }
LOGGER.error("YoutubeAPI: #{message}") Log.error { message }
LOGGER.info("YoutubeAPI: POST data was: #{data}") Log.info { "POST data was: #{data}" }
raise InfoException.new("Could not extract JSON. Youtube API returned \ raise InfoException.new("Could not extract JSON. Youtube API returned \
error #{code} with message:<br>\"#{message}\"") error #{code} with message:<br>\"#{message}\"")
@ -695,8 +696,8 @@ module YoutubeAPI
} }
# Logging # Logging
LOGGER.debug("Invidious companion: Using endpoint: \"#{endpoint}\"") Log.debug { "Invidious companion: Using endpoint: \"#{endpoint}\"" }
LOGGER.trace("Invidious companion: POST data: #{data}") Log.trace { "Invidious companion: POST data: #{data}" }
# Send the POST request # Send the POST request