diff --git a/config/config.example.yml b/config/config.example.yml
index 2b99345b2..7eb53283c 100644
--- a/config/config.example.yml
+++ b/config/config.example.yml
@@ -290,7 +290,7 @@ https_only: false
## Logging Verbosity. This is overridden if "-l LEVEL" or
## "--log-level=LEVEL" are passed on the command line.
##
-## Accepted values: All, Trace, Debug, Info, Warn, Error, Fatal, Off
+## Accepted values: Trace, Debug, Info, Notice, Warn, Error, Fatal, None
## Default: Info
##
#log_level: Info
diff --git a/shard.lock b/shard.lock
index 1265eda67..e7741c237 100644
--- a/shard.lock
+++ b/shard.lock
@@ -10,7 +10,7 @@ shards:
backtracer:
git: https://github.com/sija/backtracer.cr.git
- version: 1.2.2
+ version: 1.2.4
db:
git: https://github.com/crystal-lang/crystal-db.git
@@ -18,7 +18,7 @@ shards:
exception_page:
git: https://github.com/crystal-loot/exception_page.git
- version: 0.4.1
+ version: 0.5.0
http_proxy:
git: https://github.com/mamantoha/http_proxy.git
@@ -26,7 +26,7 @@ shards:
kemal:
git: https://github.com/kemalcr/kemal.git
- version: 1.6.0
+ version: 1.7.2
pg:
git: https://github.com/will/crystal-pg.git
diff --git a/shard.yml b/shard.yml
index 4dc8aa025..fb3470038 100644
--- a/shard.yml
+++ b/shard.yml
@@ -17,7 +17,7 @@ dependencies:
version: ~> 0.21.0
kemal:
github: kemalcr/kemal
- version: ~> 1.6.0
+ version: ~> 1.7.2
protodec:
github: iv-org/protodec
version: ~> 0.1.5
diff --git a/spec/parsers_helper.cr b/spec/parsers_helper.cr
index 6589acad8..b52ac2517 100644
--- a/spec/parsers_helper.cr
+++ b/spec/parsers_helper.cr
@@ -8,7 +8,6 @@ require "spectator"
require "../src/invidious/exceptions"
require "../src/invidious/helpers/macros"
-require "../src/invidious/helpers/logger"
require "../src/invidious/helpers/utils"
require "../src/invidious/videos"
@@ -19,9 +18,6 @@ require "../src/invidious/helpers/serialized_yt_data"
require "../src/invidious/yt_backend/extractors"
require "../src/invidious/yt_backend/extractors_utils"
-OUTPUT = File.open(File::NULL, "w")
-LOGGER = Invidious::LogHandler.new(OUTPUT, LogLevel::Off)
-
def load_mock(file) : Hash(String, JSON::Any)
file = File.join(__DIR__, "..", "mocks", file + ".json")
content = File.read(file)
diff --git a/src/invidious.cr b/src/invidious.cr
index 197b150ca..ed5c4ee0b 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -15,7 +15,6 @@
# along with this program. If not, see .
require "digest/md5"
-require "file_utils"
# Require kemal, then our own overrides
require "kemal"
@@ -30,6 +29,7 @@ require "xml"
require "yaml"
require "compress/zip"
require "protodec/utils"
+require "log"
require "./invidious/database/*"
require "./invidious/database/migrations/*"
@@ -128,8 +128,8 @@ Kemal.config.extra_options do |parser|
parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{CONFIG.output})") do |output|
CONFIG.output = output
end
- parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{CONFIG.log_level})") do |log_level|
- CONFIG.log_level = LogLevel.parse(log_level)
+ parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{Log::Severity.values} (default: #{CONFIG.log_level})") do |log_level|
+ CONFIG.log_level = Log::Severity.parse(log_level)
end
parser.on("-k", "--colorize", "Colorize logs") do
CONFIG.colorize_logs = true
@@ -146,11 +146,23 @@ end
Kemal::CLI.new ARGV
-if CONFIG.output.upcase != "STDOUT"
- FileUtils.mkdir_p(File.dirname(CONFIG.output))
+Log.setup do |c|
+ colorize = CONFIG.colorize_logs
+ output = CONFIG.output
+
+ backend = Log::IOBackend.new(formatter: Invidious::Logger.formatter(colorize))
+ if output.upcase != "STDOUT"
+ Dir.mkdir_p(File.dirname(output))
+ io = File.open(output, "wb")
+ colorize = false
+ backend = Log::IOBackend.new(io, formatter: Invidious::Logger.formatter(colorize))
+    puts("File output enabled in config, logs will be saved in '#{output}'")
+ end
+
+ c.bind "*", CONFIG.log_level, backend
+ c.bind "db.*", :none, backend
+ c.bind "http.*", :none, backend
end
-OUTPUT = CONFIG.output.upcase == "STDOUT" ? STDOUT : File.open(CONFIG.output, mode: "a")
-LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level, CONFIG.colorize_logs)
# Check table integrity
Invidious::Database.check_integrity(CONFIG)
@@ -244,11 +256,13 @@ add_handler FilteredCompressHandler.new
add_handler APIHandler.new
add_handler AuthHandler.new
add_handler DenyFrame.new
+# Turn off default Kemal logging since we use our own handler based on Kemal::RequestLogHandler
+Kemal.config.logging = false
+add_handler Invidious::RequestLogHandler.new
add_context_storage_type(Array(String))
add_context_storage_type(Preferences)
add_context_storage_type(Invidious::User)
-Kemal.config.logger = LOGGER
Kemal.config.app_name = "Invidious"
# Use in kemal's production mode.
diff --git a/src/invidious/channels/channels.cr b/src/invidious/channels/channels.cr
index 659823255..81f664b78 100644
--- a/src/invidious/channels/channels.cr
+++ b/src/invidious/channels/channels.cr
@@ -156,8 +156,8 @@ def get_channel(id) : InvidiousChannel
end
def fetch_channel(ucid, pull_all_videos : Bool)
- LOGGER.debug("fetch_channel: #{ucid}")
- LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}")
+ Log.debug { "fetch_channel: #{ucid}" }
+ Log.trace { "fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}" }
namespaces = {
"yt" => "http://www.youtube.com/xml/schemas/2015",
@@ -165,9 +165,9 @@ def fetch_channel(ucid, pull_all_videos : Bool)
"default" => "http://www.w3.org/2005/Atom",
}
- LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
+ Log.trace { "fetch_channel: #{ucid} : Downloading RSS feed" }
rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
- LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
+ Log.trace { "fetch_channel: #{ucid} : Parsing RSS feed" }
rss = XML.parse(rss)
author = rss.xpath_node("//default:feed/default:title", namespaces)
@@ -184,7 +184,7 @@ def fetch_channel(ucid, pull_all_videos : Bool)
auto_generated = true
end
- LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
+ Log.trace { "fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}" }
channel = InvidiousChannel.new({
id: ucid,
@@ -194,10 +194,10 @@ def fetch_channel(ucid, pull_all_videos : Bool)
subscribed: nil,
})
- LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
+ Log.trace { "fetch_channel: #{ucid} : Downloading channel videos page" }
videos, continuation = IV::Channel::Tabs.get_videos(channel)
- LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
+ Log.trace { "fetch_channel: #{ucid} : Extracting videos from channel RSS feed" }
rss.xpath_nodes("//default:feed/default:entry", namespaces).each do |entry|
video_id = entry.xpath_node("yt:videoId", namespaces).not_nil!.content
title = entry.xpath_node("default:title", namespaces).not_nil!.content
@@ -241,17 +241,17 @@ def fetch_channel(ucid, pull_all_videos : Bool)
views: views,
})
- LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
+ Log.trace { "fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video" }
# We don't include the 'premiere_timestamp' here because channel pages don't include them,
# meaning the above timestamp is always null
was_insert = Invidious::Database::ChannelVideos.insert(video)
if was_insert
- LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
+ Log.trace { "fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions" }
NOTIFICATION_CHANNEL.send(VideoNotification.from_video(video))
else
- LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
+ Log.trace { "fetch_channel: #{ucid} : video #{video_id} : Updated" }
end
end
diff --git a/src/invidious/config.cr b/src/invidious/config.cr
index 36f09d282..05280871b 100644
--- a/src/invidious/config.cr
+++ b/src/invidious/config.cr
@@ -75,6 +75,7 @@ end
class Config
include YAML::Serializable
+ CLog = ::Log.for(self)
class CompanionConfig
include YAML::Serializable
@@ -98,8 +99,8 @@ class Config
property feed_threads : Int32 = 1
# Log file path or STDOUT
property output : String = "STDOUT"
- # Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr
- property log_level : LogLevel = LogLevel::Info
+ # Default log level, valid YAML values are ints and strings, see https://crystal-lang.org/api/1.17.1/Log/Severity.html#enum-members
+ property log_level : Log::Severity = Log::Severity::Info
# Enables colors in logs. Useful for debugging purposes
property colorize_logs : Bool = false
# Database configuration with separate parameters (username, hostname, etc)
@@ -249,14 +250,14 @@ class Config
# Exit on fail
if !success
- puts %(Config.{{ivar.id}} failed to parse #{env_value} as {{ivar.type}})
+ CLog.fatal { %({{ivar.id}} failed to parse #{env_value} as {{ivar.type}}) }
exit(1)
end
end
# Warn when any config attribute is set to "CHANGE_ME!!"
if config.{{ivar.id}} == "CHANGE_ME!!"
- puts "Config: The value of '#{ {{ivar.stringify}} }' needs to be changed!!"
+ CLog.fatal { "The value of '#{ {{ivar.stringify}} }' needs to be changed!!" }
exit(1)
end
{% end %}
@@ -264,16 +265,16 @@ class Config
if config.invidious_companion.present?
# invidious_companion and signature_server can't work together
if config.signature_server
- puts "Config: You can not run inv_sig_helper and invidious_companion at the same time."
+ CLog.fatal { "You can not run inv_sig_helper and invidious_companion at the same time." }
exit(1)
elsif config.invidious_companion_key.empty?
- puts "Config: Please configure a key if you are using invidious companion."
+ CLog.fatal { "Please configure a key if you are using invidious companion." }
exit(1)
elsif config.invidious_companion_key == "CHANGE_ME!!"
- puts "Config: The value of 'invidious_companion_key' needs to be changed!!"
+ CLog.fatal { "The value of 'invidious_companion_key' needs to be changed!!" }
exit(1)
elsif config.invidious_companion_key.size != 16
- puts "Config: The value of 'invidious_companion_key' needs to be a size of 16 characters."
+ CLog.fatal { "The value of 'invidious_companion_key' needs to be a size of 16 characters." }
exit(1)
end
@@ -285,15 +286,15 @@ class Config
end
end
elsif config.signature_server
- puts("WARNING: inv-sig-helper is deprecated. Please switch to Invidious companion: https://docs.invidious.io/companion-installation/")
+ CLog.warn { "inv-sig-helper is deprecated. Please switch to Invidious companion: https://docs.invidious.io/companion-installation/" }
else
- puts("WARNING: Invidious companion is required to view and playback videos. For more information see https://docs.invidious.io/companion-installation/")
+ CLog.warn { "Invidious companion is required to view and playback videos. For more information see https://docs.invidious.io/companion-installation/" }
end
# HMAC_key is mandatory
# See: https://github.com/iv-org/invidious/issues/3854
if config.hmac_key.empty?
- puts "Config: 'hmac_key' is required/can't be empty"
+ CLog.fatal { "'hmac_key' is required/can't be empty" }
exit(1)
end
@@ -309,7 +310,7 @@ class Config
path: db.dbname,
)
else
- puts "Config: Either database_url or db.* is required"
+ CLog.fatal { "Either database_url or db.* is required" }
exit(1)
end
end
@@ -317,17 +318,17 @@ class Config
# Check if the socket configuration is valid
if sb = config.socket_binding
if sb.path.ends_with?("/") || File.directory?(sb.path)
- puts "Config: The socket path " + sb.path + " must not be a directory!"
+ CLog.fatal { "The socket path " + sb.path + " must not be a directory!" }
exit(1)
end
d = File.dirname(sb.path)
if !File.directory?(d)
- puts "Config: Socket directory " + sb.path + " does not exist or is not a directory!"
+ CLog.fatal { "Socket directory " + sb.path + " does not exist or is not a directory!" }
exit(1)
end
p = sb.permissions.to_i?(base: 8)
if !p || p < 0 || p > 0o777
- puts "Config: Socket permissions must be an octal between 0 and 777!"
+ CLog.fatal { "Socket permissions must be an octal between 0 and 777!" }
exit(1)
end
end
diff --git a/src/invidious/database/base.cr b/src/invidious/database/base.cr
index 0fb1b6af0..bd00cd68a 100644
--- a/src/invidious/database/base.cr
+++ b/src/invidious/database/base.cr
@@ -2,6 +2,7 @@ require "pg"
module Invidious::Database
extend self
+ Log = ::Log.for(self)
# Checks table integrity
#
@@ -33,7 +34,7 @@ module Invidious::Database
return # TODO
if !PG_DB.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
- LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
+ Log.info { "check_enum: CREATE TYPE #{enum_name}" }
PG_DB.using_connection do |conn|
conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
@@ -46,7 +47,7 @@ module Invidious::Database
begin
PG_DB.exec("SELECT * FROM #{table_name} LIMIT 0")
rescue ex
- LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
+ Log.info { "check_table: CREATE TABLE #{table_name}" }
PG_DB.using_connection do |conn|
conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@@ -66,7 +67,7 @@ module Invidious::Database
if name != column_array[i]?
if !column_array[i]?
new_column = column_types.select(&.starts_with?(name))[0]
- LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+ Log.info { "check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}" }
PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
next
end
@@ -84,29 +85,29 @@ module Invidious::Database
# There's a column we didn't expect
if !new_column
- LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+ Log.info { "check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}" }
PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
column_array = get_column_array(PG_DB, table_name)
next
end
- LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+ Log.info { "check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}" }
PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
- LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+ Log.info { "check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}" }
PG_DB.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
- LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+ Log.info { "check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE" }
PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
- LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+ Log.info { "check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}" }
PG_DB.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
column_array = get_column_array(PG_DB, table_name)
end
else
- LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+ Log.info { "check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE" }
PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
end
end
@@ -116,7 +117,7 @@ module Invidious::Database
column_array.each do |column|
if !struct_array.includes? column
- LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+ Log.info { "check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE" }
PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
end
end
diff --git a/src/invidious/database/migrator.cr b/src/invidious/database/migrator.cr
index 660c32034..054886ed2 100644
--- a/src/invidious/database/migrator.cr
+++ b/src/invidious/database/migrator.cr
@@ -14,12 +14,12 @@ class Invidious::Database::Migrator
.each do |migration|
next if versions.includes?(migration.version)
- puts "Running migration: #{migration.class.name}"
+ Log.info { "Running migration: #{migration.class.name}" }
migration.migrate
ran_migration = true
end
- puts "No migrations to run." unless ran_migration
+ Log.info { "No migrations to run." } unless ran_migration
end
def pending_migrations? : Bool
diff --git a/src/invidious/helpers/i18n.cr b/src/invidious/helpers/i18n.cr
index bca2edda9..822fb6441 100644
--- a/src/invidious/helpers/i18n.cr
+++ b/src/invidious/helpers/i18n.cr
@@ -98,7 +98,7 @@ def translate(locale : String?, key : String, text : String | Hash(String, Strin
# Log a warning if "key" doesn't exist in en-US locale and return
# that key as the text, so this is more or less transparent to the user.
if !LOCALES["en-US"].has_key?(key)
- LOGGER.warn("i18n: Missing translation key \"#{key}\"")
+ Log.warn { "Missing translation key \"#{key}\"" }
return key
end
@@ -165,7 +165,7 @@ def translate_count(locale : String, key : String, count : Int, format = NumberF
translation = translate_count("en-US", key, count)
else
# Return key if we're already in english, as the translation is missing
- LOGGER.warn("i18n: Missing translation key \"#{key}\"")
+ Log.warn { "Missing translation key \"#{key}\"" }
return key
end
end
diff --git a/src/invidious/helpers/i18next.cr b/src/invidious/helpers/i18next.cr
index 684e6d145..023fae97c 100644
--- a/src/invidious/helpers/i18next.cr
+++ b/src/invidious/helpers/i18next.cr
@@ -145,7 +145,7 @@ module I18next::Plurals
if version > 4 || version == 0
raise "Invalid i18next version: v#{version}."
elsif version == 4
- # Logger.error("Unsupported i18next version: v4. Falling back to v3")
+ # Log.error { "Unsupported i18next version: v4. Falling back to v3" }
@version = 3_u8
else
@version = version.to_u8
diff --git a/src/invidious/helpers/logger.cr b/src/invidious/helpers/logger.cr
index 033495955..b3f9889f8 100644
--- a/src/invidious/helpers/logger.cr
+++ b/src/invidious/helpers/logger.cr
@@ -1,68 +1,71 @@
require "colorize"
-enum LogLevel
- All = 0
- Trace = 1
- Debug = 2
- Info = 3
- Warn = 4
- Error = 5
- Fatal = 6
- Off = 7
-end
+module Invidious::Logger
+ extend self
-class Invidious::LogHandler < Kemal::BaseLogHandler
- def initialize(@io : IO = STDOUT, @level = LogLevel::Debug, use_color : Bool = true)
+ def formatter(use_color : Bool = true)
Colorize.enabled = use_color
Colorize.on_tty_only!
+
+ formatter = ::Log::Formatter.new do |entry, io|
+ message = entry.message
+ severity = entry.severity
+ data = entry.data
+ source = entry.source
+ timestamp = entry.timestamp
+
+ io << (use_color ? timestamp.colorize(:dark_gray) : timestamp) << " "
+ io << (use_color ? colorize_severity(severity) : severity.label) << " "
+ io << (use_color ? source.colorize(:dark_gray) : source) << ": " if !source.empty?
+ io << message
+ if !data.empty?
+ io << " "
+ data.each do |dat|
+ io << (use_color ? dat[0].to_s.colorize(:light_cyan) : dat[0].to_s)
+ io << "="
+ io << dat[1].to_s
+ end
+ end
+ end
+
+ return formatter
end
+ private def colorize_severity(severity : Log::Severity)
+ case severity
+ in Log::Severity::Trace then severity.label.colorize(:cyan)
+ in Log::Severity::Info then severity.label.colorize(:green)
+ in Log::Severity::Notice then severity.label.colorize(:light_yellow)
+ in Log::Severity::Warn then severity.label.colorize(:yellow)
+ in Log::Severity::Error then severity.label.colorize(:red)
+ in Log::Severity::Fatal then severity.label.colorize(:red)
+ in Log::Severity::Debug then severity.label
+ in Log::Severity::None then severity.label
+ end
+ end
+end
+
+class Invidious::RequestLogHandler < Kemal::RequestLogHandler
+ Log = ::Log.for(Kemal)
+
def call(context : HTTP::Server::Context)
elapsed_time = Time.measure { call_next(context) }
elapsed_text = elapsed_text(elapsed_time)
-
- # Default: full path with parameters
requested_url = context.request.resource
# Try not to log search queries passed as GET parameters during normal use
# (They will still be logged if log level is 'Debug' or 'Trace')
- if @level > LogLevel::Debug && (
+ if CONFIG.log_level > ::Log::Severity::Debug && (
requested_url.downcase.includes?("search") || requested_url.downcase.includes?("q=")
)
# Log only the path
requested_url = context.request.path
end
- info("#{context.response.status_code} #{context.request.method} #{requested_url} #{elapsed_text}")
-
+ Log.info { "#{context.response.status_code} #{context.request.method} #{requested_url} #{elapsed_text}" }
context
end
- def write(message : String)
- @io << message
- @io.flush
- end
-
- def color(level)
- case level
- when LogLevel::Trace then :cyan
- when LogLevel::Debug then :green
- when LogLevel::Info then :white
- when LogLevel::Warn then :yellow
- when LogLevel::Error then :red
- when LogLevel::Fatal then :magenta
- else :default
- end
- end
-
- {% for level in %w(trace debug info warn error fatal) %}
- def {{level.id}}(message : String)
- if LogLevel::{{level.id.capitalize}} >= @level
- puts("#{Time.utc} [{{level.id}}] #{message}".colorize(color(LogLevel::{{level.id.capitalize}})))
- end
- end
- {% end %}
-
private def elapsed_text(elapsed)
millis = elapsed.total_milliseconds
return "#{millis.round(2)}ms" if millis >= 1
diff --git a/src/invidious/helpers/sig_helper.cr b/src/invidious/helpers/sig_helper.cr
index 6d198a427..65c8618f8 100644
--- a/src/invidious/helpers/sig_helper.cr
+++ b/src/invidious/helpers/sig_helper.cr
@@ -73,6 +73,8 @@ module Invidious::SigHelper
# ----------------------
class Client
+ Log = ::Log.for(self)
+
@mux : Multiplexor
def initialize(uri_or_path)
@@ -156,8 +158,8 @@ module Invidious::SigHelper
slice = channel.receive
return yield slice
rescue ex
- LOGGER.debug("SigHelper: Error when sending a request")
- LOGGER.trace(ex.inspect_with_backtrace)
+ Log.debug { "Error when sending a request" }
+ Log.trace { ex.inspect_with_backtrace }
return nil
end
end
@@ -167,6 +169,8 @@ module Invidious::SigHelper
# ---------------------
class Multiplexor
+ Log = ::Log.for(self)
+
alias TransactionID = UInt32
record Transaction, channel = ::Channel(Bytes).new
@@ -185,22 +189,22 @@ module Invidious::SigHelper
def listen : Nil
raise "Socket is closed" if @conn.closed?
- LOGGER.debug("SigHelper: Multiplexor listening")
+ Log.debug { "Multiplexor listening" }
spawn do
loop do
begin
receive_data
rescue ex
- LOGGER.info("SigHelper: Connection to helper died with '#{ex.message}' trying to reconnect...")
+ Log.info { "Connection to helper died with '#{ex.message}' trying to reconnect..." }
# We close the socket because for some reason is not closed.
@conn.close
loop do
begin
@conn = Connection.new(@uri_or_path)
- LOGGER.info("SigHelper: Reconnected to SigHelper!")
+ Log.info { "Reconnected to SigHelper!" }
rescue ex
- LOGGER.debug("SigHelper: Reconnection to helper unsuccessful with error '#{ex.message}'. Retrying")
+ Log.debug { "Reconnection to helper unsuccessful with error '#{ex.message}'. Retrying" }
sleep 500.milliseconds
next
end
@@ -238,7 +242,7 @@ module Invidious::SigHelper
if transaction = @queue.delete(transaction_id)
# Remove transaction from queue and send data to the channel
transaction.channel.send(slice)
- LOGGER.trace("SigHelper: Transaction unqueued and data sent to channel")
+ Log.trace { "Transaction unqueued and data sent to channel" }
else
raise Exception.new("SigHelper: Received transaction was not in queue")
end
@@ -251,7 +255,7 @@ module Invidious::SigHelper
transaction_id = @conn.read_bytes(UInt32, NetworkEndian)
length = @conn.read_bytes(UInt32, NetworkEndian)
- LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} / length #{length}")
+ Log.trace { "Recv transaction 0x#{transaction_id.to_s(base: 16)} / length #{length}" }
if length > 67_000
raise Exception.new("SigHelper: Packet longer than expected (#{length})")
@@ -261,15 +265,15 @@ module Invidious::SigHelper
slice = Bytes.new(length)
@conn.read(slice) if length > 0
- LOGGER.trace("SigHelper: payload = #{slice}")
- LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} - Done")
+ Log.trace { "payload = #{slice}" }
+ Log.trace { "Recv transaction 0x#{transaction_id.to_s(base: 16)} - Done" }
return transaction_id, slice
end
# Write a single packet to the socket
private def write_packet(transaction_id : TransactionID, request : Request)
- LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} / opcode #{request.opcode}")
+ Log.trace { "Send transaction 0x#{transaction_id.to_s(base: 16)} / opcode #{request.opcode}" }
io = IO::Memory.new(1024)
io.write_bytes(request.opcode.to_u8, NetworkEndian)
@@ -282,11 +286,13 @@ module Invidious::SigHelper
@conn.send(io)
@conn.flush
- LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} - Done")
+ Log.trace { "Send transaction 0x#{transaction_id.to_s(base: 16)} - Done" }
end
end
class Connection
+ Log = ::Log.for(self)
+
@socket : UNIXSocket | TCPSocket
{% if flag?(:advanced_debug) %}
@@ -309,7 +315,7 @@ module Invidious::SigHelper
uri = URI.parse("tcp://#{host_or_path}")
@socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!)
end
- LOGGER.info("SigHelper: Using helper at '#{host_or_path}'")
+ Log.info { "Using helper at '#{host_or_path}'" }
{% if flag?(:advanced_debug) %}
@io = IO::Hexdump.new(@socket, output: STDERR, read: true, write: true)
diff --git a/src/invidious/helpers/signatures.cr b/src/invidious/helpers/signatures.cr
index 82a28fc09..c4c5ff402 100644
--- a/src/invidious/helpers/signatures.cr
+++ b/src/invidious/helpers/signatures.cr
@@ -2,6 +2,8 @@ require "http/params"
require "./sig_helper"
class Invidious::DecryptFunction
+ Log = ::Log.for(self).for("SignatureDecryption")
+
@last_update : Time = Time.utc - 42.days
def initialize(uri_or_path)
@@ -18,7 +20,7 @@ class Invidious::DecryptFunction
update_time_elapsed = (@client.get_player_timestamp || 301).seconds
if update_time_elapsed > 5.minutes
- LOGGER.debug("Signature: Player might be outdated, updating")
+ Log.debug { "Player might be outdated, updating" }
@client.force_update
@last_update = Time.utc
end
@@ -28,8 +30,8 @@ class Invidious::DecryptFunction
self.check_update
return @client.decrypt_n_param(n)
rescue ex
- LOGGER.debug(ex.message || "Signature: Unknown error")
- LOGGER.trace(ex.inspect_with_backtrace)
+ Log.debug { ex.message || "Unknown error" }
+ Log.trace { ex.inspect_with_backtrace }
return nil
end
@@ -37,8 +39,8 @@ class Invidious::DecryptFunction
self.check_update
return @client.decrypt_sig(str)
rescue ex
- LOGGER.debug(ex.message || "Signature: Unknown error")
- LOGGER.trace(ex.inspect_with_backtrace)
+ Log.debug { ex.message || "Unknown error" }
+ Log.trace { ex.inspect_with_backtrace }
return nil
end
@@ -46,8 +48,8 @@ class Invidious::DecryptFunction
self.check_update
return @client.get_signature_timestamp
rescue ex
- LOGGER.debug(ex.message || "Signature: Unknown error")
- LOGGER.trace(ex.inspect_with_backtrace)
+ Log.debug { ex.message || "Unknown error" }
+ Log.trace { ex.inspect_with_backtrace }
return nil
end
end
diff --git a/src/invidious/jobs/base_job.cr b/src/invidious/jobs/base_job.cr
index f90f0bfeb..69f711a17 100644
--- a/src/invidious/jobs/base_job.cr
+++ b/src/invidious/jobs/base_job.cr
@@ -6,6 +6,8 @@ abstract class Invidious::Jobs::BaseJob
# and to create the associated instance property.
#
macro inherited
+ Log = ::Log.for(self)
+
macro finished
# This config structure can be expanded as required.
struct Config
diff --git a/src/invidious/jobs/clear_expired_items_job.cr b/src/invidious/jobs/clear_expired_items_job.cr
index 17191aac8..197d29b58 100644
--- a/src/invidious/jobs/clear_expired_items_job.cr
+++ b/src/invidious/jobs/clear_expired_items_job.cr
@@ -5,7 +5,7 @@ class Invidious::Jobs::ClearExpiredItemsJob < Invidious::Jobs::BaseJob
loop do
failed = false
- LOGGER.info("jobs: running ClearExpiredItems job")
+    Log.info { "running ClearExpiredItems job" }
begin
Invidious::Database::Videos.delete_expired
@@ -16,10 +16,10 @@ class Invidious::Jobs::ClearExpiredItemsJob < Invidious::Jobs::BaseJob
# Retry earlier than scheduled on DB error
if failed
- LOGGER.info("jobs: ClearExpiredItems failed. Retrying in 10 minutes.")
+ Log.info { "ClearExpiredItems failed. Retrying in 10 minutes." }
sleep 10.minutes
else
- LOGGER.info("jobs: ClearExpiredItems done.")
+ Log.info { "ClearExpiredItems done." }
sleep 1.hour
end
end
diff --git a/src/invidious/jobs/instance_refresh_job.cr b/src/invidious/jobs/instance_refresh_job.cr
index cb4280b96..b3b607a8e 100644
--- a/src/invidious/jobs/instance_refresh_job.cr
+++ b/src/invidious/jobs/instance_refresh_job.cr
@@ -12,7 +12,7 @@ class Invidious::Jobs::InstanceListRefreshJob < Invidious::Jobs::BaseJob
def begin
loop do
refresh_instances
- LOGGER.info("InstanceListRefreshJob: Done, sleeping for 30 minutes")
+ Log.info { "Done, sleeping for 30 minutes" }
sleep 30.minute
Fiber.yield
end
@@ -43,9 +43,9 @@ class Invidious::Jobs::InstanceListRefreshJob < Invidious::Jobs::BaseJob
filtered_instance_list << {info["region"].as_s, domain.as_s}
rescue ex
if domain
- LOGGER.info("InstanceListRefreshJob: failed to parse information from '#{domain}' because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ")
+ Log.info { "failed to parse information from '#{domain}' because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" " }
else
- LOGGER.info("InstanceListRefreshJob: failed to parse information from an instance because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ")
+ Log.info { "failed to parse information from an instance because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" " }
end
end
end
diff --git a/src/invidious/jobs/notification_job.cr b/src/invidious/jobs/notification_job.cr
index 968ee47f8..4c1fecf5e 100644
--- a/src/invidious/jobs/notification_job.cr
+++ b/src/invidious/jobs/notification_job.cr
@@ -57,7 +57,7 @@ class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
spawn do
loop do
begin
- LOGGER.debug("NotificationJob: waking up")
+ Log.debug { "waking up" }
cloned = {} of String => Set(VideoNotification)
notify_mutex.synchronize do
cloned = to_notify.clone
@@ -69,7 +69,7 @@ class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
next
end
- LOGGER.info("NotificationJob: updating channel #{channel_id} with #{notifications.size} notifications")
+ Log.info { "updating channel #{channel_id} with #{notifications.size} notifications" }
if CONFIG.enable_user_notifications
video_ids = notifications.map(&.video_id)
Invidious::Database::Users.add_multiple_notifications(channel_id, video_ids)
@@ -89,9 +89,9 @@ class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
end
end
- LOGGER.trace("NotificationJob: Done, sleeping")
+ Log.trace { "Done, sleeping" }
rescue ex
- LOGGER.error("NotificationJob: #{ex.message}")
+ Log.error { ex.message }
end
sleep 1.minute
Fiber.yield
diff --git a/src/invidious/jobs/refresh_channels_job.cr b/src/invidious/jobs/refresh_channels_job.cr
index 80812a63c..18df9e553 100644
--- a/src/invidious/jobs/refresh_channels_job.cr
+++ b/src/invidious/jobs/refresh_channels_job.cr
@@ -12,37 +12,37 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
backoff = 2.minutes
loop do
- LOGGER.debug("RefreshChannelsJob: Refreshing all channels")
+ Log.debug { "Refreshing all channels" }
PG_DB.query("SELECT id FROM channels ORDER BY updated") do |rs|
rs.each do
id = rs.read(String)
if active_fibers >= lim_fibers
- LOGGER.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
+ Log.trace { "Fiber limit reached, waiting..." }
if active_channel.receive
- LOGGER.trace("RefreshChannelsJob: Fiber limit ok, continuing")
+ Log.trace { "Fiber limit ok, continuing" }
active_fibers -= 1
end
end
- LOGGER.debug("RefreshChannelsJob: #{id} : Spawning fiber")
+ Log.debug { "#{id} : Spawning fiber" }
active_fibers += 1
spawn do
begin
- LOGGER.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
+ Log.trace { "#{id} fiber : Fetching channel" }
channel = fetch_channel(id, pull_all_videos: CONFIG.full_refresh)
lim_fibers = max_fibers
- LOGGER.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
+ Log.trace { "#{id} fiber : Updating DB" }
Invidious::Database::Channels.update_author(id, channel.author)
rescue ex
- LOGGER.error("RefreshChannelsJob: #{id} : #{ex.message}")
+ Log.error { "#{id} : #{ex.message}" }
if ex.message == "Deleted or invalid channel"
Invidious::Database::Channels.update_mark_deleted(id)
else
lim_fibers = 1
- LOGGER.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
+ Log.error { "#{id} fiber : backing off for #{backoff}s" }
sleep backoff
if backoff < 1.days
backoff += backoff
@@ -51,14 +51,14 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
end
end
ensure
- LOGGER.debug("RefreshChannelsJob: #{id} fiber : Done")
+ Log.debug { "#{id} fiber : Done" }
active_channel.send(true)
end
end
end
end
- LOGGER.debug("RefreshChannelsJob: Done, sleeping for #{CONFIG.channel_refresh_interval}")
+ Log.debug { "Done, sleeping for #{CONFIG.channel_refresh_interval}" }
sleep CONFIG.channel_refresh_interval
Fiber.yield
end
diff --git a/src/invidious/jobs/refresh_feeds_job.cr b/src/invidious/jobs/refresh_feeds_job.cr
index 4f8130dfd..3742f1ff8 100644
--- a/src/invidious/jobs/refresh_feeds_job.cr
+++ b/src/invidious/jobs/refresh_feeds_job.cr
@@ -28,14 +28,14 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
column_array = Invidious::Database.get_column_array(db, view_name)
ChannelVideo.type_array.each_with_index do |name, i|
if name != column_array[i]?
- LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
+ Log.info { "DROP MATERIALIZED VIEW #{view_name}" }
db.exec("DROP MATERIALIZED VIEW #{view_name}")
raise "view does not exist"
end
end
if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))"
- LOGGER.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
+ Log.info { "Materialized view #{view_name} is out-of-date, recreating..." }
db.exec("DROP MATERIALIZED VIEW #{view_name}")
end
@@ -47,18 +47,18 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"
db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
- LOGGER.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
+ Log.info { "RENAME MATERIALIZED VIEW #{legacy_view_name}" }
db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
rescue ex
begin
# While iterating through, we may have an email stored from a deleted account
if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
- LOGGER.info("RefreshFeedsJob: CREATE #{view_name}")
+ Log.info { "CREATE #{view_name}" }
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}")
db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email)
end
rescue ex
- LOGGER.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
+ Log.error { "REFRESH #{email} : #{ex.message}" }
end
end
end
diff --git a/src/invidious/jobs/subscribe_to_feeds_job.cr b/src/invidious/jobs/subscribe_to_feeds_job.cr
index 8584fb9cf..abc86b2ae 100644
--- a/src/invidious/jobs/subscribe_to_feeds_job.cr
+++ b/src/invidious/jobs/subscribe_to_feeds_job.cr
@@ -32,10 +32,10 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
response = subscribe_pubsub(ucid, hmac_key)
if response.status_code >= 400
- LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
+ Log.error { "#{ucid} : #{response.body}" }
end
rescue ex
- LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
+ Log.error { "#{ucid} : #{ex.message}" }
end
active_channel.send(true)
diff --git a/src/invidious/routes/feeds.cr b/src/invidious/routes/feeds.cr
index 070c96eb8..84ab42443 100644
--- a/src/invidious/routes/feeds.cr
+++ b/src/invidious/routes/feeds.cr
@@ -403,7 +403,7 @@ module Invidious::Routes::Feeds
signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
- LOGGER.error("/feed/webhook/#{token} : Invalid signature")
+ Log.error { "/feed/webhook/#{token} : Invalid signature" }
haltf env, status_code: 200
end
diff --git a/src/invidious/routes/watch.cr b/src/invidious/routes/watch.cr
index 8a4fa2468..c88100eb4 100644
--- a/src/invidious/routes/watch.cr
+++ b/src/invidious/routes/watch.cr
@@ -54,10 +54,10 @@ module Invidious::Routes::Watch
begin
video = get_video(id, region: params.region)
rescue ex : NotFoundException
- LOGGER.error("get_video not found: #{id} : #{ex.message}")
+ Log.error { "get_video not found: #{id} : #{ex.message}" }
return error_template(404, ex)
rescue ex
- LOGGER.error("get_video: #{id} : #{ex.message}")
+ Log.error { "get_video: #{id} : #{ex.message}" }
return error_template(500, ex)
end
diff --git a/src/invidious/videos/parser.cr b/src/invidious/videos/parser.cr
index 6b1dedd69..8fd196940 100644
--- a/src/invidious/videos/parser.cr
+++ b/src/invidious/videos/parser.cr
@@ -6,7 +6,7 @@ require "json"
#
# TODO: "compactRadioRenderer" (Mix) and
# TODO: Use a proper struct/class instead of a hacky JSON object
-def parse_related_video(related : JSON::Any) : Hash(String, JSON::Any)?
+private def parse_related_video(related : JSON::Any) : Hash(String, JSON::Any)?
return nil if !related["videoId"]?
# The compact renderer has video length in seconds, where the end
@@ -34,7 +34,7 @@ def parse_related_video(related : JSON::Any) : Hash(String, JSON::Any)?
HelperExtractors.get_short_view_count(r).to_s
end
- LOGGER.trace("parse_related_video: Found \"watchNextEndScreenRenderer\" container")
+ Log.trace { "parse_related_video: Found \"watchNextEndScreenRenderer\" container" }
if published_time_text = related["publishedTimeText"]?
decoded_time = decode_date(published_time_text["simpleText"].to_s)
@@ -113,7 +113,7 @@ def extract_video_info(video_id : String)
if !CONFIG.invidious_companion.present?
if player_response.dig?("streamingData", "adaptiveFormats", 0, "url").nil?
- LOGGER.warn("Missing URLs for adaptive formats, falling back to other YT clients.")
+ Log.warn { "Missing URLs for adaptive formats, falling back to other YT clients." }
players_fallback = {YoutubeAPI::ClientType::TvSimply, YoutubeAPI::ClientType::WebMobile}
players_fallback.each do |player_fallback|
@@ -129,7 +129,7 @@ def extract_video_info(video_id : String)
break
end
rescue InfoException
- next LOGGER.warn("Failed to fetch streams with #{player_fallback}")
+ next Log.warn { "Failed to fetch streams with #{player_fallback}" }
end
end
@@ -168,11 +168,11 @@ def extract_video_info(video_id : String)
end
def try_fetch_streaming_data(id : String, client_config : YoutubeAPI::ClientConfig) : Hash(String, JSON::Any)?
- LOGGER.debug("try_fetch_streaming_data: [#{id}] Using #{client_config.client_type} client.")
+ Log.debug { "try_fetch_streaming_data: [#{id}] Using #{client_config.client_type} client." }
response = YoutubeAPI.player(video_id: id, params: "2AMB", client_config: client_config)
playability_status = response["playabilityStatus"]["status"]
- LOGGER.debug("try_fetch_streaming_data: [#{id}] Got playabilityStatus == #{playability_status}.")
+ Log.debug { "try_fetch_streaming_data: [#{id}] Got playabilityStatus == #{playability_status}." }
if id != response.dig?("videoDetails", "videoId")
# YouTube may return a different video player response than expected.
@@ -269,7 +269,7 @@ def parse_video_info(video_id : String, player_response : Hash(String, JSON::Any
# Related videos
- LOGGER.debug("extract_video_info: parsing related videos...")
+ Log.debug { "parse_video_info: parsing related videos..." }
related = [] of JSON::Any
@@ -336,8 +336,8 @@ def parse_video_info(video_id : String, player_response : Hash(String, JSON::Any
.try &.dig?("accessibility", "accessibilityData", "label")
likes = likes_txt.as_s.gsub(/\D/, "").to_i64? if likes_txt
- LOGGER.trace("extract_video_info: Found \"likes\" button. Button text is \"#{likes_txt}\"")
- LOGGER.debug("extract_video_info: Likes count is #{likes}") if likes
+ Log.trace { "parse_video_info: Found \"likes\" button. Button text is \"#{likes_txt}\"" }
+ Log.debug { "parse_video_info: Likes count is #{likes}" } if likes
end
end
@@ -485,7 +485,7 @@ private def convert_url(fmt)
url = URI.parse(cfr["url"])
params = url.query_params
- LOGGER.debug("convert_url: Decoding '#{cfr}'")
+ Log.debug { "convert_url: Decoding '#{cfr}'" }
unsig = DECRYPT_FUNCTION.try &.decrypt_signature(cfr["s"])
params[sp] = unsig if unsig
@@ -502,11 +502,11 @@ private def convert_url(fmt)
end
url.query_params = params
- LOGGER.trace("convert_url: new url is '#{url}'")
+ Log.trace { "convert_url: new url is '#{url}'" }
return url.to_s
rescue ex
- LOGGER.debug("convert_url: Error when parsing video URL")
- LOGGER.trace(ex.inspect_with_backtrace)
+ Log.debug { "convert_url: Error when parsing video URL" }
+ Log.trace { ex.inspect_with_backtrace }
return ""
end
diff --git a/src/invidious/yt_backend/connection_pool.cr b/src/invidious/yt_backend/connection_pool.cr
index 42241d159..d45275a13 100644
--- a/src/invidious/yt_backend/connection_pool.cr
+++ b/src/invidious/yt_backend/connection_pool.cr
@@ -165,7 +165,7 @@ def get_ytimg_pool(subdomain)
if pool = YTIMG_POOLS[subdomain]?
return pool
else
- LOGGER.info("ytimg_pool: Creating a new HTTP pool for \"https://#{subdomain}.ytimg.com\"")
+ Log.info { "get_ytimg_pool: Creating a new HTTP pool for \"https://#{subdomain}.ytimg.com\"" }
pool = YoutubeConnectionPool.new(URI.parse("https://#{subdomain}.ytimg.com"), capacity: CONFIG.pool_size)
YTIMG_POOLS[subdomain] = pool
diff --git a/src/invidious/yt_backend/extractors.cr b/src/invidious/yt_backend/extractors.cr
index 85f6caa55..4ee2e986f 100644
--- a/src/invidious/yt_backend/extractors.cr
+++ b/src/invidious/yt_backend/extractors.cr
@@ -40,8 +40,8 @@ private module Parsers
begin
return parse_internal(*args)
rescue ex
- LOGGER.debug("#{{{@type.name}}}: Failed to render item.")
- LOGGER.debug("#{{{@type.name}}}: Got exception: #{ex.message}")
+ Log.debug { "#{{{@type.name}}}: Failed to render item." }
+ Log.debug { "#{{{@type.name}}}: Got exception: #{ex.message}" }
ProblematicTimelineItem.new(
parse_exception: ex
)
@@ -1029,13 +1029,13 @@ def parse_item(item : JSON::Any, author_fallback : String? = "", author_id_fallb
# Each parser automatically validates the data given to see if the data is
# applicable to itself. If not nil is returned and the next parser is attempted.
ITEM_PARSERS.each do |parser|
- LOGGER.trace("parse_item: Attempting to parse item using \"#{parser.parser_name}\" (cycling...)")
+ Log.trace { "parse_item: Attempting to parse item using \"#{parser.parser_name}\" (cycling...)" }
if result = parser.process(item, author_fallback)
- LOGGER.debug("parse_item: Successfully parsed via #{parser.parser_name}")
+ Log.debug { "parse_item: Successfully parsed via #{parser.parser_name}" }
return result
else
- LOGGER.trace("parse_item: Parser \"#{parser.parser_name}\" does not apply. Cycling to the next one...")
+ Log.trace { "parse_item: Parser \"#{parser.parser_name}\" does not apply. Cycling to the next one..." }
end
end
end
@@ -1056,14 +1056,14 @@ def extract_items(initial_data : InitialData, &)
# This is identical to the parser cycling of parse_item().
ITEM_CONTAINER_EXTRACTOR.each do |extractor|
- LOGGER.trace("extract_items: Attempting to extract item container using \"#{extractor.extractor_name}\" (cycling...)")
+ Log.trace { "extract_items: Attempting to extract item container using \"#{extractor.extractor_name}\" (cycling...)" }
if container = extractor.process(unpackaged_data)
- LOGGER.debug("extract_items: Successfully unpacked container with \"#{extractor.extractor_name}\"")
+ Log.debug { "extract_items: Successfully unpacked container with \"#{extractor.extractor_name}\"" }
# Extract items in container
container.each { |item| yield item }
else
- LOGGER.trace("extract_items: Extractor \"#{extractor.extractor_name}\" does not apply. Cycling to the next one...")
+ Log.trace { "extract_items: Extractor \"#{extractor.extractor_name}\" does not apply. Cycling to the next one..." }
end
end
end
diff --git a/src/invidious/yt_backend/extractors_utils.cr b/src/invidious/yt_backend/extractors_utils.cr
index c83a2de5a..20f5f73aa 100644
--- a/src/invidious/yt_backend/extractors_utils.cr
+++ b/src/invidious/yt_backend/extractors_utils.cr
@@ -62,8 +62,8 @@ def has_verified_badge?(badges : JSON::Any?)
return false
rescue ex
- LOGGER.debug("Unable to parse owner badges. Got exception: #{ex.message}")
- LOGGER.trace("Owner badges data: #{badges.to_json}")
+ Log.debug { "Unable to parse owner badges. Got exception: #{ex.message}" }
+ Log.trace { "Owner badges data: #{badges.to_json}" }
return false
end
diff --git a/src/invidious/yt_backend/youtube_api.cr b/src/invidious/yt_backend/youtube_api.cr
index 6fa8ae0ec..c1cc97c90 100644
--- a/src/invidious/yt_backend/youtube_api.cr
+++ b/src/invidious/yt_backend/youtube_api.cr
@@ -4,6 +4,7 @@
module YoutubeAPI
extend self
+ Log = ::Log.for(self)
# For Android versions, see https://en.wikipedia.org/wiki/Android_version_history
private ANDROID_APP_VERSION = "19.35.36"
@@ -640,9 +641,9 @@ module YoutubeAPI
end
# Logging
- LOGGER.debug("YoutubeAPI: Using endpoint: \"#{endpoint}\"")
- LOGGER.trace("YoutubeAPI: ClientConfig: #{client_config}")
- LOGGER.trace("YoutubeAPI: POST data: #{data}")
+ Log.debug { "Using endpoint: \"#{endpoint}\"" }
+ Log.trace { "ClientConfig: #{client_config}" }
+ Log.trace { "POST data: #{data}" }
# Send the POST request
body = YT_POOL.client() do |client|
@@ -665,9 +666,9 @@ module YoutubeAPI
message = initial_data["error"]["message"].to_s.sub(/(\\n)+\^$/, "")
# Logging
- LOGGER.error("YoutubeAPI: Got error #{code} when requesting #{endpoint}")
- LOGGER.error("YoutubeAPI: #{message}")
- LOGGER.info("YoutubeAPI: POST data was: #{data}")
+ Log.error { "Got error #{code} when requesting #{endpoint}" }
+ Log.error { message }
+ Log.info { "POST data was: #{data}" }
raise InfoException.new("Could not extract JSON. Youtube API returned \
error #{code} with message:
\"#{message}\"")
@@ -695,8 +696,8 @@ module YoutubeAPI
}
# Logging
- LOGGER.debug("Invidious companion: Using endpoint: \"#{endpoint}\"")
- LOGGER.trace("Invidious companion: POST data: #{data}")
+ Log.debug { "Invidious companion: Using endpoint: \"#{endpoint}\"" }
+ Log.trace { "Invidious companion: POST data: #{data}" }
# Send the POST request