From d739ef8fd340149a0b1a2371ffd419c9723ce4f4 Mon Sep 17 00:00:00 2001
From: Omar Roth
Date: Fri, 2 Nov 2018 08:26:35 -0500
Subject: [PATCH 1/5] Add fix for videos without keywords
---
src/invidious/videos.cr | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/invidious/videos.cr b/src/invidious/videos.cr
index d21806d5..1c0ad35a 100644
--- a/src/invidious/videos.cr
+++ b/src/invidious/videos.cr
@@ -263,7 +263,10 @@ class Video
end
def keywords
- return self.player_response["videoDetails"]["keywords"].as_a
+ keywords = self.player_response["videoDetails"]["keywords"]?.try &.as_a
+ keywords ||= [] of String
+
+ return keywords
end
def fmt_stream(decrypt_function)
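Note: the fix above leans on Crystal's nil-safe JSON navigation: #[]? returns nil
for a missing key instead of raising, and Object#try short-circuits the .as_a call.
A minimal sketch of the behaviour, assuming a player_response shaped like YouTube's
JSON (the sample payloads and helper name below are illustrative only):

    require "json"

    def keywords_for(player_response : JSON::Any) : Array(JSON::Any)
      # []? and try yield nil when "keywords" is absent, instead of raising
      keywords = player_response["videoDetails"]["keywords"]?.try &.as_a
      keywords || [] of JSON::Any
    end

    with_keywords    = JSON.parse(%({"videoDetails": {"keywords": ["a", "b"]}}))
    without_keywords = JSON.parse(%({"videoDetails": {}}))

    keywords_for(with_keywords).size    # => 2
    keywords_for(without_keywords).size # => 0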
From 19632511d5ed1a38c32929d5dcb01edcd014e291 Mon Sep 17 00:00:00 2001
From: Omar Roth
Date: Fri, 2 Nov 2018 09:46:45 -0500
Subject: [PATCH 2/5] Update SQL
---
config/sql/channel_videos.sql | 39 +++++++++++-------------
config/sql/channels.sql | 23 ++++++--------
config/sql/users.sql | 38 +++++++++++++----------
config/sql/videos.sql | 57 +++++++++++++++++------------------
4 files changed, 77 insertions(+), 80 deletions(-)
diff --git a/config/sql/channel_videos.sql b/config/sql/channel_videos.sql
index 5dfe4650..98567780 100644
--- a/config/sql/channel_videos.sql
+++ b/config/sql/channel_videos.sql
@@ -4,36 +4,33 @@
CREATE TABLE public.channel_videos
(
- id text COLLATE pg_catalog."default" NOT NULL,
- title text COLLATE pg_catalog."default",
- published timestamp with time zone,
- updated timestamp with time zone,
- ucid text COLLATE pg_catalog."default",
- author text COLLATE pg_catalog."default",
- length_seconds integer,
- CONSTRAINT channel_videos_id_key UNIQUE (id)
-)
-WITH (
- OIDS = FALSE
-)
-TABLESPACE pg_default;
+ id text NOT NULL,
+ title text,
+ published timestamp with time zone,
+ updated timestamp with time zone,
+ ucid text,
+ author text,
+ length_seconds integer,
+ CONSTRAINT channel_videos_id_key UNIQUE (id)
+);
GRANT ALL ON TABLE public.channel_videos TO kemal;
--- Index: channel_videos_published_idx
+-- Index: public.channel_videos_published_idx
-- DROP INDEX public.channel_videos_published_idx;
CREATE INDEX channel_videos_published_idx
- ON public.channel_videos USING btree
- (published)
- TABLESPACE pg_default;
+ ON public.channel_videos
+ USING btree
+ (published);
--- Index: channel_videos_ucid_idx
+-- Index: public.channel_videos_ucid_idx
-- DROP INDEX public.channel_videos_ucid_idx;
CREATE INDEX channel_videos_ucid_idx
- ON public.channel_videos USING hash
- (ucid COLLATE pg_catalog."default")
- TABLESPACE pg_default;
\ No newline at end of file
+ ON public.channel_videos
+ USING hash
+ (ucid COLLATE pg_catalog."default");
+
diff --git a/config/sql/channels.sql b/config/sql/channels.sql
index a328bcad..c4259c12 100644
--- a/config/sql/channels.sql
+++ b/config/sql/channels.sql
@@ -4,23 +4,20 @@
CREATE TABLE public.channels
(
- id text COLLATE pg_catalog."default" NOT NULL,
- author text COLLATE pg_catalog."default",
- updated timestamp with time zone,
- CONSTRAINT channels_id_key UNIQUE (id)
-)
-WITH (
- OIDS = FALSE
-)
-TABLESPACE pg_default;
+ id text NOT NULL,
+ author text,
+ updated timestamp with time zone,
+ CONSTRAINT channels_id_key UNIQUE (id)
+);
GRANT ALL ON TABLE public.channels TO kemal;
--- Index: channels_id_idx
+-- Index: public.channels_id_idx
-- DROP INDEX public.channels_id_idx;
CREATE INDEX channels_id_idx
- ON public.channels USING btree
- (id COLLATE pg_catalog."default")
- TABLESPACE pg_default;
\ No newline at end of file
+ ON public.channels
+ USING btree
+ (id COLLATE pg_catalog."default");
+
diff --git a/config/sql/users.sql b/config/sql/users.sql
index 7f452e4c..f806271c 100644
--- a/config/sql/users.sql
+++ b/config/sql/users.sql
@@ -2,22 +2,28 @@
-- DROP TABLE public.users;
-CREATE TABLE public.users
+CREATE TABLE public.users
(
- id text[] COLLATE pg_catalog."default" NOT NULL,
- updated timestamp with time zone,
- notifications text[] COLLATE pg_catalog."default",
- subscriptions text[] COLLATE pg_catalog."default",
- email text COLLATE pg_catalog."default" NOT NULL,
- preferences text COLLATE pg_catalog."default",
- password text COLLATE pg_catalog."default",
- token text COLLATE pg_catalog."default",
- watched text[] COLLATE pg_catalog."default",
- CONSTRAINT users_email_key UNIQUE (email)
-)
-WITH (
- OIDS = FALSE
-)
-TABLESPACE pg_default;
+ id text[] NOT NULL,
+ updated timestamp with time zone,
+ notifications text[],
+ subscriptions text[],
+ email text NOT NULL,
+ preferences text,
+ password text,
+ token text,
+ watched text[],
+ CONSTRAINT users_email_key UNIQUE (email)
+);
GRANT ALL ON TABLE public.users TO kemal;
+
+-- Index: public.email_unique_idx
+
+-- DROP INDEX public.email_unique_idx;
+
+CREATE UNIQUE INDEX email_unique_idx
+ ON public.users
+ USING btree
+ (lower(email) COLLATE pg_catalog."default");
+
diff --git a/config/sql/videos.sql b/config/sql/videos.sql
index b94405e9..6ded01de 100644
--- a/config/sql/videos.sql
+++ b/config/sql/videos.sql
@@ -4,40 +4,37 @@
CREATE TABLE public.videos
(
- id text COLLATE pg_catalog."default" NOT NULL,
- info text COLLATE pg_catalog."default",
- updated timestamp with time zone,
- title text COLLATE pg_catalog."default",
- views bigint,
- likes integer,
- dislikes integer,
- wilson_score double precision,
- published timestamp with time zone,
- description text COLLATE pg_catalog."default",
- language text COLLATE pg_catalog."default",
- author text COLLATE pg_catalog."default",
- ucid text COLLATE pg_catalog."default",
- allowed_regions text[] COLLATE pg_catalog."default",
- is_family_friendly boolean,
- genre text COLLATE pg_catalog."default",
- genre_url text COLLATE pg_catalog."default",
- license text COLLATE pg_catalog."default",
- sub_count_text text COLLATE pg_catalog."default",
- author_thumbnail text COLLATE pg_catalog."default",
- CONSTRAINT videos_pkey PRIMARY KEY (id)
-)
-WITH (
- OIDS = FALSE
-)
-TABLESPACE pg_default;
+ id text NOT NULL,
+ info text,
+ updated timestamp with time zone,
+ title text,
+ views bigint,
+ likes integer,
+ dislikes integer,
+ wilson_score double precision,
+ published timestamp with time zone,
+ description text,
+ language text,
+ author text,
+ ucid text,
+ allowed_regions text[],
+ is_family_friendly boolean,
+ genre text,
+ genre_url text,
+ license text,
+ sub_count_text text,
+ author_thumbnail text,
+ CONSTRAINT videos_pkey PRIMARY KEY (id)
+);
GRANT ALL ON TABLE public.videos TO kemal;
--- Index: id_idx
+-- Index: public.id_idx
-- DROP INDEX public.id_idx;
CREATE UNIQUE INDEX id_idx
- ON public.videos USING btree
- (id COLLATE pg_catalog."default")
- TABLESPACE pg_default;
\ No newline at end of file
+ ON public.videos
+ USING btree
+ (id COLLATE pg_catalog."default");
+
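Note: besides dropping the COLLATE/TABLESPACE boilerplate, users.sql gains a unique
expression index on lower(email), which both enforces case-insensitive uniqueness of
addresses and backs the LOWER(email) lookups in src/invidious.cr. A rough sketch of
such a lookup through crystal-db/pg (the connection URL and helper name are
placeholders, not part of the patch):

    require "db"
    require "pg"

    PG_DB = DB.open "postgres://kemal@localhost:5432/invidious"

    # Comparing on LOWER(email) lets the planner answer this from email_unique_idx.
    def find_email(email : String) : String?
      PG_DB.query_one?("SELECT email FROM users WHERE LOWER(email) = LOWER($1)", email, as: String)
    end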
From 7e558c5b1d78d748faa9e736369314272cd3f107 Mon Sep 17 00:00:00 2001
From: Omar Roth
Date: Sat, 3 Nov 2018 11:52:33 -0500
Subject: [PATCH 3/5] Add error messages for invalid password sizes
---
src/invidious.cr | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/src/invidious.cr b/src/invidious.cr
index 2924c894..2c26e24b 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -803,6 +803,17 @@ post "/login" do |env|
next templated "error"
end
+ if password.empty?
+ error_message = "Password cannot be empty"
+ next templated "error"
+ end
+
+ # See https://security.stackexchange.com/a/39851
+ if password.size > 55
+ error_message = "Password cannot be longer than 55 characters"
+ next templated "error"
+ end
+
if !challenge_response || !token
error_message = "CAPTCHA is a required field"
next templated "error"
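Note: the 55-character ceiling follows the linked answer: bcrypt only uses a limited
prefix of its input (commonly truncated at 72 bytes), so characters past that bound
would be silently ignored rather than strengthening the hash. A minimal sketch of the
check sitting in front of a bcrypt hash, assuming the standard library's
Crypto::Bcrypt::Password (the helper below is illustrative, not the code path in
invidious.cr):

    require "crypto/bcrypt/password"

    def validate_and_hash(password : String) : Crypto::Bcrypt::Password
      raise "Password cannot be empty" if password.empty?
      # See https://security.stackexchange.com/a/39851
      raise "Password cannot be longer than 55 characters" if password.size > 55

      Crypto::Bcrypt::Password.create(password, cost: 10)
    end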
From c912e63fb5ba686125f13af4a83be695fcdf72e6 Mon Sep 17 00:00:00 2001
From: Omar Roth
Date: Sun, 4 Nov 2018 08:30:16 -0600
Subject: [PATCH 4/5] Only check invalid size passwords on register
---
src/invidious.cr | 22 +++++++++++-----------
1 file changed, 11 insertions(+), 11 deletions(-)
diff --git a/src/invidious.cr b/src/invidious.cr
index 2c26e24b..34c9384f 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -803,17 +803,6 @@ post "/login" do |env|
next templated "error"
end
- if password.empty?
- error_message = "Password cannot be empty"
- next templated "error"
- end
-
- # See https://security.stackexchange.com/a/39851
- if password.size > 55
- error_message = "Password cannot be longer than 55 characters"
- next templated "error"
- end
-
if !challenge_response || !token
error_message = "CAPTCHA is a required field"
next templated "error"
@@ -856,6 +845,17 @@ post "/login" do |env|
next templated "error"
end
elsif action == "register"
+ if password.empty?
+ error_message = "Password cannot be empty"
+ next templated "error"
+ end
+
+ # See https://security.stackexchange.com/a/39851
+ if password.size > 55
+ error_message = "Password cannot be longer than 55 characters"
+ next templated "error"
+ end
+
user = PG_DB.query_one?("SELECT * FROM users WHERE LOWER(email) = LOWER($1) AND password IS NOT NULL", email, as: User)
if user
error_message = "Please sign in"
From 4f856dd898b20ddc905e7592701eb685813fa262 Mon Sep 17 00:00:00 2001
From: Omar Roth
Date: Sun, 4 Nov 2018 09:37:12 -0600
Subject: [PATCH 5/5] Add support for Crystal 0.27.0
---
shard.yml | 1 +
src/invidious.cr | 20 ++++++++++----------
src/invidious/channels.cr | 4 ++--
src/invidious/comments.cr | 16 ++++++++--------
src/invidious/helpers/helpers.cr | 2 +-
src/invidious/mixes.cr | 2 +-
src/invidious/playlists.cr | 7 +++----
src/invidious/videos.cr | 6 +++---
8 files changed, 29 insertions(+), 29 deletions(-)
diff --git a/shard.yml b/shard.yml
index b9f01337..8172b9fe 100644
--- a/shard.yml
+++ b/shard.yml
@@ -13,6 +13,7 @@ dependencies:
github: detectlanguage/detectlanguage-crystal
kemal:
github: kemalcr/kemal
+ commit: b389022
pg:
github: will/crystal-pg
diff --git a/src/invidious.cr b/src/invidious.cr
index 34c9384f..ad0fcf95 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -2096,7 +2096,7 @@ get "/api/v1/videos/:id" do |env|
json.field "description", description
json.field "descriptionHtml", video.description
- json.field "published", video.published.epoch
+ json.field "published", video.published.to_unix
json.field "publishedText", "#{recode_date(video.published)} ago"
json.field "keywords", video.keywords
@@ -2290,7 +2290,7 @@ get "/api/v1/trending" do |env|
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
- json.field "published", video.published.epoch
+ json.field "published", video.published.to_unix
json.field "publishedText", "#{recode_date(video.published)} ago"
json.field "description", video.description
json.field "descriptionHtml", video.description_html
@@ -2320,7 +2320,7 @@ get "/api/v1/top" do |env|
json.field "author", video.author
json.field "authorId", video.ucid
json.field "authorUrl", "/channel/#{video.ucid}"
- json.field "published", video.published.epoch
+ json.field "published", video.published.to_unix
json.field "publishedText", "#{recode_date(video.published)} ago"
description = video.description.gsub("<br>", "\n")
@@ -2370,7 +2370,7 @@ get "/api/v1/channels/:ucid" do |env|
total_views = 0_i64
sub_count = 0_i64
- joined = Time.epoch(0)
+ joined = Time.unix(0)
metadata = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
metadata.each do |item|
case item.content
@@ -2426,7 +2426,7 @@ get "/api/v1/channels/:ucid" do |env|
json.field "subCount", sub_count
json.field "totalViews", total_views
- json.field "joined", joined.epoch
+ json.field "joined", joined.to_unix
json.field "paid", paid
json.field "isFamilyFriendly", is_family_friendly
@@ -2460,7 +2460,7 @@ get "/api/v1/channels/:ucid" do |env|
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
- json.field "published", video.published.epoch
+ json.field "published", video.published.to_unix
json.field "publishedText", "#{recode_date(video.published)} ago"
json.field "lengthSeconds", video.length_seconds
json.field "paid", video.paid
@@ -2517,7 +2517,7 @@ end
json.field "descriptionHtml", video.description_html
json.field "viewCount", video.views
- json.field "published", video.published.epoch
+ json.field "published", video.published.to_unix
json.field "publishedText", "#{recode_date(video.published)} ago"
json.field "lengthSeconds", video.length_seconds
json.field "paid", video.paid
@@ -2565,7 +2565,7 @@ get "/api/v1/channels/search/:ucid" do |env|
json.field "descriptionHtml", item.description_html
json.field "viewCount", item.views
- json.field "published", item.published.epoch
+ json.field "published", item.published.to_unix
json.field "publishedText", "#{recode_date(item.published)} ago"
json.field "lengthSeconds", item.length_seconds
json.field "liveNow", item.live_now
@@ -2688,7 +2688,7 @@ get "/api/v1/search" do |env|
json.field "descriptionHtml", item.description_html
json.field "viewCount", item.views
- json.field "published", item.published.epoch
+ json.field "published", item.published.to_unix
json.field "publishedText", "#{recode_date(item.published)} ago"
json.field "lengthSeconds", item.length_seconds
json.field "liveNow", item.live_now
@@ -2809,7 +2809,7 @@ get "/api/v1/playlists/:plid" do |env|
json.field "videoCount", playlist.video_count
json.field "viewCount", playlist.views
- json.field "updated", playlist.updated.epoch
+ json.field "updated", playlist.updated.to_unix
json.field "videos" do
json.array do
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index d3f4588d..dcab5e29 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -165,14 +165,14 @@ end
def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
if auto_generated
- seed = Time.epoch(1525757349)
+ seed = Time.unix(1525757349)
until seed >= Time.now
seed += 1.month
end
timestamp = seed - (page - 1).months
- page = "#{timestamp.epoch}"
+ page = "#{timestamp.to_unix}"
switch = "\x36"
else
page = "#{page}"
diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr
index 94c4698e..a699aaac 100644
--- a/src/invidious/comments.cr
+++ b/src/invidious/comments.cr
@@ -8,11 +8,11 @@ end
class RedditComment
module TimeConverter
def self.from_json(value : JSON::PullParser) : Time
- Time.epoch(value.read_float.to_i)
+ Time.unix(value.read_float.to_i)
end
def self.to_json(value : Time, json : JSON::Builder)
- json.number(value.epoch)
+ json.number(value.to_unix)
end
end
@@ -58,7 +58,7 @@ end
def fetch_youtube_comments(id, continuation, proxies, format)
client = make_client(YT_URL)
- html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+ html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
headers = HTTP::Headers.new
headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
body = html.body
@@ -83,7 +83,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
proxy_client.set_proxy(proxy)
- response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+ response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
proxy_headers = HTTP::Headers.new
proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
proxy_html = response.body
@@ -140,8 +140,8 @@ def fetch_youtube_comments(id, continuation, proxies, format)
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
- headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
- headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
+ headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
+ headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
@@ -229,7 +229,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
json.field "content", content
json.field "contentHtml", content_html
- json.field "published", published.epoch
+ json.field "published", published.to_unix
json.field "publishedText", "#{recode_date(published)} ago"
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]
@@ -327,7 +327,7 @@ def template_youtube_comments(comments)
#{child["author"]}
#{child["contentHtml"]}
- #{recode_date(Time.epoch(child["published"].as_i64))} ago
+ #{recode_date(Time.unix(child["published"].as_i64))} ago
|
#{number_with_separator(child["likeCount"])}
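Note: the TimeConverter change above is the same epoch -> unix rename applied inside
a JSON converter module. For reference, a self-contained sketch of how such a
converter plugs into serialization (the Payload type is illustrative, not part of
the patch):

    require "json"

    module UnixTimeConverter
      def self.from_json(value : JSON::PullParser) : Time
        Time.unix(value.read_float.to_i)
      end

      def self.to_json(value : Time, json : JSON::Builder)
        json.number(value.to_unix)
      end
    end

    struct Payload
      include JSON::Serializable

      @[JSON::Field(converter: UnixTimeConverter)]
      getter created : Time
    end

    Payload.from_json(%({"created": 1541030400.0})).created # => 2018-11-01 00:00:00 UTC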
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index c887f1a2..92a2e1b1 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -329,7 +329,7 @@ def extract_items(nodeset, ucid = nil)
rescue ex
end
begin
- published ||= Time.epoch(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
+ published ||= Time.unix(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
rescue ex
end
published ||= Time.now
diff --git a/src/invidious/mixes.cr b/src/invidious/mixes.cr
index a6a7693c..66f7371d 100644
--- a/src/invidious/mixes.cr
+++ b/src/invidious/mixes.cr
@@ -26,7 +26,7 @@ def fetch_mix(rdid, video_id, cookies = nil)
if cookies
headers = cookies.add_request_headers(headers)
end
- response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en", headers)
+ response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en", headers)
yt_data = response.body.match(/window\["ytInitialData"\] = (?<data>.*);/)
if yt_data
diff --git a/src/invidious/playlists.cr b/src/invidious/playlists.cr
index 4e9d837b..d85084eb 100644
--- a/src/invidious/playlists.cr
+++ b/src/invidious/playlists.cr
@@ -30,7 +30,7 @@ def fetch_playlist_videos(plid, page, video_count, continuation = nil)
client = make_client(YT_URL)
if continuation
- html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+ html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
html = XML.parse_html(html.body)
index = html.xpath_node(%q(//span[@id="playlist-current-index"])).try &.content.to_i?
@@ -167,11 +167,10 @@ def fetch_playlist(plid)
raise "Invalid playlist."
end
- body = response.body.gsub(<<-END_BUTTON
+ body = response.body.gsub(%(
- END_BUTTON
- , "")
+ ), "")
document = XML.parse_html(body)
title = document.xpath_node(%q(//h1[@class="pl-header-title"]))
diff --git a/src/invidious/videos.cr b/src/invidious/videos.cr
index 1c0ad35a..25ee2770 100644
--- a/src/invidious/videos.cr
+++ b/src/invidious/videos.cr
@@ -319,7 +319,7 @@ class Video
clen = url.match(/clen\/(?<clen>\d+)/).try &.["clen"]
clen ||= "0"
lmt = url.match(/lmt\/(?<lmt>\d+)/).try &.["lmt"]
- lmt ||= "#{((Time.now + 1.hour).epoch_f.to_f64 * 1000000).to_i64}"
+ lmt ||= "#{((Time.now + 1.hour).to_unix_f.to_f64 * 1000000).to_i64}"
segment_list = representation.xpath_node(%q(.//segmentlist)).not_nil!
init = segment_list.xpath_node(%q(.//initialization))
@@ -546,7 +546,7 @@ def fetch_video(id, proxies)
spawn do
client = make_client(YT_URL)
- html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
+ html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
if md = html.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
next html_channel.send(md["id"])
@@ -620,7 +620,7 @@ def fetch_video(id, proxies)
client.connect_timeout = 10.seconds
client.set_proxy(proxy)
- html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1").body)
+ html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1").body)
info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
if info["reason"]?