Mirror of https://github.com/iv-org/invidious.git, synced 2025-08-13 16:18:29 +00:00
Use new API to fetch videos from channels
This mirrors the process used by subscriptions.gir.st. The old API is tried first, and if it fails then the new one is used.
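The fallback is decided per request: each page is first fetched with the legacy parameters, and only when that response carries an error alert is the same page refetched with the new browse continuation. A minimal sketch of that loop in Crystal, with fetch_page, error_alert?, and consume as hypothetical helpers (the real implementation is get_60_videos, changed below):

needs_v2 = false
i = 0
while i < 2
  data = fetch_page(i, v2: needs_v2)  # hypothetical HTTP wrapper
  if !needs_v2 && error_alert?(data)  # hypothetical check for an ERROR alert
    needs_v2 = true                   # retry the same index via the new API
  else
    consume(data)                     # hypothetical consumer
    i += 1                            # only successful fetches advance
  end
end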
This commit is contained in:
parent 13f58d602f
commit b43866eeda
@@ -396,7 +396,7 @@ def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
   return items, continuation
 end
 
-def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest")
+def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
   object = {
     "80226972:embedded" => {
       "2:string" => ucid,
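The signature change is a trailing v2 flag that defaults to false, so existing call sites keep producing legacy URLs; opting into the new API is explicit. For illustration:

url = produce_channel_videos_url(ucid, page, sort_by: "newest")            # legacy URL
url = produce_channel_videos_url(ucid, page, sort_by: "newest", v2: true)  # new-API URL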
@@ -411,6 +411,7 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "
     },
   }
 
+  if !v2
   if auto_generated
     seed = Time.unix(1525757349)
     until seed >= Time.utc
@@ -424,6 +425,20 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "
     object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0_i64
     object["80226972:embedded"]["3:base64"].as(Hash)["15:string"] = "#{page}"
   end
+  else
+    object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0_i64
+
+    object["80226972:embedded"]["3:base64"].as(Hash)["61:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json({
+      "1:embedded" => {
+        "1:varint" => 6307666885028338688_i64,
+        "2:embedded" => {
+          "1:string" => Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json({
+            "1:varint" => 30_i64 * (page - 1),
+          }))),
+        },
+      },
+    })))
+  end
 
   case sort_by
   when "newest"
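The `if !v2` added in the previous hunk fences off the legacy pagination (seed date or plain page number in field 15); this hunk supplies the new branch, where pagination is encoded as a nested protobuf in field 61 whose inner payload is itself Protodec-encoded and urlsafe-Base64-wrapped. The inner message carries only a result offset, assuming 30 videos per page:

# Sketch of the inner continuation built above:
offset = 30_i64 * (page - 1)  # page 1 -> 0, page 2 -> 30, page 3 -> 60
inner = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json({
  "1:varint" => offset,
})))  # this string becomes "1:string" inside field 61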
@@ -904,12 +919,25 @@ end
 def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
   videos = [] of SearchVideo
 
-  2.times do |i|
-    url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
+  needs_v2 = false
+
+  i = 0
+  while i < 2
+    url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by, v2: needs_v2)
     response = YT_POOL.client &.get(url)
     initial_data = JSON.parse(response.body).as_a.find &.["response"]?
     break if !initial_data
+    v1_error = !needs_v2 && initial_data
+      .try &.["response"]?.try &.["alerts"]?
+      .try &.as_a.any? { |alert|
+        alert.try &.["alertRenderer"]?.try &.["type"]?.try { |t| t == "ERROR" }
+      }
+    if v1_error
+      needs_v2 = true
+    else
       videos.concat extract_videos(initial_data.as_h, author, ucid)
+      i += 1
+    end
   end
 
   return videos.size, videos
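Replacing 2.times with a manual while loop means a failed attempt does not consume one of the two iterations: on a v1 error the index stays put and the same page is refetched with v2: true. The alerts check matches a body of roughly this shape (an illustrative payload, not captured output):

require "json"

body = %({"response": {"alerts": [{"alertRenderer": {"type": "ERROR"}}]}})
initial_data = JSON.parse(body)
initial_data["response"]?.try &.["alerts"]?
  .try &.as_a.any? { |alert|
    alert.try &.["alertRenderer"]?.try &.["type"]?.try { |t| t == "ERROR" }
  }  # => true, so needs_v2 flips and the page is retried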
@@ -164,20 +164,8 @@ def extract_videos(initial_data : Hash(String, JSON::Any), author_fallback : Str
   extract_items(initial_data, author_fallback, author_id_fallback).select(&.is_a?(SearchVideo)).map(&.as(SearchVideo))
 end
 
-def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : String? = nil, author_id_fallback : String? = nil)
-  items = [] of SearchItem
-
-  initial_data.try { |t| t["contents"]? || t["response"]? }
-    .try { |t| t["twoColumnBrowseResultsRenderer"]?.try &.["tabs"].as_a.select(&.["tabRenderer"]?.try &.["selected"].as_bool)[0]?.try &.["tabRenderer"]["content"] ||
-               t["twoColumnSearchResultsRenderer"]?.try &.["primaryContents"] ||
-               t["continuationContents"]? }
-    .try { |t| t["sectionListRenderer"]? || t["sectionListContinuation"]? }
-    .try &.["contents"].as_a
-    .each { |c| c.try &.["itemSectionRenderer"]?.try &.["contents"].as_a
-      .try { |t| t[0]?.try &.["shelfRenderer"]?.try &.["content"]["expandedShelfContentsRenderer"]?.try &.["items"].as_a ||
-                 t[0]?.try &.["gridRenderer"]?.try &.["items"].as_a || t }
-      .each { |item|
-        if i = item["videoRenderer"]?
+def extract_item(item : JSON::Any, author_fallback : String? = nil, author_id_fallback : String? = nil)
+  if i = (item["videoRenderer"]? || item["gridVideoRenderer"]?)
     video_id = i["videoId"].as_s
     title = i["title"].try { |t| t["simpleText"]?.try &.as_s || t["runs"]?.try &.as_a.map(&.["text"].as_s).join("") } || ""
 
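The traversal boilerplate is removed from the matching code: extract_item now receives one renderer node and also accepts gridVideoRenderer, which is how the new channel API wraps videos. Renderers that match no branch fall through and yield nil, so callers chain .try, as the rebuilt extract_items does below:

extract_item(item, author_fallback, author_id_fallback)
  .try { |t| items << t }  # nil (e.g. an ad renderer) is simply skipped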
@@ -212,7 +200,7 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
       end
     end
 
-    items << SearchVideo.new({
+    SearchVideo.new({
       title: title,
       id: video_id,
       author: author,
@@ -238,7 +226,7 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
     video_count = i["videoCountText"]?.try &.["runs"].as_a[0]?.try &.["text"].as_s.gsub(/\D/, "").to_i || 0
     description_html = i["descriptionSnippet"]?.try { |t| parse_content(t) } || ""
 
-    items << SearchChannel.new({
+    SearchChannel.new({
       author: author,
       ucid: author_id,
       author_thumbnail: author_thumbnail,
@@ -254,7 +242,7 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
     video_count = i["videoCountText"]["runs"].as_a[0]?.try &.["text"].as_s.gsub(/\D/, "").to_i || 0
     playlist_thumbnail = i["thumbnail"]["thumbnails"][0]?.try &.["url"]?.try &.as_s || ""
 
-    items << SearchPlaylist.new({
+    SearchPlaylist.new({
       title: title,
       id: plid,
       author: author_fallback || "",
@@ -288,7 +276,7 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
 
     # TODO: i["publishedTimeText"]?
 
-    items << SearchPlaylist.new({
+    SearchPlaylist.new({
       title: title,
       id: plid,
       author: author,
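All four constructor sites change the same way: with no shared items array to append to, each branch ends with the bare record, and since a Crystal method returns its last evaluated expression, that record (or nil when nothing matched) is extract_item's return value. A hypothetical reduction of the pattern:

def pick(kind)
  if kind == "video"
    {type: "video"}  # last expression of the branch: returned to the caller
  elsif kind == "ad"
    nil              # the caller's .try skips this
  end                # no matching branch also yields nil
end

pick("video").try { |t| puts t }  # => {type: "video"}
pick("ad").try { |t| puts t }     # prints nothing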
@@ -305,7 +293,37 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
   elsif i = item["horizontalCardListRenderer"]?
   elsif i = item["searchPyvRenderer"]? # Ad
   end
+end
+
+def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : String? = nil, author_id_fallback : String? = nil)
+  items = [] of SearchItem
+
+  channel_v2_response = initial_data
+    .try &.["response"]?
+    .try &.["continuationContents"]?
+    .try &.["gridContinuation"]?
+    .try &.["items"]?
+
+  if channel_v2_response
+    channel_v2_response.try &.as_a.each { |item|
+      extract_item(item, author_fallback, author_id_fallback)
+        .try { |t| items << t }
+    }
+  else
+    initial_data.try { |t| t["contents"]? || t["response"]? }
+      .try { |t| t["twoColumnBrowseResultsRenderer"]?.try &.["tabs"].as_a.select(&.["tabRenderer"]?.try &.["selected"].as_bool)[0]?.try &.["tabRenderer"]["content"] ||
+                 t["twoColumnSearchResultsRenderer"]?.try &.["primaryContents"] ||
+                 t["continuationContents"]? }
+      .try { |t| t["sectionListRenderer"]? || t["sectionListContinuation"]? }
+      .try &.["contents"].as_a
+      .each { |c| c.try &.["itemSectionRenderer"]?.try &.["contents"].as_a
+        .try { |t| t[0]?.try &.["shelfRenderer"]?.try &.["content"]["expandedShelfContentsRenderer"]?.try &.["items"].as_a ||
+                   t[0]?.try &.["gridRenderer"]?.try &.["items"].as_a || t }
+        .each { |item|
+          extract_item(item, author_fallback, author_id_fallback)
+            .try { |t| items << t }
       } }
+  end
 
   items
 end
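The rebuilt extract_items probes first for the response shape of the new channel API, where items sit under response.continuationContents.gridContinuation.items; if that path resolves, each entry goes straight to extract_item, otherwise the original search/browse traversal runs unchanged. The v2 shape being detected looks roughly like this (illustrative and heavily trimmed):

require "json"

body = %({"response": {"continuationContents": {"gridContinuation":
        {"items": [{"gridVideoRenderer": {"videoId": "dQw4w9WgXcQ"}}]}}}})
initial_data = JSON.parse(body).as_h
# extract_items(initial_data) routes each entry in "items" to extract_item;
# a real payload also carries title, thumbnails, etc.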