Add support for polymer redesign
@@ -232,9 +232,9 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
     nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
 
     if auto_generated
-      videos = extract_videos(nodeset)
+      videos = extract_videos_html(nodeset)
     else
-      videos = extract_videos(nodeset, ucid, author)
+      videos = extract_videos_html(nodeset, ucid, author)
     end
   end
 
@@ -317,9 +317,9 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
       nodeset = nodeset.not_nil!
 
       if auto_generated
-        videos = extract_videos(nodeset)
+        videos = extract_videos_html(nodeset)
       else
-        videos = extract_videos(nodeset, ucid, author)
+        videos = extract_videos_html(nodeset, ucid, author)
       end
 
       count = nodeset.size
@@ -429,7 +429,7 @@ def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
   if auto_generated
     items = extract_shelf_items(nodeset, ucid, author)
   else
-    items = extract_items(nodeset, ucid, author)
+    items = extract_items_html(nodeset, ucid, author)
   end
 
   return items, continuation
@@ -584,16 +584,8 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
 
   headers = HTTP::Headers.new
   headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
   headers["content-type"] = "application/x-www-form-urlencoded"
-
-  headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
-  headers["x-spf-previous"] = ""
-  headers["x-spf-referer"] = ""
-
-  headers["x-youtube-client-name"] = "1"
-  headers["x-youtube-client-version"] = "2.20180719"
-
-  session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[A-Za-z0-9\_\-\=]+)"/).try &.["session_token"]? || ""
+  session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[^"]+)"/).try &.["session_token"]? || ""
   post_req = {
     session_token: session_token,
   }
@@ -633,13 +625,7 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
 
         next if !post
 
-        if !post["contentText"]?
-          content_html = ""
-        else
-          content_html = post["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
-                         post["contentText"]["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
-        end
-
+        content_html = post["contentText"]?.try { |t| parse_content(t) } || ""
        author = post["authorText"]?.try &.["simpleText"]? || ""
 
        json.object do
@@ -960,7 +946,7 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
 
   2.times do |i|
     url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
-    response = YT_POOL.client &.get(url, headers)
+    response = YT_POOL.client &.get(url)
     initial_data = JSON.parse(response.body).as_a.find &.["response"]?
     break if !initial_data
     videos.concat extract_videos(initial_data.as_h)
@@ -980,7 +966,7 @@ def get_latest_videos(ucid)
     document = XML.parse_html(json["content_html"].as_s)
     nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
 
-    videos = extract_videos(nodeset, ucid)
+    videos = extract_videos_html(nodeset, ucid)
  end
 
  return videos
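
Note on the fetch_channel_community hunk: the deleted inline branching shows exactly what the new parse_content call has to cover (plain "simpleText" posts and "runs"-based rich text). A minimal sketch of such a helper, reconstructed from those deleted lines, assuming it takes the contentText JSON node and returns an HTML string; the actual helper's signature and location in the codebase are not shown in this diff:

def parse_content(content : JSON::Any) : String
  # Plain-text posts: strip the trailing BOM and HTML-escape, as the old inline code did.
  content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
    # Rich-text posts: render the "runs" array through the existing comment HTML builder.
    content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
end

At the call site the commit then reduces the whole branch to a single line, content_html = post["contentText"]?.try { |t| parse_content(t) } || "", which also makes the missing-contentText case fall through to the empty string.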