Add support for polymer redesign

Omar Roth
2020-06-15 17:33:23 -05:00
parent c1cbdae5ee
commit 1eca969cf6
17 changed files with 634 additions and 886 deletions


@@ -232,9 +232,9 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
   nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))

   if auto_generated
-    videos = extract_videos(nodeset)
+    videos = extract_videos_html(nodeset)
   else
-    videos = extract_videos(nodeset, ucid, author)
+    videos = extract_videos_html(nodeset, ucid, author)
   end
 end
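
Note: this commit renames the old HTML-scraper extractors to *_html, freeing the extract_videos name for the polymer (JSON) path used in get_60_videos further down. The shapes implied by the call sites in this diff look roughly like the sketch below; the actual definitions live in files not shown here, so the parameter names and types are assumptions.

    # Assumed signatures, inferred only from the call sites in this diff.
    def extract_videos_html(nodeset : XML::NodeSet, ucid : String? = nil, author_name : String? = nil)
      # legacy path: scrape video entries out of the old HTML layout via XPath nodes
    end

    def extract_videos(initial_data : Hash(String, JSON::Any))
      # polymer path: walk the JSON embedded in the redesigned pages
    end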
@@ -317,9 +317,9 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
   nodeset = nodeset.not_nil!

   if auto_generated
-    videos = extract_videos(nodeset)
+    videos = extract_videos_html(nodeset)
   else
-    videos = extract_videos(nodeset, ucid, author)
+    videos = extract_videos_html(nodeset, ucid, author)
   end

   count = nodeset.size
@@ -429,7 +429,7 @@ def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
   if auto_generated
     items = extract_shelf_items(nodeset, ucid, author)
   else
-    items = extract_items(nodeset, ucid, author)
+    items = extract_items_html(nodeset, ucid, author)
   end

   return items, continuation
@@ -584,16 +584,8 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
   headers = HTTP::Headers.new
   headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
   headers["content-type"] = "application/x-www-form-urlencoded"
-  headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
-  headers["x-spf-previous"] = ""
-  headers["x-spf-referer"] = ""
-  headers["x-youtube-client-name"] = "1"
-  headers["x-youtube-client-version"] = "2.20180719"

-  session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[A-Za-z0-9\_\-\=]+)"/).try &.["session_token"]? || ""
+  session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[^"]+)"/).try &.["session_token"]? || ""

   post_req = {
     session_token: session_token,
   }
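
The old pattern's character class [A-Za-z0-9_\-=] fails whenever the token contains any other character; [^"]+ simply captures everything up to the closing quote. A quick check in Crystal, with a made-up token value containing a colon that the old class would reject:

    # Illustrative only: the token value here is invented.
    body = %({"XSRF_TOKEN":"QUFF-LBhZ2Vz:1234"})
    token = body.match(/"XSRF_TOKEN":"(?<session_token>[^"]+)"/).try &.["session_token"]? || ""
    puts token # => QUFF-LBhZ2Vz:1234 (the old pattern yields "" for this input)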
@@ -633,13 +625,7 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
   next if !post

-  if !post["contentText"]?
-    content_html = ""
-  else
-    content_html = post["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
-                   post["contentText"]["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
-  end
+  content_html = post["contentText"]?.try { |t| parse_content(t) } || ""

   author = post["authorText"]?.try &.["simpleText"]? || ""

   json.object do
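
parse_content is introduced by this commit but defined outside the lines shown. A plausible reconstruction, assuming it simply folds the two removed branches into one reusable helper:

    # Hypothetical body, mirroring the removed branches above; the real
    # helper added elsewhere in this commit may differ.
    def parse_content(content : JSON::Any) : String
      content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
        content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
    end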
@@ -960,7 +946,7 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
   2.times do |i|
     url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
-    response = YT_POOL.client &.get(url, headers)
+    response = YT_POOL.client &.get(url)
     initial_data = JSON.parse(response.body).as_a.find &.["response"]?
     break if !initial_data
     videos.concat extract_videos(initial_data.as_h)
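
With the custom scraper headers dropped, the request is a plain get(url). The index arithmetic stitches two consecutive inner pages together per outer page (each polymer response presumably carries 30 videos, giving the 60 in the function name):

    # For outer page N the loop requests inner pages 2N - 1 and 2N.
    page = 1
    2.times { |i| puts page * 2 + (i - 1) } # => 1 then 2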
@@ -980,7 +966,7 @@ def get_latest_videos(ucid)
   document = XML.parse_html(json["content_html"].as_s)
   nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
-  videos = extract_videos(nodeset, ucid)
+  videos = extract_videos_html(nodeset, ucid)
 end

 return videos