Skip to content

Commit 19516ea

Browse files
committed
Add option to view comments with JS disabled
1 parent 294c168 commit 19516ea

File tree

3 files changed

+281
-209
lines changed

3 files changed

+281
-209
lines changed

src/invidious.cr

Lines changed: 58 additions & 208 deletions
Original file line numberDiff line numberDiff line change
@@ -229,6 +229,10 @@ get "/watch" do |env|
229229
end
230230

231231
plid = env.params.query["list"]?
232+
nojs = env.params.query["nojs"]?
233+
234+
nojs ||= "0"
235+
nojs = nojs == "1"
232236

233237
user = env.get? "user"
234238
if user
@@ -255,6 +259,51 @@ get "/watch" do |env|
255259
next templated "error"
256260
end
257261

262+
if nojs
263+
if preferences
264+
source = preferences.comments[0]
265+
if source.empty?
266+
source = preferences.comments[1]
267+
end
268+
269+
if source == "youtube"
270+
begin
271+
comments = fetch_youtube_comments(id, "", proxies, "html")
272+
comments = JSON.parse(comments)
273+
comment_html = template_youtube_comments(comments)
274+
rescue ex
275+
if preferences.comments[1] == "reddit"
276+
comments, reddit_thread = fetch_reddit_comments(id)
277+
comment_html = template_reddit_comments(comments)
278+
279+
comment_html = fill_links(comment_html, "https", "www.reddit.com")
280+
comment_html = replace_links(comment_html)
281+
end
282+
end
283+
elsif source == "reddit"
284+
begin
285+
comments, reddit_thread = fetch_reddit_comments(id)
286+
comment_html = template_reddit_comments(comments)
287+
288+
comment_html = fill_links(comment_html, "https", "www.reddit.com")
289+
comment_html = replace_links(comment_html)
290+
rescue ex
291+
if preferences.comments[1] == "youtube"
292+
comments = fetch_youtube_comments(id, "", proxies, "html")
293+
comments = JSON.parse(comments)
294+
comment_html = template_youtube_comments(comments)
295+
end
296+
end
297+
end
298+
else
299+
comments = fetch_youtube_comments(id, "", proxies, "html")
300+
comments = JSON.parse(comments)
301+
comment_html = template_youtube_comments(comments)
302+
end
303+
304+
comment_html ||= ""
305+
end
306+
258307
fmt_stream = video.fmt_stream(decrypt_function)
259308
adaptive_fmts = video.adaptive_fmts(decrypt_function)
260309
video_streams = video.video_streams(adaptive_fmts)
@@ -1863,212 +1912,15 @@ get "/api/v1/comments/:id" do |env|
18631912
format = env.params.query["format"]?
18641913
format ||= "json"
18651914

1866-
if source == "youtube"
1867-
client = make_client(YT_URL)
1868-
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
1869-
headers = HTTP::Headers.new
1870-
headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
1871-
body = html.body
1872-
1873-
session_token = body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
1874-
itct = body.match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
1875-
ctoken = body.match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
1876-
1877-
if body.match(/<meta itemprop="regionsAllowed" content="">/)
1878-
bypass_channel = Channel({String, HTTPClient, HTTP::Headers} | Nil).new
1879-
1880-
proxies.each do |region, list|
1881-
spawn do
1882-
proxy_html = %(<meta itemprop="regionsAllowed" content="">)
1883-
1884-
list.each do |proxy|
1885-
begin
1886-
proxy_client = HTTPClient.new(YT_URL)
1887-
proxy_client.read_timeout = 10.seconds
1888-
proxy_client.connect_timeout = 10.seconds
1889-
1890-
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
1891-
proxy_client.set_proxy(proxy)
1892-
1893-
response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
1894-
proxy_headers = HTTP::Headers.new
1895-
proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
1896-
proxy_html = response.body
1897-
1898-
if !proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
1899-
bypass_channel.send({proxy_html, proxy_client, proxy_headers})
1900-
break
1901-
end
1902-
rescue ex
1903-
end
1904-
end
1905-
1906-
# If none of the proxies we tried returned a valid response
1907-
if proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
1908-
bypass_channel.send(nil)
1909-
end
1910-
end
1911-
end
1912-
1913-
proxies.size.times do
1914-
response = bypass_channel.receive
1915-
if response
1916-
session_token = response[0].match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
1917-
itct = response[0].match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
1918-
ctoken = response[0].match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
1919-
1920-
client = response[1]
1921-
headers = response[2]
1922-
break
1923-
end
1924-
end
1925-
end
1926-
1927-
if !ctoken
1928-
if format == "json"
1929-
next {"comments" => [] of String}.to_json
1930-
else
1931-
next {"contentHtml" => "", "commentCount" => 0}.to_json
1932-
end
1933-
end
1934-
ctoken = ctoken["ctoken"]
1935-
1936-
if env.params.query["continuation"]? && !env.params.query["continuation"].empty?
1937-
continuation = env.params.query["continuation"]
1938-
ctoken = continuation
1939-
else
1940-
continuation = ctoken
1941-
end
1942-
1943-
post_req = {
1944-
"session_token" => session_token,
1945-
}
1946-
post_req = HTTP::Params.encode(post_req)
1947-
1948-
headers["content-type"] = "application/x-www-form-urlencoded"
1949-
1950-
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
1951-
headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
1952-
headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
1953-
1954-
headers["x-youtube-client-name"] = "1"
1955-
headers["x-youtube-client-version"] = "2.20180719"
1956-
response = client.post("/comment_service_ajax?action_get_comments=1&pbj=1&ctoken=#{ctoken}&continuation=#{continuation}&itct=#{itct}&hl=en&gl=US", headers, post_req)
1957-
response = JSON.parse(response.body)
1958-
1959-
if !response["response"]["continuationContents"]?
1960-
halt env, status_code: 500
1961-
end
1962-
1963-
response = response["response"]["continuationContents"]
1964-
if response["commentRepliesContinuation"]?
1965-
body = response["commentRepliesContinuation"]
1966-
else
1967-
body = response["itemSectionContinuation"]
1968-
end
1969-
contents = body["contents"]?
1970-
if !contents
1971-
if format == "json"
1972-
next {"comments" => [] of String}.to_json
1973-
else
1974-
next {"contentHtml" => "", "commentCount" => 0}.to_json
1975-
end
1976-
end
1977-
1978-
comments = JSON.build do |json|
1979-
json.object do
1980-
if body["header"]?
1981-
comment_count = body["header"]["commentsHeaderRenderer"]["countText"]["simpleText"].as_s.delete("Comments,").to_i
1982-
json.field "commentCount", comment_count
1983-
end
1984-
1985-
json.field "comments" do
1986-
json.array do
1987-
contents.as_a.each do |node|
1988-
json.object do
1989-
if !response["commentRepliesContinuation"]?
1990-
node = node["commentThreadRenderer"]
1991-
end
1992-
1993-
if node["replies"]?
1994-
node_replies = node["replies"]["commentRepliesRenderer"]
1995-
end
1996-
1997-
if !response["commentRepliesContinuation"]?
1998-
node_comment = node["comment"]["commentRenderer"]
1999-
else
2000-
node_comment = node["commentRenderer"]
2001-
end
2002-
2003-
content_html = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff')
2004-
if content_html
2005-
content_html = HTML.escape(content_html)
2006-
end
2007-
2008-
content_html ||= content_to_comment_html(node_comment["contentText"]["runs"].as_a)
2009-
content_html, content = html_to_content(content_html)
2010-
2011-
author = node_comment["authorText"]?.try &.["simpleText"]
2012-
author ||= ""
2013-
2014-
json.field "author", author
2015-
json.field "authorThumbnails" do
2016-
json.array do
2017-
node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
2018-
json.object do
2019-
json.field "url", thumbnail["url"]
2020-
json.field "width", thumbnail["width"]
2021-
json.field "height", thumbnail["height"]
2022-
end
2023-
end
2024-
end
2025-
end
2026-
2027-
if node_comment["authorEndpoint"]?
2028-
json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
2029-
json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
2030-
else
2031-
json.field "authorId", ""
2032-
json.field "authorUrl", ""
2033-
end
2034-
2035-
published = decode_date(node_comment["publishedTimeText"]["runs"][0]["text"].as_s.rchop(" (edited)"))
2036-
2037-
json.field "content", content
2038-
json.field "contentHtml", content_html
2039-
json.field "published", published.epoch
2040-
json.field "publishedText", "#{recode_date(published)} ago"
2041-
json.field "likeCount", node_comment["likeCount"]
2042-
json.field "commentId", node_comment["commentId"]
2043-
2044-
if node_replies && !response["commentRepliesContinuation"]?
2045-
reply_count = node_replies["moreText"]["simpleText"].as_s.delete("View all reply replies,")
2046-
if reply_count.empty?
2047-
reply_count = 1
2048-
else
2049-
reply_count = reply_count.try &.to_i?
2050-
reply_count ||= 1
2051-
end
2052-
2053-
continuation = node_replies["continuations"].as_a[0]["nextContinuationData"]["continuation"].as_s
2054-
2055-
json.field "replies" do
2056-
json.object do
2057-
json.field "replyCount", reply_count
2058-
json.field "continuation", continuation
2059-
end
2060-
end
2061-
end
2062-
end
2063-
end
2064-
end
2065-
end
1915+
continuation = env.params.query["continuation"]?
1916+
continuation ||= ""
20661917

2067-
if body["continuations"]?
2068-
continuation = body["continuations"][0]["nextContinuationData"]["continuation"]
2069-
json.field "continuation", continuation
2070-
end
2071-
end
1918+
if source == "youtube"
1919+
begin
1920+
comments = fetch_youtube_comments(id, continuation, proxies, format)
1921+
rescue ex
1922+
error_message = {"error" => ex.message}.to_json
1923+
halt env, status_code: 500, response: error_message
20721924
end
20731925

20741926
if format == "json"
@@ -2092,10 +1944,8 @@ get "/api/v1/comments/:id" do |env|
20921944
next response
20931945
end
20941946
elsif source == "reddit"
2095-
client = make_client(REDDIT_URL)
2096-
headers = HTTP::Headers{"User-Agent" => "web:invidio.us:v0.6.0 (by /u/omarroth)"}
20971947
begin
2098-
comments, reddit_thread = get_reddit_comments(id, client, headers)
1948+
comments, reddit_thread = fetch_reddit_comments(id)
20991949
content_html = template_reddit_comments(comments)
21001950

21011951
content_html = fill_links(content_html, "https", "www.reddit.com")

0 commit comments

Comments (0)