Add support for Crystal 0.27.0

Omar Roth 2018-11-04 09:37:12 -06:00
parent c912e63fb5
commit 4f856dd898
8 changed files with 29 additions and 29 deletions
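
Nearly every hunk below tracks the Time API rename that landed in Crystal 0.27.0: the Time.epoch constructor becomes Time.unix, and the instance methods #epoch / #epoch_f become #to_unix / #to_unix_f. A minimal before/after sketch of the renamed calls, assuming Crystal 0.27.0:

    # Crystal 0.26.x and earlier
    t = Time.epoch(1525757349)   # build a Time from a Unix timestamp
    t.epoch                      # => 1525757349
    t.epoch_f                    # => 1525757349.0

    # Crystal 0.27.0
    t = Time.unix(1525757349)    # same constructor, new name
    t.to_unix                    # => 1525757349
    t.to_unix_f                  # => 1525757349.0

The one other visible change is a small string-literal cleanup in the playlist code.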

@@ -165,14 +165,14 @@ end
def produce_channel_videos_url(ucid, page = 1, auto_generated = nil)
if auto_generated
seed = Time.epoch(1525757349)
seed = Time.unix(1525757349)
until seed >= Time.now
seed += 1.month
end
timestamp = seed - (page - 1).months
page = "#{timestamp.epoch}"
page = "#{timestamp.to_unix}"
switch = "\x36"
else
page = "#{page}"

@@ -8,11 +8,11 @@ end
class RedditComment
module TimeConverter
def self.from_json(value : JSON::PullParser) : Time
Time.epoch(value.read_float.to_i)
Time.unix(value.read_float.to_i)
end
def self.to_json(value : Time, json : JSON::Builder)
json.number(value.epoch)
json.number(value.to_unix)
end
end
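
The converter above round-trips Reddit's numeric timestamps through Crystal's Time. A minimal sketch of how such a converter is typically wired into JSON.mapping, assuming Crystal 0.27.0 (the Comment class and created_utc field are illustrative, not taken from the diff):

    require "json"

    module TimeConverter
      def self.from_json(value : JSON::PullParser) : Time
        Time.unix(value.read_float.to_i) # Reddit sends seconds since the epoch as a float
      end

      def self.to_json(value : Time, json : JSON::Builder)
        json.number(value.to_unix)       # emit an integer Unix timestamp
      end
    end

    class Comment
      JSON.mapping(
        created_utc: {type: Time, converter: TimeConverter} # hypothetical field
      )
    end

    Comment.from_json(%({"created_utc": 1541343432.0})).created_utc # => 2018-11-04 14:57:12 UTC
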
@@ -58,7 +58,7 @@ end
def fetch_youtube_comments(id, continuation, proxies, format)
client = make_client(YT_URL)
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
headers = HTTP::Headers.new
headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
body = html.body
@@ -83,7 +83,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
proxy_client.set_proxy(proxy)
response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
proxy_headers = HTTP::Headers.new
proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
proxy_html = response.body
@@ -140,8 +140,8 @@ def fetch_youtube_comments(id, continuation, proxies, format)
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1"
headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
@@ -229,7 +229,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
json.field "content", content
json.field "contentHtml", content_html
json.field "published", published.epoch
json.field "published", published.to_unix
json.field "publishedText", "#{recode_date(published)} ago"
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]
@@ -327,7 +327,7 @@ def template_youtube_comments(comments)
<a href="#{child["authorUrl"]}">#{child["author"]}</a>
</b>
<p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
#{recode_date(Time.epoch(child["published"].as_i64))} ago
#{recode_date(Time.unix(child["published"].as_i64))} ago
|
<i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
</p>

@@ -329,7 +329,7 @@ def extract_items(nodeset, ucid = nil)
rescue ex
end
begin
published ||= Time.epoch(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
published ||= Time.unix(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
rescue ex
end
published ||= Time.now

@@ -26,7 +26,7 @@ def fetch_mix(rdid, video_id, cookies = nil)
if cookies
headers = cookies.add_request_headers(headers)
end
response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en", headers)
response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en", headers)
yt_data = response.body.match(/window\["ytInitialData"\] = (?<data>.*);/)
if yt_data

@@ -30,7 +30,7 @@ def fetch_playlist_videos(plid, page, video_count, continuation = nil)
client = make_client(YT_URL)
if continuation
html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
html = XML.parse_html(html.body)
index = html.xpath_node(%q(//span[@id="playlist-current-index"])).try &.content.to_i?
@@ -167,11 +167,10 @@ def fetch_playlist(plid)
raise "Invalid playlist."
end
body = response.body.gsub(<<-END_BUTTON
body = response.body.gsub(%(
<button class="yt-uix-button yt-uix-button-size-default yt-uix-button-link yt-uix-expander-head playlist-description-expander yt-uix-inlineedit-ignore-edit" type="button" onclick=";return false;"><span class="yt-uix-button-content"> less <img alt="" src="/yts/img/pixel-vfl3z5WfW.gif">
</span></button>
END_BUTTON
, "")
), "")
document = XML.parse_html(body)
title = document.xpath_node(%q(//h1[@class="pl-header-title"]))
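
Aside from the Time rename, this hunk swaps a <<-END_BUTTON heredoc for a %( ... ) percent string literal as the pattern passed to gsub; both forms express the same multi-line chunk of markup, and the percent literal avoids the dangling terminator line. A minimal sketch of a percent literal used the same way, assuming Crystal 0.27.0 (the markup is shortened from the real button HTML):

    button = %(
    <button class="yt-uix-button" type="button"><span> less </span></button>
    )

    page = "before\n" + button + "after"
    page.gsub(button, "") # => "before\nafter" (the embedded markup is removed)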

@@ -319,7 +319,7 @@ class Video
clen = url.match(/clen\/(?<clen>\d+)/).try &.["clen"]
clen ||= "0"
lmt = url.match(/lmt\/(?<lmt>\d+)/).try &.["lmt"]
lmt ||= "#{((Time.now + 1.hour).epoch_f.to_f64 * 1000000).to_i64}"
lmt ||= "#{((Time.now + 1.hour).to_unix_f.to_f64 * 1000000).to_i64}"
segment_list = representation.xpath_node(%q(.//segmentlist)).not_nil!
init = segment_list.xpath_node(%q(.//initialization))
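
The lmt fallback above builds a synthetic last-modified value: to_unix_f returns fractional seconds as a Float64 (which makes the original .to_f64 call redundant), and multiplying by 1_000_000 scales it to the microsecond precision the lmt URL parameter appears to use. A small sketch of the conversion, assuming Crystal 0.27.0:

    # One hour in the future, expressed as integer microseconds since the Unix epoch
    lmt = ((Time.now + 1.hour).to_unix_f * 1_000_000).to_i64
    lmt # => e.g. 1541350632000000
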
@@ -546,7 +546,7 @@ def fetch_video(id, proxies)
spawn do
client = make_client(YT_URL)
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1")
html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
if md = html.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
next html_channel.send(md["id"])
@@ -620,7 +620,7 @@ def fetch_video(id, proxies)
client.connect_timeout = 10.seconds
client.set_proxy(proxy)
html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.epoch + 2000}&gl=US&hl=en&disable_polymer=1").body)
html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1").body)
info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
if info["reason"]?