from common import grab_html, grab_json, grab_xml, download_hls, download_mpd, Node, append_to_qs
import json
+import sys
-BASE = "http://www.sbs.com.au"
-FULL_VIDEO_LIST = BASE + "/api/video_search/v2/?m=1&filters={section}{Programs}"
+BASE = "https://www.sbs.com.au"
+FULL_VIDEO_LIST = BASE + "/api/video_feed/f/Bgtm9B/sbs-section-programs/"
VIDEO_URL = BASE + "/ondemand/video/single/%s"
NS = {
with requests_cache.disabled():
doc = grab_html(VIDEO_URL % self.video_id)
player_params = self.get_player_params(doc)
- release_url = player_params["releaseUrls"]["html"]
+ error = player_params.get("error", None)
+ if error:
+ print("Cannot download:", error)
+ return False
+
+ release_url = player_params["releaseUrls"]["html"]
filename = self.title + ".ts"
hls_url = self.get_hls_url(release_url)
def get_hls_url(self, release_url):
with requests_cache.disabled():
- doc = grab_xml("http:" + release_url.replace("http:", "").replace("https:", ""))
+ doc = grab_xml("https:" + release_url.replace("http:", "").replace("https:", ""))
video = doc.xpath("//smil:video", namespaces=NS)
if not video:
return
def load_all_video_entries(self):
offset = 1
- amount = 500
+ amount = 1000
+ uniq = set()
while True:
- url = append_to_qs(FULL_VIDEO_LIST, {"range": "%s-%s" % (offset, offset+amount)})
+ url = append_to_qs(FULL_VIDEO_LIST, {"range": "%s-%s" % (offset, offset+amount-1)})
data = grab_json(url)
if "entries" not in data:
raise Exception("Missing data in SBS response", data)
entries = data["entries"]
if len(entries) == 0:
break
- for entry in entries:
- yield entry
+ for entry in entries:
+ if entry["guid"] not in uniq:
+ uniq.add(entry["guid"])
+ yield entry
offset += amount
+ sys.stdout.write(".")
+ sys.stdout.flush()
+ print()
def explode_videos_to_unique_categories(self, all_video_entries):
for entry_data in all_video_entries: