Enigma2 plugin to play various online streams (mostly Latvian).

  1. #!/usr/bin/python
  2. # -*- coding: utf-8 -*-
  3. """
  4. StreamProxy daemon (based on Livestream daemon)
  5. Ensures persistent cookies, User-Agents and others tricks to play protected HLS/DASH streams
  6. """
  7. import os
  8. import sys
  9. import time
  10. import atexit
  11. import re
  12. import binascii
  13. from signal import SIGTERM
  14. from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
  15. from SocketServer import ThreadingMixIn
  16. from urllib import unquote, quote
  17. import urllib,urlparse
  18. #import cookielib,urllib2
  19. import requests
  20. try:
  21. from requests.packages.urllib3.exceptions import InsecureRequestWarning
  22. requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
  23. except:
  24. pass
  25. HOST_NAME = ""
  26. PORT_NUMBER = 8880
  27. DEBUG = True
  28. DEBUG2 = False
  29. SPLIT_CHAR = "~"
  30. SPLIT_CODE = "%7E"
  31. EQ_CODE = "%3D"
  32. COL_CODE = "%3A"
  33. headers2dict = lambda h: dict([l.strip().split(": ") for l in h.strip().splitlines()])
  34. headers0 = headers2dict("""
  35. icy-metadata: 1
  36. User-Agent: GStreamer souphttpsrc libsoup/2.52.2
  37. """)
  38. sessions = {}
  39. cur_directory = os.path.dirname(os.path.realpath(__file__))
  40. sources = None
  41. slinks = {}
  42. class StreamHandler(BaseHTTPRequestHandler):
  43. def do_HEAD(self):
  44. print "**get_head"
  45. self.send_response(200)
  46. self.send_header("Server", "playstreamproxy")
  47. if ".m3u8" in self.path.lower():
  48. ct = "application/vnd.apple.mpegurl"
  49. elif ".ts" in self.path.lower():
  50. ct = "video/MP2T"
  51. elif ".mp4" in ".ts" in self.path.lower():
  52. ct = "video/mp4"
  53. else:
  54. ct = "text/html"
  55. self.send_header("Content-type", ct)
  56. self.end_headers()
  57. def do_GET(self):
  58. """Respond to a GET request"""
  59. print "\n\n"+40*"#"+"\nget_url: \n%s", self.path
  60. p = self.path.split("~")
  61. #url = urllib.unquote(p[0][1:]) # TODO - vajag nocekot vai visi urli strādā
  62. urlp = p[0][1:]
  63. url = urlp.replace(COL_CODE, ":")
  64. #headers = self.headers.dict
  65. headers = {} # TODO izmanto saņemtos headerus, var aizvietot ar defaultajiem
  66. #headers["host"] = urlparse.urlparse(url).hostname
  67. if len(p)>1:
  68. for h in p[1:]:
  69. k = h.split("=")[0].lower()
  70. v = urllib.unquote(h.split("=")[1])
  71. headers[k]=v
  72. if DEBUG:
  73. print "url=%s"%url
  74. print "Original request headers + url headers:"
  75. print_headers(self.headers.dict)
  76. self.protocol_version = 'HTTP/1.1'
  77. try:
  78. if "::" in url: # encoded source link
  79. self.fetch_source(urlp, headers)
  80. elif ".lattelecom.tv/" in url: # lattelecom.tv hack
  81. self.fetch_ltc( url, headers)
  82. elif "filmas.lv" in url or "viaplay" in url: # HLS session/decode filmas.lv in url:
  83. self.fetch_url2(url, headers)
  84. else: # plain fetch
  85. self.fetch_url( url, headers)
  86. except Exception as e:
  87. print "Got Exception: ", str(e)
  88. import traceback
  89. traceback.print_exc()
  90. ### Remote server request procedures ###
  91. def fetch_offline(self):
  92. print "** Fetch offline"
  93. self.send_response(200)
  94. self.send_header("Server", "playstreamproxy")
  95. self.send_header("Content-type", "video/mp4")
  96. self.end_headers()
  97. self.wfile.write(open("offline.mp4", "rb").read())
  98. #self.wfile.close()
  99. def fetch_source(self, urlp, headers):
  100. if DEBUG:
  101. print "\n***********************************************************"
  102. print "fetch_source: \n%s"%urlp
  103. base_data = hls_base(urlp)
  104. data = urllib.unquote_plus(base_data)[:-1]
  105. if DEBUG: print "base_data=", base_data
  106. if DEBUG: print "data=", data
  107. if not base_data in slinks :
  108. streams = sources.get_streams(data)
  109. if not streams:
  110. self.write_error(500) # TODO
  111. return
  112. url = streams[0]["url"]
  113. base_url = hls_base(url)
  114. if DEBUG: print "New link, base_url=",base_url
  115. ses = requests.Session()
  116. ses.trust_env = False
  117. slinks[base_data] = {"data": data, "urlp":urlp,"url": url, "base_url": base_url,"session":ses}
  118. else:
  119. ses = slinks[base_data]["session"]
  120. if urlp == slinks[base_data]["urlp"]:
  121. url = slinks[base_data]["url"]
  122. if DEBUG: print "Existing base link", url
  123. else:
  124. url = urlp.replace(base_data, slinks[base_data]["base_url"])
  125. if DEBUG: print "Existing new link", url
  126. r = self.get_page_ses(url,ses,True,headers = headers)
  127. code = r.status_code
  128. if not code in (200,206): # TODO mēģina vēlreiz get_streams
  129. self.write_error(code)
  130. return
  131. self.send_response(code)
  132. self.send_headers(r.headers)
  133. CHUNK_SIZE = 1024 *4
  134. for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
  135. try:
  136. self.wfile.write(chunk)
  137. except Exception as e:
  138. print "Exception: ", str(e)
  139. self.wfile.close()
  140. return
  141. if DEBUG: print "**File downloaded"
  142. if "connection" in r.headers and r.headers["connection"] <> "keep-alive":
  143. self.wfile.close()
  144. return
  145. def fetch_url(self, url,headers):
  146. if DEBUG:
  147. print "\n***********************************************************"
  148. print "fetch_url: \n%s"%url
  149. r = self.get_page(url,headers = headers)
  150. code = r.status_code
  151. if not code in (200,206):
  152. self.write_error(code)
  153. return
  154. self.send_response(code)
  155. self.send_headers(r.headers)
  156. CHUNK_SIZE = 1024*4
  157. for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
  158. try:
  159. self.wfile.write(chunk)
  160. except Exception as e:
  161. print "Exception: ", str(e)
  162. self.wfile.close()
  163. return
  164. if DEBUG: print "**File downloaded"
  165. if "connection" in r.headers and r.headers["connection"] <> "keep-alive":
  166. self.wfile.close()
  167. return
  168. def fetch_ltc(self, url, headers):
  169. "lattelecom.tv hack (have to update chunklist after each 6 min"
  170. if DEBUG:
  171. print "\n\n***********************************************************"
  172. print "fetch_ltc: \n%s"%url
  173. base_url = hls_base(url)
  174. if DEBUG: print "base_url=",base_url
  175. if base_url not in sessions:
  176. if DEBUG: print "New session"
  177. sessions[base_url] = {}
  178. sessions[base_url]["session"] = requests.Session()
  179. sessions[base_url]["session"].trust_env = False
  180. sessions[base_url]["session"].headers.update(headers0)
  181. sessions[base_url]["playlist"] = ""
  182. sessions[base_url]["chunklist"] = []
  183. # change ts file to valid one media_w215689190_33.ts?
  184. tsfile = re.search("media_\w+_(\d+)\.ts", url, re.IGNORECASE)
  185. if tsfile and sessions[base_url]["chunklist"]:
  186. tnum = int(tsfile.group(1))
  187. url2 = sessions[base_url]["chunklist"][tnum]
  188. if not url2.startswith("http"):
  189. url2 = base_url + url2
  190. url = url2
  191. if DEBUG: print "[playstreamproxy] url changed to ", url
  192. ### get_page ###
  193. ses = sessions[base_url]["session"]
  194. #ses.headers.update(headers0)
  195. ses.headers.update(headers)
  196. # ses.headers["Connection"]="Keep-Alive"
  197. r = self.get_page_ses(url,ses)
  198. code = r.status_code #r.status_code
  199. if not (code in (200,206)) and tsfile:
  200. # update chunklist
  201. r2 = self.get_page(sessions[base_url]["playlist"])
  202. streams = re.findall(r"#EXT-X-STREAM-INF:.*?BANDWIDTH=(\d+).*?\n(.+?)$", r2.content, re.IGNORECASE | re.MULTILINE)
  203. if streams:
  204. sorted(streams, key=lambda item: int(item[0]), reverse=True)
  205. chunklist = streams[0][1]
  206. if not chunklist.startswith("http"):
  207. chunklist = base_url + chunklist
  208. else:
  209. self.write_error(r.status_code)
  210. return
  211. print "[playstreamproxy] trying to update chunklist", chunklist
  212. r3 = self.get_page_ses(chunklist,ses,True)
  213. ts_list = re.findall(r"#EXTINF:.*?\n(.+?)$", r3.content, re.IGNORECASE | re.MULTILINE)
  214. sessions[base_url]["chunklist"]= ts_list
  215. tnum = int(tsfile.group(1))
  216. url2 = sessions[base_url]["chunklist"][tnum]
  217. if not url2.startswith("http"):
  218. url2 = base_url + url2
  219. r = self.get_page_ses(url2,ses,True)
  220. if not r.status_code in (200,206):
  221. self.write_error(r.status_code)
  222. return
  223. elif not r.status_code in (200,206):
  224. self.write_error(r.status_code)
  225. return
  226. if "playlist.m3u8" in url:
  227. sessions[base_url]["playlist"] = url
  228. ### Start of return formin and sending
  229. self.send_response(200)
  230. #headers2 = del_headers(r.headers,["Content-Encoding",'Transfer-Encoding',"Connection",'content-range',"range"])
  231. headers2 = {"server":"playstreamproxy", "content-type":"text/html"}
  232. if DEBUG: print "\n** Return content"
  233. headers2["content-type"] = r.headers["content-type"]
  234. if "content-length" in r.headers:
  235. headers2["content-length"] = r.headers["content-length"]
  236. self.send_headers(r.headers)
  237. CHUNK_SIZE = 4 * 1024
  238. for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
  239. try:
  240. #print "#",
  241. self.wfile.write(chunk)
  242. except Exception as e:
  243. print "Exception: ", str(e)
  244. return
  245. if DEBUG: print "File downloaded = "
  246. self.wfile.close()
  247. #time.sleep(1)
  248. return
  249. def fetch_url2(self, url, headers):
  250. if DEBUG:
  251. print "\n***********************************************************"
  252. print "fetch_url2: \n%s"%url
  253. base_url = hls_base(url)
  254. if DEBUG: print "base_url=",base_url
  255. if base_url not in sessions:
  256. if DEBUG: print "New session"
  257. sessions[base_url] = {}
  258. sessions[base_url]["session"] = requests.Session()
  259. sessions[base_url]["session"].trust_env = False
  260. sessions[base_url]["session"].headers.update(headers0)
  261. sessions[base_url]["key"] = binascii.a2b_hex(headers["key"]) if "key" in headers and headers["key"] else None
  262. ses = sessions[base_url]["session"]
  263. ses.trust_env = False
  264. key = sessions[base_url]["key"]
  265. #ses.headers.clear()
  266. ses.headers.update(headers)
  267. r = self.get_page_ses(url, ses,stream=False)
  268. code = r.status_code #r.status_code
  269. if not (code in (200,206)):
  270. self.write_error(r.status_code)
  271. return
  272. ### Start of return formin and sending
  273. self.send_response(200)
  274. #headers2 = del_headers(r.headers,["Content-Encoding",'Transfer-Encoding',"Connection",'content-range',"range"])
  275. headers2 = {"server":"playstreamproxy", "content-type":"text/html"}
  276. # Content-Type: application/vnd.apple.mpegurl (encrypted)
  277. if r.headers["content-type"] == "application/vnd.apple.mpegurl" and key:
  278. content = r.content
  279. content = r.content.replace(base_url,"")
  280. content = re.sub("#EXT-X-KEY:METHOD=AES-128.+\n", "", content, 0, re.IGNORECASE | re.MULTILINE)
  281. headers2["content-type"] = "application/vnd.apple.mpegurl"
  282. headers2["content-length"] = "%s"%len(content)
  283. r.headers["content-length"] = "%s"%len(content)
  284. #headers2['content-range'] = 'bytes 0-%s/%s'%(len(content)-1,len(content))
  285. self.send_headers(headers2)
  286. #self.send_headers(r.headers)
  287. self.wfile.write(content)
  288. self.wfile.close()
  289. # Content-Type: video/MP2T (encrypted)
  290. elif r.headers["content-type"] == "video/MP2T" and key:
  291. print "Decode video/MP2T"
  292. content = r.content
  293. from Crypto.Cipher import AES
  294. iv = content[:16]
  295. d = AES.new(key, AES.MODE_CBC, iv)
  296. content = d.decrypt(content[16:])
  297. headers2["content-type"] = "video/MP2T"
  298. headers2["content-length"] = "%s"% (len(content))
  299. #headers2['content-range'] = 'bytes 0-%s/%s' % (len(content) - 1, len(content))
  300. print content[0:16]
  301. print "Finish decode"
  302. self.send_headers(headers2)
  303. self.wfile.write(content)
  304. self.wfile.close()
  305. else:
  306. if DEBUG: print "Return regular content"
  307. headers2["content-type"] = r.headers["content-type"]
  308. if "content-length" in r.headers:
  309. headers2["content-length"] = r.headers["content-length"]
  310. self.send_headers(r.headers)
  311. #self.send_headers(headers2)
  312. CHUNK_SIZE = 4 * 1024
  313. for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
  314. try:
  315. #print "#",
  316. self.wfile.write(chunk)
  317. except Exception as e:
  318. print "Exception: ", str(e)
  319. return
  320. if DEBUG: print "File downloaded = "
  321. if "connection" in r.headers and r.headers["connection"]<>"keep-alive":
  322. self.wfile.close()
  323. #time.sleep(1)
  324. return
  325. def send_headers(self,headers):
  326. #if DEBUG:
  327. #print "**Return headers: "
  328. #print_headers(headers)
  329. for h in headers:
  330. self.send_header(h, headers[h])
  331. self.end_headers()
  332. def write_error(self,code):
  333. print "***Error, code=%s" % code
  334. self.send_response(code)
  335. #self.send_headers(r.headers)
  336. self.wfile.close() # TODO?
  337. # self.fetch_offline()
  338. def get_page_ses(self,url,ses,stream=True, headers=None):
  339. headers= headers if headers else headers0
  340. ses.headers.update(headers)
  341. if DEBUG:
  342. print "\n\n====================================================\n**get_page_ses\n%s"%url
  343. print "**Server request headers: "
  344. print_headers(ses.headers)
  345. r = ses.get(url, stream=stream, verify=False)
  346. if DEBUG:
  347. print "**Server response:", r.status_code
  348. print "**Server response headers: "
  349. print_headers(r.headers)
  350. return r
  351. def get_page(self,url,headers=None):
  352. if not headers:
  353. headers = headers0
  354. if DEBUG:
  355. print "\n\n====================================================\n**get_page\n%s"%url
  356. print "**Server request headers: "
  357. print_headers(headers)
  358. r = requests.get(url, headers=headers,stream=True)
  359. if DEBUG:
  360. print "**Server response:", r.status_code
  361. print "**Server response headers: "
  362. print_headers(r.headers)
  363. return r
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle requests in a separate thread."""
    # ThreadingMixIn spawns one thread per incoming request, so slow
    # streaming clients do not block each other.
  366. def start(host = HOST_NAME, port = PORT_NUMBER):
  367. import ContentSources, util
  368. global sources
  369. sources = ContentSources.ContentSources(os.path.join(cur_directory, "sources"))
  370. httpd = ThreadedHTTPServer((host, port), StreamHandler)
  371. print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
  372. try:
  373. httpd.serve_forever()
  374. except KeyboardInterrupt:
  375. pass
  376. httpd.server_close()
  377. print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
  378. class Daemon:
  379. """
  380. A generic daemon class.
  381. Usage: subclass the Daemon class and override the run() method
  382. """
  383. def __init__(self, pidfile, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"):
  384. self.stdin = stdin
  385. self.stdout = stdout
  386. self.stderr = stderr
  387. self.pidfile = pidfile
  388. def daemonize(self):
  389. """
  390. do the UNIX double-fork magic, see Stevens' "Advanced
  391. Programming in the UNIX Environment" for details (ISBN 0201563177)
  392. http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
  393. """
  394. try:
  395. pid = os.fork()
  396. if pid > 0:
  397. # exit first parent
  398. sys.exit(0)
  399. except OSError, e:
  400. sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
  401. sys.exit(1)
  402. # decouple from parent environment
  403. os.chdir("/")
  404. os.setsid()
  405. os.umask(0)
  406. # do second fork
  407. try:
  408. pid = os.fork()
  409. if pid > 0:
  410. # exit from second parent
  411. sys.exit(0)
  412. except OSError, e:
  413. sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
  414. sys.exit(1)
  415. # redirect standard file descriptors
  416. sys.stdout.flush()
  417. sys.stderr.flush()
  418. si = file(self.stdin, "r")
  419. so = file(self.stdout, "a+")
  420. se = file(self.stderr, "a+", 0)
  421. os.dup2(si.fileno(), sys.stdin.fileno())
  422. os.dup2(so.fileno(), sys.stdout.fileno())
  423. os.dup2(se.fileno(), sys.stderr.fileno())
  424. # write pidfile
  425. atexit.register(self.delpid)
  426. pid = str(os.getpid())
  427. file(self.pidfile,"w+").write("%s\n" % pid)
  428. def delpid(self):
  429. os.remove(self.pidfile)
  430. def start(self):
  431. """
  432. Start the daemon
  433. """
  434. # Check for a pidfile to see if the daemon already runs
  435. try:
  436. pf = file(self.pidfile,"r")
  437. pid = int(pf.read().strip())
  438. pf.close()
  439. except IOError:
  440. pid = None
  441. if pid:
  442. message = "pidfile %s already exist. Daemon already running?\n"
  443. sys.stderr.write(message % self.pidfile)
  444. sys.exit(1)
  445. # Start the daemon
  446. self.daemonize()
  447. self.run()
  448. def stop(self):
  449. """
  450. Stop the daemon
  451. """
  452. # Get the pid from the pidfile
  453. try:
  454. pf = file(self.pidfile,"r")
  455. pid = int(pf.read().strip())
  456. pf.close()
  457. except IOError:
  458. pid = None
  459. if not pid:
  460. message = "pidfile %s does not exist. Daemon not running?\n"
  461. sys.stderr.write(message % self.pidfile)
  462. return # not an error in a restart
  463. # Try killing the daemon process
  464. try:
  465. while 1:
  466. os.kill(pid, SIGTERM)
  467. time.sleep(0.1)
  468. except OSError, err:
  469. err = str(err)
  470. if err.find("No such process") > 0:
  471. if os.path.exists(self.pidfile):
  472. os.remove(self.pidfile)
  473. else:
  474. print str(err)
  475. sys.exit(1)
  476. def restart(self):
  477. """
  478. Restart the daemon
  479. """
  480. self.stop()
  481. self.start()
  482. def run(self):
  483. """
  484. You should override this method when you subclass Daemon. It will be called after the process has been
  485. daemonized by start() or restart().
  486. """
class ProxyDaemon(Daemon):
    """Daemonized wrapper that runs the proxy HTTP server."""
    def run(self):
        # invoked in the daemonized child; blocks in serve_forever()
        start()
  490. def print_headers(headers):
  491. for h in headers:
  492. print "%s: %s"%(h,headers[h])
  493. def del_headers(headers0,tags):
  494. headers = headers0.copy()
  495. for t in tags:
  496. if t in headers:
  497. del headers[t]
  498. if t.lower() in headers:
  499. del headers[t.lower()]
  500. return headers
  501. def hls_base(url):
  502. url2 = url.split("?")[0]
  503. url2 = "/".join(url2.split("/")[0:-1])+ "/"
  504. return url2
  505. if __name__ == "__main__":
  506. daemon = ProxyDaemon("/var/run/playstreamproxy.pid")
  507. if len(sys.argv) == 2:
  508. if "start" == sys.argv[1]:
  509. daemon.start()
  510. elif "stop" == sys.argv[1]:
  511. daemon.stop()
  512. elif "restart" == sys.argv[1]:
  513. daemon.restart()
  514. elif "manualstart" == sys.argv[1]:
  515. start()
  516. else:
  517. print "Unknown command"
  518. sys.exit(2)
  519. sys.exit(0)
  520. else:
  521. print "usage: %s start|stop|restart|manualstart" % sys.argv[0]
  522. sys.exit(2)