Python module (submodule repository) that provides content (video streams) from various online stream sources to the corresponding Enigma2, Kodi, and Plex plugins.

playstreamproxy0.py (20 KB)
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
StreamProxy daemon (based on Livestream daemon)
Ensures persistent cookies, User-Agents and others tricks to play protected HLS/DASH streams
"""
import os
import sys
import time
import atexit
import re
import binascii
from signal import SIGTERM
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
from urllib import unquote, quote
import urllib,urlparse
#import cookielib,urllib2
import requests
try:
    # Silence the certificate warnings triggered by verify=False requests.
    from requests.packages.urllib3.exceptions import InsecureRequestWarning
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
except:
    # Older requests bundles may not ship urllib3's warning module; best-effort.
    pass

HOST_NAME = ""      # bind address ("" = all interfaces)
PORT_NUMBER = 8880  # TCP port the proxy listens on
DEBUG = True        # verbose request/response tracing on stdout
DEBUG2 = False
# Separator packing extra headers into the request path, and the url-encoded
# forms of "~", "=" and ":" used when building/decoding proxied urls.
SPLIT_CHAR = "~"
SPLIT_CODE = "%7E"
EQ_CODE = "%3D"
COL_CODE = "%3A"
# Parse a raw header blob ("Name: value" per line) into a dict.
headers2dict = lambda h: dict([l.strip().split(": ") for l in h.strip().splitlines()])
# Default upstream headers, mimicking the GStreamer souphttpsrc player.
headers0 = headers2dict("""
icy-metadata: 1
User-Agent: GStreamer souphttpsrc libsoup/2.52.2
""")
sessions = {}   # per-base-url session state (fetch_ltc / fetch_url2)
cur_directory = os.path.dirname(os.path.realpath(__file__))
sources = None  # ContentSources registry, created in start()
slinks = {}     # cache of resolved encoded source links (fetch_source)
  42. class StreamHandler(BaseHTTPRequestHandler):
  43. def do_HEAD(self):
  44. print "**get_head"
  45. self.send_response(200)
  46. self.send_header("Server", "playstreamproxy")
  47. if ".m3u8" in self.path.lower():
  48. ct = "application/vnd.apple.mpegurl"
  49. elif ".ts" in self.path.lower():
  50. ct = "video/MP2T"
  51. elif ".mp4" in self.path.lower():
  52. ct = "video/mp4"
  53. else:
  54. ct = "text/html"
  55. self.send_header("Content-type", ct)
  56. self.end_headers()
  57. def do_GET(self):
  58. """Respond to a GET request"""
  59. print "\n\n"+40*"#"+"\nget_url: \n%s", self.path
  60. p = self.path.split("~")
  61. #url = urllib.unquote(p[0][1:]) # TODO - vajag nocekot vai visi urli strādā
  62. urlp = p[0][1:]
  63. url = urlp.replace(COL_CODE, ":")
  64. #headers = self.headers.dict
  65. headers = {} # TODO izmanto saņemtos headerus, var aizvietot ar defaultajiem
  66. #headers["host"] = urlparse.urlparse(url).hostname
  67. if len(p)>1:
  68. for h in p[1:]:
  69. k = h.split("=")[0].lower()
  70. v = urllib.unquote(h.split("=")[1])
  71. headers[k]=v
  72. if DEBUG:
  73. print "url=%s"%url
  74. print "Original request headers + url headers:"
  75. print_headers(self.headers.dict)
  76. self.protocol_version = 'HTTP/1.1'
  77. try:
  78. if "::" in url: # encoded source link
  79. self.fetch_source(urlp, headers)
  80. elif ".lattelecom.tv/" in url: # lattelecom.tv hack
  81. self.fetch_ltc( url, headers)
  82. elif "filmas.lv" in url or "viaplay" in url: # HLS session/decode filmas.lv in url:
  83. self.fetch_url2(url, headers)
  84. else: # plain fetch
  85. self.fetch_url( url, headers)
  86. except Exception as e:
  87. print "Got Exception: ", str(e)
  88. import traceback
  89. traceback.print_exc()
  90. ### Remote server request procedures ###
  91. def fetch_offline(self):
  92. print "** Fetch offline"
  93. self.send_response(200)
  94. self.send_header("Server", "playstreamproxy")
  95. self.send_header("Content-type", "video/mp4")
  96. self.end_headers()
  97. self.wfile.write(open("offline.mp4", "rb").read())
  98. #self.wfile.close()
  99. def fetch_source(self, urlp, headers):
  100. if DEBUG:
  101. print "\n***********************************************************"
  102. print "fetch_source: \n%s"%urlp
  103. base_data = hls_base(urlp)
  104. data = urllib.unquote_plus(base_data)[:-1]
  105. if DEBUG: print "base_data=", base_data
  106. if DEBUG: print "data=", data
  107. if not base_data in slinks :
  108. streams = sources.get_streams(data)
  109. if not streams:
  110. self.write_error(500) # TODO
  111. return
  112. url = streams[0]["url"]
  113. base_url = hls_base(url)
  114. if DEBUG: print "New link, base_url=",base_url
  115. ses = requests.Session()
  116. ses.trust_env = False
  117. slinks[base_data] = {"data": data, "urlp":urlp,"url": url, "base_url": base_url,"session":ses}
  118. else:
  119. ses = slinks[base_data]["session"]
  120. if urlp == slinks[base_data]["urlp"]:
  121. url = slinks[base_data]["url"]
  122. if DEBUG: print "Existing base link", url
  123. else:
  124. url = urlp.replace(base_data, slinks[base_data]["base_url"])
  125. if DEBUG: print "Existing new link", url
  126. r = self.get_page_ses(url,ses,True,headers = headers)
  127. code = r.status_code
  128. if not code in (200,206): # TODO mēģina vēlreiz get_streams
  129. self.write_error(code)
  130. return
  131. self.send_response(code)
  132. self.send_headers(r.headers)
  133. CHUNK_SIZE = 1024 *4
  134. for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
  135. try:
  136. self.wfile.write(chunk)
  137. except Exception as e:
  138. print "Exception: ", str(e)
  139. self.wfile.close()
  140. return
  141. if DEBUG: print "**File downloaded"
  142. if "connection" in r.headers and r.headers["connection"] <> "keep-alive":
  143. self.wfile.close()
  144. return
  145. def fetch_url(self, url,headers):
  146. if DEBUG:
  147. print "\n***********************************************************"
  148. print "fetch_url: \n%s"%url
  149. r = self.get_page(url,headers = headers)
  150. code = r.status_code
  151. if not code in (200,206):
  152. self.write_error(code)
  153. return
  154. self.send_response(code)
  155. self.send_headers(r.headers)
  156. CHUNK_SIZE = 1024*4
  157. for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
  158. try:
  159. self.wfile.write(chunk)
  160. except Exception as e:
  161. print "Exception: ", str(e)
  162. self.wfile.close()
  163. return
  164. if DEBUG: print "**File downloaded"
  165. if "connection" in r.headers and r.headers["connection"] <> "keep-alive":
  166. self.wfile.close()
  167. return
  168. def fetch_ltc(self, url, headers):
  169. "lattelecom.tv hack (have to update chunklist after each 6 min"
  170. if DEBUG:
  171. print "\n\n***********************************************************"
  172. print "fetch_ltc: \n%s"%url
  173. base_url = hls_base(url)
  174. if DEBUG: print "base_url=",base_url
  175. if base_url not in sessions:
  176. if DEBUG: print "New session"
  177. sessions[base_url] = {}
  178. sessions[base_url]["session"] = requests.Session()
  179. sessions[base_url]["session"].trust_env = False
  180. sessions[base_url]["session"].headers.update(headers0)
  181. sessions[base_url]["playlist"] = ""
  182. sessions[base_url]["chunklist"] = []
  183. # change ts file to valid one media_w215689190_33.ts?
  184. tsfile = re.search("media_\w+_(\d+)\.ts", url, re.IGNORECASE)
  185. if tsfile and sessions[base_url]["chunklist"]:
  186. tnum = int(tsfile.group(1))
  187. url2 = sessions[base_url]["chunklist"][tnum]
  188. if not url2.startswith("http"):
  189. url2 = base_url + url2
  190. url = url2
  191. if DEBUG: print "[playstreamproxy] url changed to ", url
  192. ### get_page ###
  193. ses = sessions[base_url]["session"]
  194. #ses.headers.update(headers0)
  195. ses.headers.update(headers)
  196. # ses.headers["Connection"]="Keep-Alive"
  197. r = self.get_page_ses(url,ses)
  198. code = r.status_code #r.status_code
  199. if not (code in (200,206)) and tsfile:
  200. # update chunklist
  201. r2 = self.get_page(sessions[base_url]["playlist"])
  202. streams = re.findall(r"#EXT-X-STREAM-INF:.*?BANDWIDTH=(\d+).*?\n(.+?)$", r2.content, re.IGNORECASE | re.MULTILINE)
  203. if streams:
  204. sorted(streams, key=lambda item: int(item[0]), reverse=True)
  205. chunklist = streams[0][1]
  206. if not chunklist.startswith("http"):
  207. chunklist = base_url + chunklist
  208. else:
  209. self.write_error(r.status_code)
  210. return
  211. print "[playstreamproxy] trying to update chunklist", chunklist
  212. r3 = self.get_page_ses(chunklist,ses,True)
  213. ts_list = re.findall(r"#EXTINF:.*?\n(.+?)$", r3.content, re.IGNORECASE | re.MULTILINE)
  214. sessions[base_url]["chunklist"]= ts_list
  215. tnum = int(tsfile.group(1))
  216. url2 = sessions[base_url]["chunklist"][tnum]
  217. if not url2.startswith("http"):
  218. url2 = base_url + url2
  219. r = self.get_page_ses(url2,ses,True)
  220. if not r.status_code in (200,206):
  221. self.write_error(r.status_code)
  222. return
  223. elif not r.status_code in (200,206):
  224. self.write_error(r.status_code)
  225. return
  226. if "playlist.m3u8" in url:
  227. sessions[base_url]["playlist"] = url
  228. ### Start of return formin and sending
  229. self.send_response(200)
  230. #headers2 = del_headers(r.headers,["Content-Encoding",'Transfer-Encoding',"Connection",'content-range',"range"])
  231. headers2 = {"server":"playstreamproxy", "content-type":"text/html"}
  232. if DEBUG: print "\n** Return content"
  233. headers2["content-type"] = r.headers["content-type"]
  234. if "content-length" in r.headers:
  235. headers2["content-length"] = r.headers["content-length"]
  236. self.send_headers(r.headers)
  237. CHUNK_SIZE = 4 * 1024
  238. for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
  239. try:
  240. #print "#",
  241. self.wfile.write(chunk)
  242. except Exception as e:
  243. print "Exception: ", str(e)
  244. return
  245. if DEBUG: print "File downloaded = "
  246. self.wfile.close()
  247. #time.sleep(1)
  248. return
  249. def fetch_url2(self, url, headers):
  250. if DEBUG:
  251. print "\n***********************************************************"
  252. print "fetch_url2: \n%s"%url
  253. base_url = hls_base(url)
  254. if DEBUG: print "base_url=",base_url
  255. if base_url not in sessions:
  256. if DEBUG: print "New session"
  257. sessions[base_url] = {}
  258. sessions[base_url]["session"] = requests.Session()
  259. sessions[base_url]["session"].trust_env = False
  260. sessions[base_url]["session"].headers.update(headers0)
  261. sessions[base_url]["key"] = binascii.a2b_hex(headers["key"]) if "key" in headers and headers["key"] else None
  262. ses = sessions[base_url]["session"]
  263. ses.trust_env = False
  264. key = sessions[base_url]["key"]
  265. #ses.headers.clear()
  266. ses.headers.update(headers)
  267. r = self.get_page_ses(url, ses,stream=False)
  268. code = r.status_code #r.status_code
  269. if not (code in (200,206)):
  270. self.write_error(r.status_code)
  271. return
  272. ### Start of return formin and sending
  273. self.send_response(200)
  274. #headers2 = del_headers(r.headers,["Content-Encoding",'Transfer-Encoding',"Connection",'content-range',"range"])
  275. headers2 = {"server":"playstreamproxy", "content-type":"text/html"}
  276. # Content-Type: application/vnd.apple.mpegurl (encrypted)
  277. if r.headers["content-type"] == "application/vnd.apple.mpegurl" and key:
  278. content = r.content
  279. content = r.content.replace(base_url,"")
  280. content = re.sub("#EXT-X-KEY:METHOD=AES-128.+\n", "", content, 0, re.IGNORECASE | re.MULTILINE)
  281. headers2["content-type"] = "application/vnd.apple.mpegurl"
  282. headers2["content-length"] = "%s"%len(content)
  283. r.headers["content-length"] = "%s"%len(content)
  284. #headers2['content-range'] = 'bytes 0-%s/%s'%(len(content)-1,len(content))
  285. self.send_headers(headers2)
  286. #self.send_headers(r.headers)
  287. self.wfile.write(content)
  288. self.wfile.close()
  289. # Content-Type: video/MP2T (encrypted)
  290. elif r.headers["content-type"] == "video/MP2T" and key:
  291. print "Decode video/MP2T"
  292. content = r.content
  293. from Crypto.Cipher import AES
  294. iv = content[:16]
  295. d = AES.new(key, AES.MODE_CBC, iv)
  296. content = d.decrypt(content[16:])
  297. headers2["content-type"] = "video/MP2T"
  298. headers2["content-length"] = "%s"% (len(content))
  299. #headers2['content-range'] = 'bytes 0-%s/%s' % (len(content) - 1, len(content))
  300. print content[0:16]
  301. print "Finish decode"
  302. self.send_headers(headers2)
  303. self.wfile.write(content)
  304. self.wfile.close()
  305. else:
  306. if DEBUG: print "Return regular content"
  307. headers2["content-type"] = r.headers["content-type"]
  308. if "content-length" in r.headers:
  309. headers2["content-length"] = r.headers["content-length"]
  310. self.send_headers(r.headers)
  311. #self.send_headers(headers2)
  312. CHUNK_SIZE = 4 * 1024
  313. for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
  314. try:
  315. #print "#",
  316. self.wfile.write(chunk)
  317. except Exception as e:
  318. print "Exception: ", str(e)
  319. return
  320. if DEBUG: print "File downloaded = "
  321. if "connection" in r.headers and r.headers["connection"]<>"keep-alive":
  322. self.wfile.close()
  323. #time.sleep(1)
  324. return
  325. def send_headers(self,headers):
  326. #if DEBUG:
  327. #print "**Return headers: "
  328. #print_headers(headers)
  329. for h in headers:
  330. self.send_header(h, headers[h])
  331. self.end_headers()
  332. def write_error(self,code):
  333. print "***Error, code=%s" % code
  334. self.send_response(code)
  335. #self.send_headers(r.headers)
  336. self.wfile.close() # TODO?
  337. # self.fetch_offline()
  338. def get_page_ses(self,url,ses,stream=True, headers=None):
  339. headers= headers if headers else headers0
  340. ses.headers.update(headers)
  341. if DEBUG:
  342. print "\n\n====================================================\n**get_page_ses\n%s"%url
  343. print "**Server request headers: "
  344. print_headers(ses.headers)
  345. r = ses.get(url, stream=stream, verify=False)
  346. if DEBUG:
  347. print "**Server response:", r.status_code
  348. print "**Server response headers: "
  349. print_headers(r.headers)
  350. return r
  351. def get_page(self,url,headers=None):
  352. if not headers:
  353. headers = headers0
  354. if DEBUG:
  355. print "\n\n====================================================\n**get_page\n%s"%url
  356. print "**Server request headers: "
  357. print_headers(headers)
  358. r = requests.get(url, headers=headers,stream=True)
  359. if DEBUG:
  360. print "**Server response:", r.status_code
  361. print "**Server response headers: "
  362. print_headers(r.headers)
  363. return r
  364. def address_string(self):
  365. host, port = self.client_address[:2]
  366. #return socket.getfqdn(host)
  367. return host
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle requests in a separate thread."""
  370. def start(host = HOST_NAME, port = PORT_NUMBER):
  371. import ContentSources, util
  372. global sources
  373. sources = ContentSources.ContentSources(os.path.join(cur_directory, "sources"))
  374. httpd = ThreadedHTTPServer((host, port), StreamHandler)
  375. print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
  376. try:
  377. httpd.serve_forever()
  378. except KeyboardInterrupt:
  379. pass
  380. httpd.server_close()
  381. print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
class Daemon:
    """
    A generic daemon class.
    Usage: subclass the Daemon class and override the run() method
    """

    def __init__(self, pidfile, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"):
        # Paths the daemonized process re-points its standard streams at.
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        # Path of the file recording the daemon's pid (used by stop/restart).
        self.pidfile = pidfile

    def daemonize(self):
        """
        do the UNIX double-fork magic, see Stevens' "Advanced
        Programming in the UNIX Environment" for details (ISBN 0201563177)
        http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
        """
        try:
            pid = os.fork()
            if pid > 0:
                # exit first parent
                sys.exit(0)
        except OSError, e:
            sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)
        # decouple from parent environment
        os.chdir("/")
        os.setsid()
        os.umask(0)
        # do second fork
        try:
            pid = os.fork()
            if pid > 0:
                # exit from second parent
                sys.exit(0)
        except OSError, e:
            sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)
        # redirect standard file descriptors
        sys.stdout.flush()
        sys.stderr.flush()
        # NOTE: file() is Python-2-only; stderr is opened unbuffered (0).
        si = file(self.stdin, "r")
        so = file(self.stdout, "a+")
        se = file(self.stderr, "a+", 0)
        os.dup2(si.fileno(), sys.stdin.fileno())
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(se.fileno(), sys.stderr.fileno())
        # write pidfile (removed automatically on interpreter exit)
        atexit.register(self.delpid)
        pid = str(os.getpid())
        file(self.pidfile, "w+").write("%s\n" % pid)

    def delpid(self):
        # atexit hook: drop the stale pidfile when the daemon exits.
        os.remove(self.pidfile)

    def start(self):
        """
        Start the daemon
        """
        # Check for a pidfile to see if the daemon already runs
        try:
            pf = file(self.pidfile, "r")
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None
        if pid:
            message = "pidfile %s already exist. Daemon already running?\n"
            sys.stderr.write(message % self.pidfile)
            sys.exit(1)
        # Start the daemon
        self.daemonize()
        self.run()

    def stop(self):
        """
        Stop the daemon
        """
        # Get the pid from the pidfile
        try:
            pf = file(self.pidfile, "r")
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None
        if not pid:
            message = "pidfile %s does not exist. Daemon not running?\n"
            sys.stderr.write(message % self.pidfile)
            return  # not an error in a restart
        # Try killing the daemon process: keep signalling until the kill
        # raises "No such process", then clean up the pidfile.
        try:
            while 1:
                os.kill(pid, SIGTERM)
                time.sleep(0.1)
        except OSError, err:
            err = str(err)
            if err.find("No such process") > 0:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                print str(err)
                sys.exit(1)

    def restart(self):
        """
        Restart the daemon
        """
        self.stop()
        self.start()

    def run(self):
        """
        You should override this method when you subclass Daemon. It will be called after the process has been
        daemonized by start() or restart().
        """
class ProxyDaemon(Daemon):
    # Concrete daemon: run() simply starts the proxy server loop.
    def run(self):
        start()
  494. def print_headers(headers):
  495. for h in headers:
  496. print "%s: %s"%(h,headers[h])
  497. def del_headers(headers0,tags):
  498. headers = headers0.copy()
  499. for t in tags:
  500. if t in headers:
  501. del headers[t]
  502. if t.lower() in headers:
  503. del headers[t.lower()]
  504. return headers
  505. def hls_base(url):
  506. url2 = url.split("?")[0]
  507. url2 = "/".join(url2.split("/")[0:-1])+ "/"
  508. return url2
if __name__ == "__main__":
    # Command-line entry point: start|stop|restart run as a daemon via the
    # pidfile below; manualstart runs the server in the foreground.
    daemon = ProxyDaemon("/var/run/playstreamproxy.pid")
    if len(sys.argv) == 2:
        if "start" == sys.argv[1]:
            daemon.start()
        elif "stop" == sys.argv[1]:
            daemon.stop()
        elif "restart" == sys.argv[1]:
            daemon.restart()
        elif "manualstart" == sys.argv[1]:
            start()
        else:
            print "Unknown command"
            sys.exit(2)
        sys.exit(0)
    else:
        print "usage: %s start|stop|restart|manualstart" % sys.argv[0]
        sys.exit(2)