Python module (submodule repository), which provides content (video streams) from various online stream sources to the corresponding Enigma2, Kodi and Plex plugins

playstreamproxy.py 14KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462
  1. #!/usr/bin/python
  2. # -*- coding: utf-8 -*-
  3. """
  4. StreamProxy daemon (based on Livestream daemon)
  5. Provides an API to ContentSources plus stream serving for playback via m3u8 playlists
  6. """
  7. import os, sys, time, re, json
  8. import ConfigParser
  9. import atexit
  10. from signal import SIGTERM
  11. from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
  12. from SocketServer import ThreadingMixIn
  13. from urllib import unquote, quote
  14. import urllib,urlparse
  15. import requests
  16. #import cookielib,urllib2
  17. from ContentSources import ContentSources
  18. from sources.SourceBase import stream_type
  19. import util
  20. from util import streamproxy_decode3, streamproxy_encode3
  21. try:
  22. from requests.packages.urllib3.exceptions import InsecureRequestWarning
  23. requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
  24. except:
  25. pass
  26. HOST_NAME = ""
  27. PORT_NUMBER = 8880
  28. DEBUG = True
  29. DEBUG2 = False
  30. REDIRECT = True
  31. SPLIT_CHAR = "~"
  32. SPLIT_CODE = "%7E"
  33. EQ_CODE = "%3D"
  34. COL_CODE = "%3A"
  35. cunicode = lambda s: s.decode("utf8") if isinstance(s, str) else s
  36. cstr = lambda s: s.encode("utf8") if isinstance(s, unicode) else s
  37. headers2dict = lambda h: dict([l.strip().split(": ") for l in h.strip().splitlines()])
  38. headers0 = headers2dict("""
  39. User-Agent: GStreamer souphttpsrc libsoup/2.52.2
  40. icy-metadata: 1
  41. """)
  42. headers0_ = headers2dict("""
  43. Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8
  44. User-Agent: Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36
  45. """)
  46. cur_directory = os.path.dirname(os.path.realpath(__file__))
  47. slinks = {}
  48. sessions = {}
  49. cfg_file = "streams.cfg"
  50. sources = ContentSources("", cfg_file)
  51. config = ConfigParser.ConfigParser()
  52. proxy_cfg_file = os.path.join(cur_directory, "playstreamproxy.cfg")
  53. if not os.path.exists(proxy_cfg_file):
  54. config.add_section("default")
  55. config["default"]["port"] = 80
  56. config["default"]["host"] = localhost
  57. config["default"]["redirect"] = True
  58. config.write(open.file(proxy_cfg_file, "w"))
  59. else:
  60. config.read(proxy_cfg_file)
  61. class StreamHandler(BaseHTTPRequestHandler):
  62. def do_HEAD(self):
  63. print "**get_head"
  64. self.send_response(200)
  65. self.send_header("Server", "playstreamproxy")
  66. if ".m3u8" in self.path.lower():
  67. ct = "application/vnd.apple.mpegurl"
  68. elif ".ts" in self.path.lower():
  69. ct = "video/MP2T"
  70. elif ".mp4" in self.path.lower():
  71. ct = "video/mp4"
  72. else:
  73. ct = "text/html"
  74. self.send_header("Content-type", ct)
  75. self.end_headers()
  76. def do_GET(self):
  77. """Respond to a GET request"""
  78. #
  79. print "\n\n"+40*"#"+"\nget_url: \n%s" % self.path
  80. cmd = self.path.split("/")[1]
  81. if DEBUG:
  82. print "cmd=%s"%cmd
  83. print "Original request headers + url headers:"
  84. print_headers(self.headers.dict)
  85. self.protocol_version = 'HTTP/1.1'
  86. try:
  87. if cmd == "playstream":
  88. self.fetch_source( self.path)
  89. elif cmd in ["get_content", "get_streams", "get_info", "is_video", "options_read", "options_write"]:
  90. cmd, data, headers, qs = streamproxy_decode3(self.path)
  91. if cmd == "get_content":
  92. content = sources.get_content(data)
  93. elif cmd == "get_streams":
  94. content = sources.get_streams(data)
  95. elif cmd == "get_info":
  96. content = sources.get_info(data)
  97. elif cmd == "is_video":
  98. content = sources.is_video(data)
  99. elif cmd == "options_read":
  100. content = sources.options_read(data)
  101. else:
  102. content = []
  103. txt = json.dumps(content)
  104. self.send_response(200)
  105. self.send_header("Server", "playstreamproxy")
  106. self.send_header("Content-type", "application/json")
  107. self.end_headers()
  108. self.wfile.write(txt)
  109. self.wfile.close()
  110. else:
  111. self.write_error(404)
  112. except Exception as e:
  113. print "Got Exception: ", str(e)
  114. import traceback
  115. traceback.print_exc()
  116. ### Remote server request procedures ###
  117. def fetch_offline(self):
  118. print "** Fetch offline"
  119. self.send_response(200)
  120. self.send_header("Server", "playstreamproxy")
  121. self.send_header("Content-type", "video/mp4")
  122. self.end_headers()
  123. self.wfile.write(open("offline.mp4", "rb").read())
  124. #self.wfile.close()
  125. def redirect_source(self, urlp):
  126. cmd, data, headers, qs = streamproxy_decode3(urlp)
  127. streams = sources.get_streams(data)
  128. if not streams:
  129. self.write_error(500) # TODO
  130. return
  131. stream = streams[0]
  132. url = stream["url"]
  133. headers = stream["headers"] if "headers" in stream else headers0
  134. self.send_response(307)
  135. self.send_header("Location", url)
  136. self.end_headers()
  137. def fetch_source(self, urlp):
  138. cmd, data, headers, qs = streamproxy_decode3(urlp)
  139. if DEBUG:
  140. print "\n***********************************************************"
  141. print "fetch_source: \n%s"%urlp
  142. base_data = hls_base(urlp)
  143. if DEBUG:
  144. print "base_data=", base_data
  145. print "data=", data
  146. if not base_data in slinks:
  147. streams = sources.get_streams(data)
  148. if not streams:
  149. self.write_error(500) # TODO
  150. return
  151. stream = streams[0]
  152. url = stream["url"]
  153. headers = stream["headers"] if "headers" in stream else headers0
  154. base_url = hls_base2(url)
  155. if DEBUG: print "New link, base_url=",base_url
  156. ses = requests.Session()
  157. ses.trust_env = False
  158. slinks[base_data] = {"data": data, "urlp":urlp,"url": url, "base_url": base_url,"session":ses}
  159. else:
  160. ses = slinks[base_data]["session"]
  161. if urlp == slinks[base_data]["urlp"]:
  162. url = slinks[base_data]["url"]
  163. if DEBUG: print "Existing base link", url
  164. else:
  165. url = urlp.replace(base_data, slinks[base_data]["base_url"])
  166. if DEBUG: print "Existing new link", url
  167. if REDIRECT:
  168. print "-->redirect to: " + url
  169. self.send_response(307)
  170. self.send_header("Location", url)
  171. self.end_headers()
  172. #self.wfile.close()
  173. return
  174. headers2 = headers if headers else self.headers.dict
  175. headers2 = del_headers(headers2, ["host"])
  176. r = self.get_page_ses(url,ses,True,headers = headers2)
  177. code = r.status_code
  178. if not code in (200,206): # TODO 206 apstrāde!
  179. self.write_error(code)
  180. return
  181. if code == 206:
  182. print "Code=206"
  183. self.send_response(code)
  184. #headers2 = del_headers(r.headers, ["Content-Encoding"])
  185. self.send_headers(r.headers)
  186. CHUNK_SIZE = 1024 *4
  187. while True:
  188. chunk = r.raw.read(CHUNK_SIZE, decode_content=False)
  189. if not chunk:
  190. break
  191. try:
  192. self.wfile.write(chunk)
  193. except Exception as e:
  194. print "Exception: ", str(e)
  195. self.wfile.close()
  196. return
  197. if DEBUG: print "**File downloaded"
  198. #if "connection" in r.headers and r.headers["connection"] <> "keep-alive":
  199. self.wfile.close()
  200. return
  201. def send_headers(self,headers):
  202. #if DEBUG:
  203. #print "**Return headers: "
  204. #print_headers(headers)
  205. for h in headers:
  206. self.send_header(h, headers[h])
  207. self.end_headers()
  208. def write_error(self,code):
  209. print "***Error, code=%s" % code
  210. self.send_response(code)
  211. #self.send_headers(r.headers)
  212. self.wfile.close() # TODO?
  213. # self.fetch_offline()
  214. def get_page_ses(self,url,ses,stream=True, headers=None):
  215. headers= headers if headers else headers0
  216. ses.headers.update(headers)
  217. if DEBUG:
  218. print "\n\n====================================================\n**get_page_ses\n%s"%url
  219. print "**Server request headers: "
  220. print_headers(ses.headers)
  221. r = ses.get(url, stream=stream, verify=False)
  222. if DEBUG:
  223. print "**Server response:", r.status_code
  224. print "**Server response headers: "
  225. print_headers(r.headers)
  226. return r
  227. def get_page(self,url,headers=None):
  228. if not headers:
  229. headers = headers0
  230. if DEBUG:
  231. print "\n\n====================================================\n**get_page\n%s"%url
  232. print "**Server request headers: "
  233. print_headers(headers)
  234. r = requests.get(url, headers=headers,stream=True)
  235. if DEBUG:
  236. print "**Server response:", r.status_code
  237. print "**Server response headers: "
  238. print_headers(r.headers)
  239. return r
  240. def address_string(self):
  241. host, port = self.client_address[:2]
  242. #return socket.getfqdn(host)
  243. return host
  244. class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
  245. """Handle requests in a separate thread."""
  246. def start(host = HOST_NAME, port = PORT_NUMBER, redirect=None):
  247. global REDIRECT
  248. if redirect:
  249. REDIRECT = redirect
  250. httpd = ThreadedHTTPServer((host, port), StreamHandler)
  251. print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
  252. try:
  253. httpd.serve_forever()
  254. except KeyboardInterrupt:
  255. pass
  256. httpd.server_close()
  257. print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
  258. class Daemon:
  259. """
  260. A generic daemon class.
  261. Usage: subclass the Daemon class and override the run() method
  262. """
  263. def __init__(self, pidfile, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"):
  264. self.stdin = stdin
  265. self.stdout = stdout
  266. self.stderr = stderr
  267. self.pidfile = pidfile
  268. def daemonize(self):
  269. """
  270. do the UNIX double-fork magic, see Stevens' "Advanced
  271. Programming in the UNIX Environment" for details (ISBN 0201563177)
  272. http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
  273. """
  274. try:
  275. pid = os.fork()
  276. if pid > 0:
  277. # exit first parent
  278. sys.exit(0)
  279. except OSError, e:
  280. sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
  281. sys.exit(1)
  282. # decouple from parent environment
  283. os.chdir("/")
  284. os.setsid()
  285. os.umask(0)
  286. # do second fork
  287. try:
  288. pid = os.fork()
  289. if pid > 0:
  290. # exit from second parent
  291. sys.exit(0)
  292. except OSError, e:
  293. sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
  294. sys.exit(1)
  295. # redirect standard file descriptors
  296. sys.stdout.flush()
  297. sys.stderr.flush()
  298. si = file(self.stdin, "r")
  299. so = file(self.stdout, "a+")
  300. se = file(self.stderr, "a+", 0)
  301. os.dup2(si.fileno(), sys.stdin.fileno())
  302. os.dup2(so.fileno(), sys.stdout.fileno())
  303. os.dup2(se.fileno(), sys.stderr.fileno())
  304. # write pidfile
  305. atexit.register(self.delpid)
  306. pid = str(os.getpid())
  307. file(self.pidfile,"w+").write("%s\n" % pid)
  308. def delpid(self):
  309. os.remove(self.pidfile)
  310. def start(self):
  311. """
  312. Start the daemon
  313. """
  314. # Check for a pidfile to see if the daemon already runs
  315. try:
  316. pf = file(self.pidfile,"r")
  317. pid = int(pf.read().strip())
  318. pf.close()
  319. except IOError:
  320. pid = None
  321. if pid:
  322. message = "pidfile %s already exist. Daemon already running?\n"
  323. sys.stderr.write(message % self.pidfile)
  324. sys.exit(1)
  325. # Start the daemon
  326. self.daemonize()
  327. self.run()
  328. def stop(self):
  329. """
  330. Stop the daemon
  331. """
  332. # Get the pid from the pidfile
  333. try:
  334. pf = file(self.pidfile,"r")
  335. pid = int(pf.read().strip())
  336. pf.close()
  337. except IOError:
  338. pid = None
  339. if not pid:
  340. message = "pidfile %s does not exist. Daemon not running?\n"
  341. sys.stderr.write(message % self.pidfile)
  342. return # not an error in a restart
  343. # Try killing the daemon process
  344. try:
  345. while 1:
  346. os.kill(pid, SIGTERM)
  347. time.sleep(0.1)
  348. except OSError, err:
  349. err = str(err)
  350. if err.find("No such process") > 0:
  351. if os.path.exists(self.pidfile):
  352. os.remove(self.pidfile)
  353. else:
  354. print str(err)
  355. sys.exit(1)
  356. def restart(self):
  357. """
  358. Restart the daemon
  359. """
  360. self.stop()
  361. self.start()
  362. def run(self):
  363. """
  364. You should override this method when you subclass Daemon. It will be called after the process has been
  365. daemonized by start() or restart().
  366. """
  367. class ProxyDaemon(Daemon):
  368. def run(self):
  369. start()
  370. def print_headers(headers):
  371. for h in headers:
  372. print "%s: %s"%(h,headers[h])
  373. def del_headers(headers0,tags):
  374. headers = headers0.copy()
  375. for t in tags:
  376. if t in headers:
  377. del headers[t]
  378. if t.lower() in headers:
  379. del headers[t.lower()]
  380. return headers
  381. def hls_base(url):
  382. base = url.split("?")[0]
  383. base = "/".join(base.split("/")[0:3])+ "/"
  384. rest = url.replace(base, "")
  385. return base
  386. def hls_base2(url):
  387. base = url.split("?")[0]
  388. base = "/".join(base.split("/")[0:-1])+ "/"
  389. rest = url.replace(base, "")
  390. return base
  391. if __name__ == "__main__":
  392. daemon = ProxyDaemon("/var/run/playstreamproxy.pid")
  393. if len(sys.argv) == 2:
  394. if "start" == sys.argv[1]:
  395. daemon.start()
  396. elif "stop" == sys.argv[1]:
  397. daemon.stop()
  398. elif "restart" == sys.argv[1]:
  399. daemon.restart()
  400. elif "manualstart" == sys.argv[1]:
  401. start()
  402. else:
  403. print "Unknown command"
  404. sys.exit(2)
  405. sys.exit(0)
  406. else:
  407. print "usage: %s start|stop|restart|manualstart" % sys.argv[0]
  408. sys.exit(2)