#!/usr/bin/python
"""
StreamProxy daemon (based on Livestream daemon)
Ensures persistent cookies, User-Agents and other tricks to play protected HLS/DASH streams
"""
import os
import sys
import time
import atexit
import re

from signal import SIGTERM

from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import urllib
import urlparse
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

HOST_NAME = ""
PORT_NUMBER = 88
DEBUG = True

# Parse a "Name: value" header block into a dict (split only on the first ": ")
headers2dict = lambda h: dict([l.strip().split(": ", 1) for l in h.strip().splitlines()])
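# One requests.Session per stream base URL (see fetch_url2), so cookies set by
# the first playlist request persist across the segment requests that follow.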
sessions = {}

class StreamHandler(BaseHTTPRequestHandler):

    def do_HEAD(self):
        self.send_response(200)
        self.send_header("Server", "StreamProxy")
        self.send_header("Content-type", "text/html")
        self.end_headers()

    def do_GET(self):
        """Respond to a GET request.

        The request path is expected to look like
            /<percent-encoded stream URL>~HeaderName=<percent-encoded value>~...
        """
        SPLIT_CHAR = "~"
        SPLIT_CODE = "%7E"
        EQ_CODE = "%3D"
        COL_CODE = "%3A"

        p = self.path.split(SPLIT_CHAR)
        url = urllib.unquote(p[0][1:])
        url = url.replace(COL_CODE, ":")
        headers = headers2dict("""
            User-Agent: Mozilla/5.0 (iPhone; CPU iPhone OS 8_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A366 Safari/600.1.4
        """)
        if len(p) > 1:
            # Additional request headers are passed as Name=<percent-encoded value>
            for h in p[1:]:
                name, value = h.split("=", 1)
                headers[name] = urllib.unquote(value)
        #self.fetch_offline(self.wfile)
        try:
            self.fetch_url2(self.wfile, url, headers)
        except Exception as e:
            print "Got Exception: ", str(e)

    def fetch_offline(self, wfile):
        """Serve the local placeholder clip (offline.mp4 in the working directory)."""
        self.send_response(200)
        self.send_header("Server", "StreamProxy")
        self.send_header("Content-type", "video/mp4")
        self.end_headers()
        wfile.write(open("offline.mp4", "rb").read())
        wfile.close()

    def fetch_url2(self, wfile, url, headers):
        if DEBUG: print "\n***********************************************************"
        self.log_message("fetch_url: %s", url)

        # Reuse one session per base URL so cookies persist between requests
        # for the same stream (playlist, keys, segments)
        base_url = "/".join(url.split("/")[0:-1])
        if base_url not in sessions:
            sessions[base_url] = requests.Session()
        ses = sessions[base_url]
        ses.headers.update(headers)
        if DEBUG:
            print "**Request headers: "
            for h in ses.headers:
                print h, "=", ses.headers[h]
        r = ses.get(url, stream=True, verify=False)
        code = r.status_code
        if DEBUG:
            print "**Response:", code
            print "**Response headers: "
            for h in r.headers:
                print h, "=", r.headers[h]
        self.send_response(code)
        # Relay the upstream response headers (Connection, Server and User-Agent are dropped)
        if DEBUG: print "**Return headers:"
        for h in r.headers:
            if h.lower() in ("user-agent", "server"):
                continue
            if h.lower() == "connection":
                if DEBUG: print h, " skipped"
                continue
            self.send_header(h, r.headers[h])
            if DEBUG: print h, "=", r.headers[h]
        self.end_headers()

        CHUNK_SIZE = 4 * 1024
        if code == 200:
            # Stream the body through to the client chunk by chunk
            for chunk in r.iter_content(CHUNK_SIZE):
                try:
                    wfile.write(chunk)
                except Exception as e:
                    print "Exception: ", str(e)
                    return
            if DEBUG: print " = file downloaded = "
            time.sleep(2)
        else:
            print code
            self.fetch_offline(wfile)

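# --- Illustrative client-side helper (not part of the original daemon) ---
# A minimal sketch, assuming the proxy runs on localhost, of how a player URL
# for the handler above could be built: the stream URL is percent-encoded and
# extra request headers are appended as ~Name=<percent-encoded value> pairs.
# The function name and the example URLs below are assumptions, for
# illustration only.
def make_proxy_url(stream_url, headers=None, proxy="http://localhost:%s" % PORT_NUMBER):
    parts = [urllib.quote(stream_url, safe="")]
    for name, value in (headers or {}).items():
        parts.append("%s=%s" % (name, urllib.quote(value, safe="")))
    return proxy + "/" + "~".join(parts)

# Example (hypothetical stream):
#   make_proxy_url("http://example.com/live/master.m3u8",
#                  {"Referer": "http://example.com/player"})
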
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle requests in a separate thread."""

def start():
    httpd = ThreadedHTTPServer((HOST_NAME, PORT_NUMBER), StreamHandler)
    print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
    print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)


class Daemon:
    """
    A generic daemon class.
    Usage: subclass the Daemon class and override the run() method
    """
    def __init__(self, pidfile, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"):
        self.stdin = stdin
        self.stdout = stdout
        self.stderr = stderr
        self.pidfile = pidfile

    def daemonize(self):
        """
        Do the UNIX double-fork magic, see Stevens' "Advanced
        Programming in the UNIX Environment" for details (ISBN 0201563177)
        http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
        """
        try:
            pid = os.fork()
            if pid > 0:
                # exit first parent
                sys.exit(0)
        except OSError as e:
            sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)

        # decouple from parent environment
        os.chdir("/")
        os.setsid()
        os.umask(0)

        # do second fork
        try:
            pid = os.fork()
            if pid > 0:
                # exit from second parent
                sys.exit(0)
        except OSError as e:
            sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)

        # redirect standard file descriptors
        sys.stdout.flush()
        sys.stderr.flush()
        si = open(self.stdin, "r")
        so = open(self.stdout, "a+")
        se = open(self.stderr, "a+", 0)
        os.dup2(si.fileno(), sys.stdin.fileno())
        os.dup2(so.fileno(), sys.stdout.fileno())
        os.dup2(se.fileno(), sys.stderr.fileno())

        # write pidfile
        atexit.register(self.delpid)
        pid = str(os.getpid())
        open(self.pidfile, "w+").write("%s\n" % pid)

    def delpid(self):
        os.remove(self.pidfile)

    def start(self):
        """
        Start the daemon
        """
        # Check the pidfile to see if the daemon is already running
        try:
            pf = open(self.pidfile, "r")
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None

        if pid:
            message = "pidfile %s already exists. Daemon already running?\n"
            sys.stderr.write(message % self.pidfile)
            sys.exit(1)

        # Start the daemon
        self.daemonize()
        self.run()

    def stop(self):
        """
        Stop the daemon
        """
        # Get the pid from the pidfile
        try:
            pf = open(self.pidfile, "r")
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None

        if not pid:
            message = "pidfile %s does not exist. Daemon not running?\n"
            sys.stderr.write(message % self.pidfile)
            return  # not an error in a restart

        # Try killing the daemon process
        try:
            while 1:
                os.kill(pid, SIGTERM)
                time.sleep(0.1)
        except OSError as err:
            err = str(err)
            if "No such process" in err:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                print str(err)
                sys.exit(1)

    def restart(self):
        """
        Restart the daemon
        """
        self.stop()
        self.start()

    def run(self):
        """
        You should override this method when you subclass Daemon. It will be called
        after the process has been daemonized by start() or restart().
        """

class ProxyDaemon(Daemon):
    def run(self):
        start()

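# Command-line interface: "start", "stop" and "restart" manage the proxy as a
# background daemon via /var/run/streamproxy.pid, while "manualstart" runs it
# in the foreground. Illustrative invocation (the script name is an assumption):
#   python streamproxy.py manualstart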
if __name__ == "__main__":
    daemon = ProxyDaemon("/var/run/streamproxy.pid")
    if len(sys.argv) == 2:
        if "start" == sys.argv[1]:
            daemon.start()
        elif "stop" == sys.argv[1]:
            daemon.stop()
        elif "restart" == sys.argv[1]:
            daemon.restart()
        elif "manualstart" == sys.argv[1]:
            start()
        else:
            print "Unknown command"
            sys.exit(2)
        sys.exit(0)
    else:
        print "usage: %s start|stop|restart|manualstart" % sys.argv[0]
        sys.exit(2)