@@ -0,0 +1,583 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+"""
+StreamProxy daemon (based on Livestream daemon)
+Maintains persistent cookies, User-Agent headers and other tricks needed to play protected HLS/DASH streams
+"""
+import os
+import sys
+import time
+import atexit
+import re
+import binascii
+
+from signal import SIGTERM
+
+from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
+from SocketServer import ThreadingMixIn
+from urllib import unquote, quote
+import urllib, urlparse
+#import cookielib,urllib2
+import requests
+
+try:
+    from requests.packages.urllib3.exceptions import InsecureRequestWarning
+    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
+except:
+    pass
+
+HOST_NAME = ""
+PORT_NUMBER = 8880
+DEBUG = True
+DEBUG2 = False
+
+SPLIT_CHAR = "~"
+SPLIT_CODE = "%7E"
+EQ_CODE = "%3D"
+COL_CODE = "%3A"
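+# Request path format (as parsed in do_GET below):
+#   /<stream url, ":" escaped as %3A>~header1=value1~header2=value2...
+# "~" separates the target URL from optional extra request headers.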
+headers2dict = lambda h: dict([l.strip().split(": ") for l in h.strip().splitlines()])
+headers0 = headers2dict("""
+icy-metadata: 1
+User-Agent: GStreamer souphttpsrc libsoup/2.52.2
+""")
+sessions = {}
+cur_directory = os.path.dirname(os.path.realpath(__file__))
+sources = None
+slinks = {}
+
+
+class StreamHandler(BaseHTTPRequestHandler):
+
+    def do_HEAD(self):
+        print "**get_head"
+        self.send_response(200)
+        self.send_header("Server", "playstreamproxy")
+        if ".m3u8" in self.path.lower():
+            ct = "application/vnd.apple.mpegurl"
+        elif ".ts" in self.path.lower():
+            ct = "video/MP2T"
+        elif ".mp4" in self.path.lower():
+            ct = "video/mp4"
+        else:
+            ct = "text/html"
+        self.send_header("Content-type", ct)
+        self.end_headers()
+
+    def do_GET(self):
+        """Respond to a GET request"""
+        print "\n\n" + 40 * "#" + "\nget_url: \n%s" % self.path
+        p = self.path.split("~")
+        #url = urllib.unquote(p[0][1:]) # TODO - need to check whether all URLs work
+        urlp = p[0][1:]
+        url = urlp.replace(COL_CODE, ":")
+        #headers = self.headers.dict
+        headers = {} # TODO use the received headers; they can be replaced with the defaults
+        #headers["host"] = urlparse.urlparse(url).hostname
+        if len(p) > 1:
+            for h in p[1:]:
+                k = h.split("=")[0].lower()
+                v = urllib.unquote(h.split("=")[1])
+                headers[k] = v
+        if DEBUG:
+            print "url=%s" % url
+            print "Original request headers + url headers:"
+            print_headers(self.headers.dict)
+        self.protocol_version = 'HTTP/1.1'
+
+        try:
+            if "::" in url: # encoded source link
+                self.fetch_source(urlp, headers)
+            elif ".lattelecom.tv/" in url: # lattelecom.tv hack
+                self.fetch_ltc(url, headers)
+            elif "filmas.lv" in url or "viaplay" in url: # HLS session/decode (filmas.lv, viaplay)
+                self.fetch_url2(url, headers)
+            else: # plain fetch
+                self.fetch_url(url, headers)
+        except Exception as e:
+            print "Got Exception: ", str(e)
+            import traceback
+            traceback.print_exc()
+
+    ### Remote server request procedures ###
+
+    def fetch_offline(self):
+        print "** Fetch offline"
+        self.send_response(200)
+        self.send_header("Server", "playstreamproxy")
+        self.send_header("Content-type", "video/mp4")
+        self.end_headers()
+        self.wfile.write(open("offline.mp4", "rb").read())
+        #self.wfile.close()
+
+    def fetch_source(self, urlp, headers):
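+        # Resolve a "::"-encoded source link via ContentSources, cache the resolved stream URL
+        # and its requests.Session in slinks, and serve follow-up segment requests through it.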
+        if DEBUG:
+            print "\n***********************************************************"
+            print "fetch_source: \n%s" % urlp
+        base_data = hls_base(urlp)
+        data = urllib.unquote_plus(base_data)[:-1]
+        if DEBUG: print "base_data=", base_data
+        if DEBUG: print "data=", data
+        if not base_data in slinks:
+            streams = sources.get_streams(data)
+            if not streams:
+                self.write_error(500) # TODO
+                return
+            url = streams[0]["url"]
+            base_url = hls_base(url)
+            if DEBUG: print "New link, base_url=", base_url
+            ses = requests.Session()
+            ses.trust_env = False
+            slinks[base_data] = {"data": data, "urlp": urlp, "url": url, "base_url": base_url, "session": ses}
+        else:
+            ses = slinks[base_data]["session"]
+            if urlp == slinks[base_data]["urlp"]:
+                url = slinks[base_data]["url"]
+                if DEBUG: print "Existing base link", url
+            else:
+                url = urlp.replace(base_data, slinks[base_data]["base_url"])
+                if DEBUG: print "Existing new link", url
+        r = self.get_page_ses(url, ses, True, headers=headers)
+        code = r.status_code
+        if not code in (200, 206): # TODO try get_streams again
+            self.write_error(code)
+            return
+        self.send_response(code)
+        self.send_headers(r.headers)
+        CHUNK_SIZE = 1024 * 4
+        for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
+            try:
+                self.wfile.write(chunk)
+            except Exception as e:
+                print "Exception: ", str(e)
+                self.wfile.close()
+                return
+        if DEBUG: print "**File downloaded"
+        if "connection" in r.headers and r.headers["connection"] != "keep-alive":
+            self.wfile.close()
+        return
+
+
+    def fetch_url(self, url, headers):
+        if DEBUG:
+            print "\n***********************************************************"
+            print "fetch_url: \n%s" % url
+        r = self.get_page(url, headers=headers)
+        code = r.status_code
+        if not code in (200, 206):
+            self.write_error(code)
+            return
+        self.send_response(code)
+        self.send_headers(r.headers)
+        CHUNK_SIZE = 1024 * 4
+        for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
+            try:
+                self.wfile.write(chunk)
+            except Exception as e:
+                print "Exception: ", str(e)
+                self.wfile.close()
+                return
+        if DEBUG: print "**File downloaded"
+        if "connection" in r.headers and r.headers["connection"] != "keep-alive":
+            self.wfile.close()
+        return
+
+    def fetch_ltc(self, url, headers):
+        "lattelecom.tv hack (have to update chunklist after each 6 min)"
+        if DEBUG:
+            print "\n\n***********************************************************"
+            print "fetch_ltc: \n%s" % url
+        base_url = hls_base(url)
+        if DEBUG: print "base_url=", base_url
+        if base_url not in sessions:
+            if DEBUG: print "New session"
+            sessions[base_url] = {}
+            sessions[base_url]["session"] = requests.Session()
+            sessions[base_url]["session"].trust_env = False
+            sessions[base_url]["session"].headers.update(headers0)
+            sessions[base_url]["playlist"] = ""
+            sessions[base_url]["chunklist"] = []
+
+        # change ts file to valid one media_w215689190_33.ts?
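+        # media_w<id>_<N>.ts requests are remapped to entry N of the cached chunklist;
+        # the chunklist is re-read from the stored playlist whenever the original URL stops working.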
+        tsfile = re.search("media_\w+_(\d+)\.ts", url, re.IGNORECASE)
+        if tsfile and sessions[base_url]["chunklist"]:
+            tnum = int(tsfile.group(1))
+            url2 = sessions[base_url]["chunklist"][tnum]
+            if not url2.startswith("http"):
+                url2 = base_url + url2
+            url = url2
+            if DEBUG: print "[playstreamproxy] url changed to ", url
+
+        ### get_page ###
+        ses = sessions[base_url]["session"]
+        #ses.headers.update(headers0)
+        ses.headers.update(headers)
+        # ses.headers["Connection"]="Keep-Alive"
+        r = self.get_page_ses(url, ses)
+        code = r.status_code
+
+        if not (code in (200, 206)) and tsfile:
+            # update chunklist
+            r2 = self.get_page(sessions[base_url]["playlist"])
+            streams = re.findall(r"#EXT-X-STREAM-INF:.*?BANDWIDTH=(\d+).*?\n(.+?)$", r2.content, re.IGNORECASE | re.MULTILINE)
+            if streams:
+                streams = sorted(streams, key=lambda item: int(item[0]), reverse=True)
+                chunklist = streams[0][1]
+                if not chunklist.startswith("http"):
+                    chunklist = base_url + chunklist
+            else:
+                self.write_error(r.status_code)
+                return
+            print "[playstreamproxy] trying to update chunklist", chunklist
+            r3 = self.get_page_ses(chunklist, ses, True)
+            ts_list = re.findall(r"#EXTINF:.*?\n(.+?)$", r3.content, re.IGNORECASE | re.MULTILINE)
+            sessions[base_url]["chunklist"] = ts_list
+            tnum = int(tsfile.group(1))
+            url2 = sessions[base_url]["chunklist"][tnum]
+            if not url2.startswith("http"):
+                url2 = base_url + url2
+            r = self.get_page_ses(url2, ses, True)
+            if not r.status_code in (200, 206):
+                self.write_error(r.status_code)
+                return
+        elif not r.status_code in (200, 206):
+            self.write_error(r.status_code)
+            return
+
+        if "playlist.m3u8" in url:
+            sessions[base_url]["playlist"] = url
+
+        ### Start of return forming and sending
+        self.send_response(200)
+        #headers2 = del_headers(r.headers,["Content-Encoding",'Transfer-Encoding',"Connection",'content-range',"range"])
+        headers2 = {"server": "playstreamproxy", "content-type": "text/html"}
+
+        if DEBUG: print "\n** Return content"
+        headers2["content-type"] = r.headers["content-type"]
+        if "content-length" in r.headers:
+            headers2["content-length"] = r.headers["content-length"]
+        self.send_headers(r.headers)
+        CHUNK_SIZE = 4 * 1024
+        for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
+            try:
+                #print "#",
+                self.wfile.write(chunk)
+            except Exception as e:
+                print "Exception: ", str(e)
+                return
+        if DEBUG: print "File downloaded = "
+        self.wfile.close()
+        #time.sleep(1)
+        return
+
+
+    def fetch_url2(self, url, headers):
+        if DEBUG:
+            print "\n***********************************************************"
+            print "fetch_url2: \n%s" % url
+        base_url = hls_base(url)
+        if DEBUG: print "base_url=", base_url
+        if base_url not in sessions:
+            if DEBUG: print "New session"
+            sessions[base_url] = {}
+            sessions[base_url]["session"] = requests.Session()
+            sessions[base_url]["session"].trust_env = False
+            sessions[base_url]["session"].headers.update(headers0)
+            sessions[base_url]["key"] = binascii.a2b_hex(headers["key"]) if "key" in headers and headers["key"] else None
+        ses = sessions[base_url]["session"]
+        ses.trust_env = False
+        key = sessions[base_url]["key"]
+        #ses.headers.clear()
+        ses.headers.update(headers)
+        r = self.get_page_ses(url, ses, stream=False)
+        code = r.status_code
+        if not (code in (200, 206)):
+            self.write_error(r.status_code)
+            return
+
+        ### Start of return forming and sending
+        self.send_response(200)
+        #headers2 = del_headers(r.headers,["Content-Encoding",'Transfer-Encoding',"Connection",'content-range',"range"])
+        headers2 = {"server": "playstreamproxy", "content-type": "text/html"}
+
+        # Content-Type: application/vnd.apple.mpegurl (encrypted)
+        if r.headers["content-type"] == "application/vnd.apple.mpegurl" and key:
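+            # Rewrite the playlist: make segment URIs relative and drop the #EXT-X-KEY line,
+            # so the client fetches segments through this proxy and receives them already decrypted.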
+            content = r.content
+            content = r.content.replace(base_url, "")
+            content = re.sub("#EXT-X-KEY:METHOD=AES-128.+\n", "", content, 0, re.IGNORECASE | re.MULTILINE)
+            headers2["content-type"] = "application/vnd.apple.mpegurl"
+            headers2["content-length"] = "%s" % len(content)
+            r.headers["content-length"] = "%s" % len(content)
+            #headers2['content-range'] = 'bytes 0-%s/%s'%(len(content)-1,len(content))
+            self.send_headers(headers2)
+            #self.send_headers(r.headers)
+            self.wfile.write(content)
+            self.wfile.close()
+
+        # Content-Type: video/MP2T (encrypted)
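+        # AES-128-CBC: the first 16 bytes of the segment are the IV, the rest is ciphertext;
+        # the key arrives hex-encoded in the "key" header appended to the proxy URL.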
+        elif r.headers["content-type"] == "video/MP2T" and key:
+            print "Decode video/MP2T"
+            content = r.content
+            from Crypto.Cipher import AES
+            iv = content[:16]
+            d = AES.new(key, AES.MODE_CBC, iv)
+            content = d.decrypt(content[16:])
+            headers2["content-type"] = "video/MP2T"
+            headers2["content-length"] = "%s" % (len(content))
+            #headers2['content-range'] = 'bytes 0-%s/%s' % (len(content) - 1, len(content))
+            print content[0:16]
+            print "Finish decode"
+            self.send_headers(headers2)
+            self.wfile.write(content)
+            self.wfile.close()
+
+        else:
+            if DEBUG: print "Return regular content"
+            headers2["content-type"] = r.headers["content-type"]
+            if "content-length" in r.headers:
+                headers2["content-length"] = r.headers["content-length"]
+            self.send_headers(r.headers)
+            #self.send_headers(headers2)
+            CHUNK_SIZE = 4 * 1024
+            for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
+                try:
+                    #print "#",
+                    self.wfile.write(chunk)
+                except Exception as e:
+                    print "Exception: ", str(e)
+                    return
+            if DEBUG: print "File downloaded = "
+            if "connection" in r.headers and r.headers["connection"] != "keep-alive":
+                self.wfile.close()
+            #time.sleep(1)
+            return
+
+    def send_headers(self, headers):
+        #if DEBUG:
+            #print "**Return headers: "
+            #print_headers(headers)
+        for h in headers:
+            self.send_header(h, headers[h])
+        self.end_headers()
+
+    def write_error(self, code):
+        print "***Error, code=%s" % code
+        self.send_response(code)
+        #self.send_headers(r.headers)
+        self.wfile.close() # TODO?
+        # self.fetch_offline()
+
+    def get_page_ses(self, url, ses, stream=True, headers=None):
+        headers = headers if headers else headers0
+        ses.headers.update(headers)
+        if DEBUG:
+            print "\n\n====================================================\n**get_page_ses\n%s" % url
+            print "**Server request headers: "
+            print_headers(ses.headers)
+        r = ses.get(url, stream=stream, verify=False)
+        if DEBUG:
+            print "**Server response:", r.status_code
+            print "**Server response headers: "
+            print_headers(r.headers)
+        return r
+
+    def get_page(self, url, headers=None):
+        if not headers:
+            headers = headers0
+        if DEBUG:
+            print "\n\n====================================================\n**get_page\n%s" % url
+            print "**Server request headers: "
+            print_headers(headers)
+        r = requests.get(url, headers=headers, stream=True)
+        if DEBUG:
+            print "**Server response:", r.status_code
+            print "**Server response headers: "
+            print_headers(r.headers)
+        return r
+
+    def address_string(self):
+        host, port = self.client_address[:2]
+        #return socket.getfqdn(host)
+        return host
+
+class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
+    """Handle requests in a separate thread."""
+
+def start(host=HOST_NAME, port=PORT_NUMBER):
+    import ContentSources, util
+    global sources
+    sources = ContentSources.ContentSources(os.path.join(cur_directory, "sources"))
+    httpd = ThreadedHTTPServer((host, port), StreamHandler)
+    print time.asctime(), "Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER)
+    try:
+        httpd.serve_forever()
+    except KeyboardInterrupt:
+        pass
+    httpd.server_close()
+    print time.asctime(), "Server Stops - %s:%s" % (HOST_NAME, PORT_NUMBER)
+
+
+class Daemon:
+    """
+    A generic daemon class.
+    Usage: subclass the Daemon class and override the run() method
+    """
+    def __init__(self, pidfile, stdin="/dev/null", stdout="/dev/null", stderr="/dev/null"):
+        self.stdin = stdin
+        self.stdout = stdout
+        self.stderr = stderr
+        self.pidfile = pidfile
+
+    def daemonize(self):
+        """
+        do the UNIX double-fork magic, see Stevens' "Advanced
+        Programming in the UNIX Environment" for details (ISBN 0201563177)
+        http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
+        """
+        try:
+            pid = os.fork()
+            if pid > 0:
+                # exit first parent
+                sys.exit(0)
+        except OSError, e:
+            sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
+            sys.exit(1)
+
+        # decouple from parent environment
+        os.chdir("/")
+        os.setsid()
+        os.umask(0)
+
+        # do second fork
+        try:
+            pid = os.fork()
+            if pid > 0:
+                # exit from second parent
+                sys.exit(0)
+        except OSError, e:
+            sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
+            sys.exit(1)
+
+        # redirect standard file descriptors
+        sys.stdout.flush()
+        sys.stderr.flush()
+        si = file(self.stdin, "r")
+        so = file(self.stdout, "a+")
+        se = file(self.stderr, "a+", 0)
+        os.dup2(si.fileno(), sys.stdin.fileno())
+        os.dup2(so.fileno(), sys.stdout.fileno())
+        os.dup2(se.fileno(), sys.stderr.fileno())
+
+        # write pidfile
+        atexit.register(self.delpid)
+        pid = str(os.getpid())
+        file(self.pidfile, "w+").write("%s\n" % pid)
+
+    def delpid(self):
+        os.remove(self.pidfile)
+
+    def start(self):
+        """
+        Start the daemon
+        """
+        # Check for a pidfile to see if the daemon already runs
+        try:
+            pf = file(self.pidfile, "r")
+            pid = int(pf.read().strip())
+            pf.close()
+        except IOError:
+            pid = None
+
+        if pid:
+            message = "pidfile %s already exists. Daemon already running?\n"
+            sys.stderr.write(message % self.pidfile)
+            sys.exit(1)
+
+        # Start the daemon
+        self.daemonize()
+        self.run()
+
+    def stop(self):
+        """
+        Stop the daemon
+        """
+        # Get the pid from the pidfile
+        try:
+            pf = file(self.pidfile, "r")
+            pid = int(pf.read().strip())
+            pf.close()
+        except IOError:
+            pid = None
+
+        if not pid:
+            message = "pidfile %s does not exist. Daemon not running?\n"
+            sys.stderr.write(message % self.pidfile)
+            return # not an error in a restart
+
+        # Try killing the daemon process
+        try:
+            while 1:
+                os.kill(pid, SIGTERM)
+                time.sleep(0.1)
+        except OSError, err:
+            err = str(err)
+            if err.find("No such process") > 0:
+                if os.path.exists(self.pidfile):
+                    os.remove(self.pidfile)
+            else:
+                print str(err)
+                sys.exit(1)
+
+    def restart(self):
+        """
+        Restart the daemon
+        """
+        self.stop()
+        self.start()
+
+    def run(self):
+        """
+        You should override this method when you subclass Daemon. It will be called after the process has been
+        daemonized by start() or restart().
+        """
+
+class ProxyDaemon(Daemon):
+    def run(self):
+        start()
+
+def print_headers(headers):
+    for h in headers:
+        print "%s: %s" % (h, headers[h])
+
+def del_headers(headers0, tags):
+    headers = headers0.copy()
+    for t in tags:
+        if t in headers:
+            del headers[t]
+        if t.lower() in headers:
+            del headers[t.lower()]
+    return headers
+
+def hls_base(url):
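+    # Strip the query string and the last path component, keeping the trailing slash
+    # (e.g. http://host/a/b/chunk.m3u8?x=1 -> http://host/a/b/).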
+    url2 = url.split("?")[0]
+    url2 = "/".join(url2.split("/")[0:-1]) + "/"
+    return url2
+
+if __name__ == "__main__":
+    daemon = ProxyDaemon("/var/run/playstreamproxy.pid")
+    if len(sys.argv) == 2:
+        if "start" == sys.argv[1]:
+            daemon.start()
+        elif "stop" == sys.argv[1]:
+            daemon.stop()
+        elif "restart" == sys.argv[1]:
+            daemon.restart()
+        elif "manualstart" == sys.argv[1]:
+            start()
+        else:
+            print "Unknown command"
+            sys.exit(2)
+        sys.exit(0)
+    else:
+        print "usage: %s start|stop|restart|manualstart" % sys.argv[0]
+        sys.exit(2)
+