Plex plugin to play various online streams (mostly Latvian).

adapters.py 19KB

# -*- coding: utf-8 -*-
"""
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
"""

import os.path
import socket

from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .packages.urllib3.util.retry import Retry
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
                    prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
                    select_proxy, to_native_string)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import ClosedPoolError
from .packages.urllib3.exceptions import ConnectTimeoutError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import NewConnectionError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .packages.urllib3.exceptions import ProtocolError
from .packages.urllib3.exceptions import ReadTimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import ResponseError
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                         ProxyError, RetryError, InvalidSchema)
from .auth import _basic_auth_str

try:
    from .packages.urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")

DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None
class BaseAdapter(object):
    """The Base Transport Adapter"""

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Cleans up adapter specific items."""
        raise NotImplementedError
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
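    # Usage sketch: ``max_retries`` may be a plain int or a urllib3 ``Retry``
    # instance for finer-grained control over retry conditions (the values
    # below are illustrative, not taken from this project):
    #
    #     from requests.packages.urllib3.util.retry import Retry
    #     retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[502, 503])
    #     adapter = HTTPAdapter(max_retries=retries)
    #
    # ``Retry.from_int()`` above covers the plain-integer case.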
    def __getstate__(self):
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)
    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)
    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        :rtype: requests.packages.urllib3.ProxyManager
        """
        if proxy in self.proxy_manager:
            manager = self.proxy_manager[proxy]
        elif proxy.lower().startswith('socks'):
            username, password = get_auth_from_url(proxy)
            manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                proxy,
                username=username,
                password=password,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs
            )
        else:
            proxy_headers = self.proxy_headers(proxy)
            manager = self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return manager
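    # Note: proxy managers are cached per proxy URL above, so repeated requests
    # through the same proxy reuse one pool. A SOCKS proxy is selected purely by
    # its URL scheme, e.g. 'socks5://user:pass@host:1080' (an illustrative URL);
    # without the optional SOCKS dependencies, the ImportError fallback at the
    # top of the module raises InvalidSchema instead.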
    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Whether we should actually verify the certificate.
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc:
                raise Exception("Could not find a suitable SSL CA certificate bundle.")

            conn.cert_reqs = 'CERT_REQUIRED'

            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert
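    # Illustration of the mapping implemented in cert_verify (paths are examples):
    #   verify=True                        -> CA bundle from DEFAULT_CA_BUNDLE_PATH
    #   verify='/path/to/ca.pem'           -> that file (or directory) used as the CA source
    #   verify=False                       -> 'CERT_NONE', no certificate checking
    #   cert=('client.crt', 'client.key')  -> client certificate plus private key
    #   cert='client.pem'                  -> single combined client certificate file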
    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        :rtype: requests.Response
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response
    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        :rtype: requests.packages.urllib3.ConnectionPool
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn
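    # Example of the Requests-style proxies mapping consumed by select_proxy()
    # (hosts and ports are illustrative):
    #
    #     proxies = {'http': 'http://10.0.0.1:3128', 'https': 'http://10.0.0.1:3128'}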
    def close(self):
        """Disposes of any internal state.

        Currently, this closes the PoolManager and any active ProxyManager,
        which closes any pooled connections.
        """
        self.poolmanager.clear()
        for proxy in self.proxy_manager.values():
            proxy.clear()
    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        :rtype: str
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme

        is_proxied_http_request = (proxy and scheme != 'https')
        using_socks_proxy = False
        if proxy:
            proxy_scheme = urlparse(proxy).scheme.lower()
            using_socks_proxy = proxy_scheme.startswith('socks')

        url = request.path_url
        if is_proxied_http_request and not using_socks_proxy:
            url = urldefragauth(request.url)

        return url
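    # Illustration (example URL): 'http://example.com/path?q=1' sent through an
    # HTTP proxy uses the absolute URL on the wire (minus fragment and auth, via
    # urldefragauth); without a proxy, with a SOCKS proxy, or for HTTPS requests
    # (tunnelled via CONNECT), only the path portion '/path?q=1' is sent.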
    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass
    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        :rtype: dict
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers
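    # Illustration: credentials embedded in the proxy URL become a basic auth
    # header, e.g. 'http://user:secret@proxy.local:3128' (example credentials)
    # yields {'Proxy-Authorization': 'Basic dXNlcjpzZWNyZXQ='}.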
    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7+ versions, use buffering of HTTP
                        # responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 2.6 versions and back
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            if isinstance(e.reason, _ProxyError):
                raise ProxyError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
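
A minimal usage sketch of this adapter, assuming the bundled copy is importable as requests; the URL, pool sizes and timeouts below are illustrative and not taken from the plugin:

    import requests
    from requests.adapters import HTTPAdapter

    session = requests.Session()

    # Retry failed connections up to 3 times and keep a slightly larger pool;
    # requests whose data already reached the server are never retried
    # (see the max_retries note in HTTPAdapter above).
    adapter = HTTPAdapter(max_retries=3, pool_connections=10, pool_maxsize=20)
    session.mount('http://', adapter)
    session.mount('https://', adapter)

    # (connect timeout, read timeout) tuple, handled in HTTPAdapter.send().
    response = session.get('https://example.com/playlist.m3u8', timeout=(3.05, 27))
    print(response.status_code, response.headers.get('Content-Type'))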