Browse Source

salabots lmt.lv u.c.

Ivars 5 years ago
parent
commit
b1add8217b
14 changed files with 968 additions and 5058 deletions
  1. 3
    0
      changelog.md
  2. 0
    36
      diskcache/__init__.py
  3. 0
    1
      diskcache/cli.py
  4. 0
    1947
      diskcache/core.py
  5. 0
    595
      diskcache/fanout.py
  6. 0
    105
      diskcache/memo.py
  7. 0
    1348
      diskcache/persistent.py
  8. 0
    78
      diskcache/stampede.py
  9. 7
    2
      ltcproxy.py
  10. BIN
      picons/shortcut2.png
  11. 4
    0
      playstreamproxy.py
  12. 928
    919
      project.wpr
  13. 26
    4
      sources/lmt.py
  14. 0
    23
      test_url.py

+ 3
- 0
changelog.md View File

@@ -1,3 +1,6 @@
1
+**06.10.2019**
2
+- salabots lmt.lv (facebook video)
3
+
1 4
 **08.07.2019**
2 5
 - izmaiņas GITā (autonoms projekts)
3 6
 

+ 0
- 36
diskcache/__init__.py View File

@@ -1,36 +0,0 @@
1
-"DiskCache: disk and file backed cache."
2
-
3
-from .core import Cache, Disk, UnknownFileWarning, EmptyDirWarning, Timeout
4
-from .core import DEFAULT_SETTINGS, ENOVAL, EVICTION_POLICY, UNKNOWN
5
-from .fanout import FanoutCache
6
-from .persistent import Deque, Index
7
-
8
-__all__ = [
9
-    'Cache',
10
-    'Disk',
11
-    'UnknownFileWarning',
12
-    'EmptyDirWarning',
13
-    'Timeout',
14
-    'DEFAULT_SETTINGS',
15
-    'ENOVAL',
16
-    'EVICTION_POLICY',
17
-    'UNKNOWN',
18
-    'FanoutCache',
19
-    'Deque',
20
-    'Index',
21
-]
22
-
23
-try:
24
-    from .djangocache import DjangoCache  # pylint: disable=wrong-import-position
25
-    __all__.append('DjangoCache')
26
-except Exception:  # pylint: disable=broad-except
27
-    # Django not installed or not setup so ignore.
28
-    pass
29
-
30
-
31
-__title__ = 'diskcache'
32
-__version__ = '3.1.1'
33
-__build__ = 0x030101
34
-__author__ = 'Grant Jenks'
35
-__license__ = 'Apache 2.0'
36
-__copyright__ = 'Copyright 2016-2018 Grant Jenks'

+ 0
- 1
diskcache/cli.py View File

@@ -1 +0,0 @@
1
-"Command line interface to disk cache."

+ 0
- 1947
diskcache/core.py
File diff suppressed because it is too large
View File


+ 0
- 595
diskcache/fanout.py View File

@@ -1,595 +0,0 @@
1
-"Fanout cache automatically shards keys and values."
2
-
3
-import itertools as it
4
-import os.path as op
5
-import sqlite3
6
-import time
7
-
8
-from .core import ENOVAL, DEFAULT_SETTINGS, Cache, Disk, Timeout
9
-from .memo import memoize
10
-from .persistent import Deque, Index
11
-
12
-
13
-class FanoutCache(object):
14
-    "Cache that shards keys and values."
15
-    def __init__(self, directory, shards=8, timeout=0.010, disk=Disk,
16
-                 **settings):
17
-        """Initialize cache instance.
18
-
19
-        :param str directory: cache directory
20
-        :param int shards: number of shards to distribute writes
21
-        :param float timeout: SQLite connection timeout
22
-        :param disk: `Disk` instance for serialization
23
-        :param settings: any of `DEFAULT_SETTINGS`
24
-
25
-        """
26
-        self._directory = directory
27
-        self._count = shards
28
-        default_size_limit = DEFAULT_SETTINGS['size_limit']
29
-        size_limit = settings.pop('size_limit', default_size_limit) / shards
30
-        self._shards = tuple(
31
-            Cache(
32
-                op.join(directory, '%03d' % num),
33
-                timeout=timeout,
34
-                disk=disk,
35
-                size_limit=size_limit,
36
-                **settings
37
-            )
38
-            for num in range(shards)
39
-        )
40
-        self._hash = self._shards[0].disk.hash
41
-        self._deques = {}
42
-        self._indexes = {}
43
-
44
-
45
-    @property
46
-    def directory(self):
47
-        """Cache directory."""
48
-        return self._directory
49
-
50
-
51
-    def __getattr__(self, name):
52
-        return getattr(self._shards[0], name)
53
-
54
-
55
-    def set(self, key, value, expire=None, read=False, tag=None, retry=False):
56
-        """Set `key` and `value` item in cache.
57
-
58
-        When `read` is `True`, `value` should be a file-like object opened
59
-        for reading in binary mode.
60
-
61
-        If database timeout occurs then fails silently unless `retry` is set to
62
-        `True` (default `False`).
63
-
64
-        :param key: key for item
65
-        :param value: value for item
66
-        :param float expire: seconds until the key expires
67
-            (default None, no expiry)
68
-        :param bool read: read value as raw bytes from file (default False)
69
-        :param str tag: text to associate with key (default None)
70
-        :param bool retry: retry if database timeout expires (default False)
71
-        :return: True if item was set
72
-
73
-        """
74
-        index = self._hash(key) % self._count
75
-        set_func = self._shards[index].set
76
-
77
-        while True:
78
-            try:
79
-                return set_func(key, value, expire, read, tag)
80
-            except Timeout:
81
-                if retry:
82
-                    continue
83
-                else:
84
-                    return False
85
-
86
-
87
-    def __setitem__(self, key, value):
88
-        """Set `key` and `value` item in cache.
89
-
90
-        Calls :func:`FanoutCache.set` internally with `retry` set to `True`.
91
-
92
-        :param key: key for item
93
-        :param value: value for item
94
-
95
-        """
96
-        self.set(key, value, retry=True)
97
-
98
-
99
-    def add(self, key, value, expire=None, read=False, tag=None, retry=False):
100
-        """Add `key` and `value` item to cache.
101
-
102
-        Similar to `set`, but only add to cache if key not present.
103
-
104
-        This operation is atomic. Only one concurrent add operation for given
105
-        key from separate threads or processes will succeed.
106
-
107
-        When `read` is `True`, `value` should be a file-like object opened
108
-        for reading in binary mode.
109
-
110
-        :param key: key for item
111
-        :param value: value for item
112
-        :param float expire: seconds until the key expires
113
-            (default None, no expiry)
114
-        :param bool read: read value as bytes from file (default False)
115
-        :param str tag: text to associate with key (default None)
116
-        :param bool retry: retry if database timeout expires (default False)
117
-        :return: True if item was added
118
-
119
-        """
120
-        index = self._hash(key) % self._count
121
-        add_func = self._shards[index].add
122
-
123
-        while True:
124
-            try:
125
-                return add_func(key, value, expire, read, tag)
126
-            except Timeout:
127
-                if retry:
128
-                    continue
129
-                else:
130
-                    return False
131
-
132
-
133
-    def incr(self, key, delta=1, default=0, retry=False):
134
-        """Increment value by delta for item with key.
135
-
136
-        If key is missing and default is None then raise KeyError. Else if key
137
-        is missing and default is not None then use default for value.
138
-
139
-        Operation is atomic. All concurrent increment operations will be
140
-        counted individually.
141
-
142
-        Assumes value may be stored in a SQLite column. Most builds that target
143
-        machines with 64-bit pointer widths will support 64-bit signed
144
-        integers.
145
-
146
-        :param key: key for item
147
-        :param int delta: amount to increment (default 1)
148
-        :param int default: value if key is missing (default 0)
149
-        :param bool retry: retry if database timeout expires (default False)
150
-        :return: new value for item on success else None
151
-        :raises KeyError: if key is not found and default is None
152
-
153
-        """
154
-        index = self._hash(key) % self._count
155
-        incr_func = self._shards[index].incr
156
-
157
-        while True:
158
-            try:
159
-                return incr_func(key, delta, default)
160
-            except Timeout:
161
-                if retry:
162
-                    continue
163
-                else:
164
-                    return None
165
-
166
-
167
-    def decr(self, key, delta=1, default=0, retry=False):
168
-        """Decrement value by delta for item with key.
169
-
170
-        If key is missing and default is None then raise KeyError. Else if key
171
-        is missing and default is not None then use default for value.
172
-
173
-        Operation is atomic. All concurrent decrement operations will be
174
-        counted individually.
175
-
176
-        Unlike Memcached, negative values are supported. Value may be
177
-        decremented below zero.
178
-
179
-        Assumes value may be stored in a SQLite column. Most builds that target
180
-        machines with 64-bit pointer widths will support 64-bit signed
181
-        integers.
182
-
183
-        :param key: key for item
184
-        :param int delta: amount to decrement (default 1)
185
-        :param int default: value if key is missing (default 0)
186
-        :param bool retry: retry if database timeout expires (default False)
187
-        :return: new value for item on success else None
188
-        :raises KeyError: if key is not found and default is None
189
-
190
-        """
191
-        return self.incr(key, -delta, default, retry)
192
-
193
-
194
-    def get(self, key, default=None, read=False, expire_time=False, tag=False,
195
-            retry=False):
196
-        """Retrieve value from cache. If `key` is missing, return `default`.
197
-
198
-        If database timeout occurs then returns `default` unless `retry` is set
199
-        to `True` (default `False`).
200
-
201
-        :param key: key for item
202
-        :param default: return value if key is missing (default None)
203
-        :param bool read: if True, return file handle to value
204
-            (default False)
205
-        :param float expire_time: if True, return expire_time in tuple
206
-            (default False)
207
-        :param tag: if True, return tag in tuple (default False)
208
-        :param bool retry: retry if database timeout expires (default False)
209
-        :return: value for item if key is found else default
210
-
211
-        """
212
-        index = self._hash(key) % self._count
213
-        get_func = self._shards[index].get
214
-
215
-        while True:
216
-            try:
217
-                return get_func(
218
-                    key, default=default, read=read, expire_time=expire_time,
219
-                    tag=tag,
220
-                )
221
-            except (Timeout, sqlite3.OperationalError):
222
-                if retry:
223
-                    continue
224
-                else:
225
-                    return default
226
-
227
-
228
-    def __getitem__(self, key):
229
-        """Return corresponding value for `key` from cache.
230
-
231
-        Calls :func:`FanoutCache.get` internally with `retry` set to `True`.
232
-
233
-        :param key: key for item
234
-        :return: value for item
235
-        :raises KeyError: if key is not found
236
-
237
-        """
238
-        value = self.get(key, default=ENOVAL, retry=True)
239
-
240
-        if value is ENOVAL:
241
-            raise KeyError(key)
242
-
243
-        return value
244
-
245
-
246
-    def read(self, key):
247
-        """Return file handle corresponding to `key` from cache.
248
-
249
-        :param key: key for item
250
-        :return: file open for reading in binary mode
251
-        :raises KeyError: if key is not found
252
-
253
-        """
254
-        handle = self.get(key, default=ENOVAL, read=True, retry=True)
255
-        if handle is ENOVAL:
256
-            raise KeyError(key)
257
-        return handle
258
-
259
-
260
-    def __contains__(self, key):
261
-        """Return `True` if `key` matching item is found in cache.
262
-
263
-        :param key: key for item
264
-        :return: True if key is found
265
-
266
-        """
267
-        index = self._hash(key) % self._count
268
-        return key in self._shards[index]
269
-
270
-
271
-    def pop(self, key, default=None, expire_time=False, tag=False,
272
-            retry=False):
273
-        """Remove corresponding item for `key` from cache and return value.
274
-
275
-        If `key` is missing, return `default`.
276
-
277
-        Operation is atomic. Concurrent operations will be serialized.
278
-
279
-        :param key: key for item
280
-        :param default: return value if key is missing (default None)
281
-        :param float expire_time: if True, return expire_time in tuple
282
-            (default False)
283
-        :param tag: if True, return tag in tuple (default False)
284
-        :param bool retry: retry if database timeout expires (default False)
285
-        :return: value for item if key is found else default
286
-
287
-        """
288
-        index = self._hash(key) % self._count
289
-        pop_func = self._shards[index].pop
290
-
291
-        while True:
292
-            try:
293
-                return pop_func(
294
-                    key, default=default, expire_time=expire_time, tag=tag,
295
-                )
296
-            except Timeout:
297
-                if retry:
298
-                    continue
299
-                else:
300
-                    return default
301
-
302
-
303
-    def delete(self, key, retry=False):
304
-        """Delete corresponding item for `key` from cache.
305
-
306
-        Missing keys are ignored.
307
-
308
-        If database timeout occurs then fails silently unless `retry` is set to
309
-        `True` (default `False`).
310
-
311
-        :param key: key for item
312
-        :param bool retry: retry if database timeout expires (default False)
313
-        :return: True if item was deleted
314
-
315
-        """
316
-        index = self._hash(key) % self._count
317
-        del_func = self._shards[index].__delitem__
318
-
319
-        while True:
320
-            try:
321
-                return del_func(key)
322
-            except Timeout:
323
-                if retry:
324
-                    continue
325
-                else:
326
-                    return False
327
-            except KeyError:
328
-                return False
329
-
330
-
331
-    def __delitem__(self, key):
332
-        """Delete corresponding item for `key` from cache.
333
-
334
-        Calls :func:`FanoutCache.delete` internally with `retry` set to `True`.
335
-
336
-        :param key: key for item
337
-        :raises KeyError: if key is not found
338
-
339
-        """
340
-        deleted = self.delete(key, retry=True)
341
-
342
-        if not deleted:
343
-            raise KeyError(key)
344
-
345
-
346
-    memoize = memoize
347
-
348
-
349
-    def check(self, fix=False):
350
-        """Check database and file system consistency.
351
-
352
-        Intended for use in testing and post-mortem error analysis.
353
-
354
-        While checking the cache table for consistency, a writer lock is held
355
-        on the database. The lock blocks other cache clients from writing to
356
-        the database. For caches with many file references, the lock may be
357
-        held for a long time. For example, local benchmarking shows that a
358
-        cache with 1,000 file references takes ~60ms to check.
359
-
360
-        :param bool fix: correct inconsistencies
361
-        :return: list of warnings
362
-        :raises Timeout: if database timeout expires
363
-
364
-        """
365
-        return sum((shard.check(fix=fix) for shard in self._shards), [])
366
-
367
-
368
-    def expire(self):
369
-        """Remove expired items from cache.
370
-
371
-        :return: count of items removed
372
-
373
-        """
374
-        return self._remove('expire', args=(time.time(),))
375
-
376
-
377
-    def create_tag_index(self):
378
-        """Create tag index on cache database.
379
-
380
-        It is better to initialize cache with `tag_index=True` than use this.
381
-
382
-        :raises Timeout: if database timeout expires
383
-
384
-        """
385
-        for shard in self._shards:
386
-            shard.create_tag_index()
387
-
388
-
389
-    def drop_tag_index(self):
390
-        """Drop tag index on cache database.
391
-
392
-        :raises Timeout: if database timeout expires
393
-
394
-        """
395
-        for shard in self._shards:
396
-            shard.drop_tag_index()
397
-
398
-
399
-    def evict(self, tag):
400
-        """Remove items with matching `tag` from cache.
401
-
402
-        :param str tag: tag identifying items
403
-        :return: count of items removed
404
-
405
-        """
406
-        return self._remove('evict', args=(tag,))
407
-
408
-
409
-    def cull(self):
410
-        """Cull items from cache until volume is less than size limit.
411
-
412
-        :return: count of items removed
413
-
414
-        """
415
-        return self._remove('cull')
416
-
417
-
418
-    def clear(self):
419
-        """Remove all items from cache.
420
-
421
-        :return: count of items removed
422
-
423
-        """
424
-        return self._remove('clear')
425
-
426
-
427
-    def _remove(self, name, args=()):
428
-        total = 0
429
-        for shard in self._shards:
430
-            method = getattr(shard, name)
431
-            while True:
432
-                try:
433
-                    count = method(*args)
434
-                    total += count
435
-                except Timeout as timeout:
436
-                    total += timeout.args[0]
437
-                else:
438
-                    break
439
-        return total
440
-
441
-
442
-    def stats(self, enable=True, reset=False):
443
-        """Return cache statistics hits and misses.
444
-
445
-        :param bool enable: enable collecting statistics (default True)
446
-        :param bool reset: reset hits and misses to 0 (default False)
447
-        :return: (hits, misses)
448
-
449
-        """
450
-        results = [shard.stats(enable, reset) for shard in self._shards]
451
-        return (sum(result[0] for result in results),
452
-                sum(result[1] for result in results))
453
-
454
-
455
-    def volume(self):
456
-        """Return estimated total size of cache on disk.
457
-
458
-        :return: size in bytes
459
-
460
-        """
461
-        return sum(shard.volume() for shard in self._shards)
462
-
463
-
464
-    def close(self):
465
-        "Close database connection."
466
-        for shard in self._shards:
467
-            shard.close()
468
-        self._deques.clear()
469
-        self._indexes.clear()
470
-
471
-
472
-    def __enter__(self):
473
-        return self
474
-
475
-
476
-    def __exit__(self, *exception):
477
-        self.close()
478
-
479
-
480
-    def __getstate__(self):
481
-        return (self._directory, self._count, self.timeout, type(self.disk))
482
-
483
-
484
-    def __setstate__(self, state):
485
-        self.__init__(*state)
486
-
487
-
488
-    def __iter__(self):
489
-        "Iterate keys in cache including expired items."
490
-        iterators = [iter(shard) for shard in self._shards]
491
-        return it.chain.from_iterable(iterators)
492
-
493
-
494
-    def __reversed__(self):
495
-        "Reverse iterate keys in cache including expired items."
496
-        iterators = [reversed(shard) for shard in self._shards]
497
-        return it.chain.from_iterable(reversed(iterators))
498
-
499
-
500
-    def __len__(self):
501
-        "Count of items in cache including expired items."
502
-        return sum(len(shard) for shard in self._shards)
503
-
504
-
505
-    def reset(self, key, value=ENOVAL):
506
-        """Reset `key` and `value` item from Settings table.
507
-
508
-        If `value` is not given, it is reloaded from the Settings
509
-        table. Otherwise, the Settings table is updated.
510
-
511
-        Settings attributes on cache objects are lazy-loaded and
512
-        read-only. Use `reset` to update the value.
513
-
514
-        Settings with the ``sqlite_`` prefix correspond to SQLite
515
-        pragmas. Updating the value will execute the corresponding PRAGMA
516
-        statement.
517
-
518
-        :param str key: Settings key for item
519
-        :param value: value for item (optional)
520
-        :return: updated value for item
521
-        :raises Timeout: if database timeout expires
522
-
523
-        """
524
-        for shard in self._shards:
525
-            while True:
526
-                try:
527
-                    result = shard.reset(key, value)
528
-                except Timeout:
529
-                    pass
530
-                else:
531
-                    break
532
-        return result
533
-
534
-
535
-    def deque(self, name):
536
-        """Return Deque with given `name` in subdirectory.
537
-
538
-        >>> cache = FanoutCache('/tmp/diskcache/fanoutcache')
539
-        >>> deque = cache.deque('test')
540
-        >>> deque.clear()
541
-        >>> deque.extend('abc')
542
-        >>> deque.popleft()
543
-        'a'
544
-        >>> deque.pop()
545
-        'c'
546
-        >>> len(deque)
547
-        1
548
-
549
-        :param str name: subdirectory name for Deque
550
-        :return: Deque with given name
551
-
552
-        """
553
-        _deques = self._deques
554
-
555
-        try:
556
-            return _deques[name]
557
-        except KeyError:
558
-            parts = name.split('/')
559
-            directory = op.join(self._directory, 'deque', *parts)
560
-            temp = Deque(directory=directory)
561
-            _deques[name] = temp
562
-            return temp
563
-
564
-
565
-    def index(self, name):
566
-        """Return Index with given `name` in subdirectory.
567
-
568
-        >>> cache = FanoutCache('/tmp/diskcache/fanoutcache')
569
-        >>> index = cache.index('test')
570
-        >>> index.clear()
571
-        >>> index['abc'] = 123
572
-        >>> index['def'] = 456
573
-        >>> index['ghi'] = 789
574
-        >>> index.popitem()
575
-        ('ghi', 789)
576
-        >>> del index['abc']
577
-        >>> len(index)
578
-        1
579
-        >>> index['def']
580
-        456
581
-
582
-        :param str name: subdirectory name for Index
583
-        :return: Index with given name
584
-
585
-        """
586
-        _indexes = self._indexes
587
-
588
-        try:
589
-            return _indexes[name]
590
-        except KeyError:
591
-            parts = name.split('/')
592
-            directory = op.join(self._directory, 'index', *parts)
593
-            temp = Index(directory)
594
-            _indexes[name] = temp
595
-            return temp

+ 0
- 105
diskcache/memo.py View File

@@ -1,105 +0,0 @@
1
-"""Memoization utilities.
2
-
3
-"""
4
-
5
-from functools import wraps
6
-
7
-from .core import ENOVAL
8
-
9
-def memoize(cache, name=None, typed=False, expire=None, tag=None):
10
-    """Memoizing cache decorator.
11
-
12
-    Decorator to wrap callable with memoizing function using cache. Repeated
13
-    calls with the same arguments will lookup result in cache and avoid
14
-    function evaluation.
15
-
16
-    If name is set to None (default), the callable name will be determined
17
-    automatically.
18
-
19
-    If typed is set to True, function arguments of different types will be
20
-    cached separately. For example, f(3) and f(3.0) will be treated as distinct
21
-    calls with distinct results.
22
-
23
-    The original underlying function is accessible through the __wrapped__
24
-    attribute. This is useful for introspection, for bypassing the cache, or
25
-    for rewrapping the function with a different cache.
26
-
27
-    >>> from diskcache import FanoutCache
28
-    >>> cache = FanoutCache('/tmp/diskcache/fanoutcache')
29
-    >>> @cache.memoize(typed=True, expire=1, tag='fib')
30
-    ... def fibonacci(number):
31
-    ...     if number == 0:
32
-    ...         return 0
33
-    ...     elif number == 1:
34
-    ...         return 1
35
-    ...     else:
36
-    ...         return fibonacci(number - 1) + fibonacci(number - 2)
37
-    >>> print(sum(fibonacci(number=value) for value in range(100)))
38
-    573147844013817084100
39
-
40
-    Remember to call memoize when decorating a callable. If you forget, then a
41
-    TypeError will occur. Note the lack of parenthenses after memoize below:
42
-
43
-    >>> @cache.memoize
44
-    ... def test():
45
-    ...     pass
46
-    Traceback (most recent call last):
47
-        ...
48
-    TypeError: name cannot be callable
49
-
50
-    :param cache: cache to store callable arguments and return values
51
-    :param str name: name given for callable (default None, automatic)
52
-    :param bool typed: cache different types separately (default False)
53
-    :param float expire: seconds until arguments expire
54
-        (default None, no expiry)
55
-    :param str tag: text to associate with arguments (default None)
56
-    :return: callable decorator
57
-
58
-    """
59
-    if callable(name):
60
-        raise TypeError('name cannot be callable')
61
-
62
-    def decorator(function):
63
-        "Decorator created by memoize call for callable."
64
-        if name is None:
65
-            try:
66
-                reference = function.__qualname__
67
-            except AttributeError:
68
-                reference = function.__name__
69
-
70
-            reference = function.__module__ + reference
71
-        else:
72
-            reference = name
73
-
74
-        reference = (reference,)
75
-
76
-        @wraps(function)
77
-        def wrapper(*args, **kwargs):
78
-            "Wrapper for callable to cache arguments and return values."
79
-
80
-            key = reference + args
81
-
82
-            if kwargs:
83
-                key += (ENOVAL,)
84
-                sorted_items = sorted(kwargs.items())
85
-
86
-                for item in sorted_items:
87
-                    key += item
88
-
89
-            if typed:
90
-                key += tuple(type(arg) for arg in args)
91
-
92
-                if kwargs:
93
-                    key += tuple(type(value) for _, value in sorted_items)
94
-
95
-            result = cache.get(key, default=ENOVAL, retry=True)
96
-
97
-            if result is ENOVAL:
98
-                result = function(*args, **kwargs)
99
-                cache.set(key, result, expire=expire, tag=tag, retry=True)
100
-
101
-            return result
102
-
103
-        return wrapper
104
-
105
-    return decorator

+ 0
- 1348
diskcache/persistent.py
File diff suppressed because it is too large
View File


+ 0
- 78
diskcache/stampede.py View File

@@ -1,78 +0,0 @@
1
-"Stampede barrier implementation."
2
-
3
-import functools as ft
4
-import math
5
-import random
6
-import tempfile
7
-import time
8
-
9
-from .core import Cache, ENOVAL
10
-
11
-
12
-class StampedeBarrier(object):
13
-    """Stampede barrier mitigates cache stampedes.
14
-
15
-    Cache stampedes are also known as dog-piling, cache miss storm, cache
16
-    choking, or the thundering herd problem.
17
-
18
-    Based on research by Vattani, A.; Chierichetti, F.; Lowenstein, K. (2015),
19
-    Optimal Probabilistic Cache Stampede Prevention,
20
-    VLDB, pp. 886–897, ISSN 2150-8097
21
-
22
-    Example:
23
-
24
-    ```python
25
-    stampede_barrier = StampedeBarrier('/tmp/user_data', expire=3)
26
-
27
-    @stampede_barrier
28
-    def load_user_info(user_id):
29
-        return database.lookup_user_info_by_id(user_id)
30
-    ```
31
-
32
-    """
33
-    # pylint: disable=too-few-public-methods
34
-    def __init__(self, cache=None, expire=None):
35
-        if isinstance(cache, Cache):
36
-            pass
37
-        elif cache is None:
38
-            cache = Cache(tempfile.mkdtemp())
39
-        else:
40
-            cache = Cache(cache)
41
-
42
-        self._cache = cache
43
-        self._expire = expire
44
-
45
-    def __call__(self, func):
46
-        cache = self._cache
47
-        expire = self._expire
48
-
49
-        @ft.wraps(func)
50
-        def wrapper(*args, **kwargs):
51
-            "Wrapper function to cache function result."
52
-            key = (args, kwargs)
53
-
54
-            try:
55
-                result, expire_time, delta = cache.get(
56
-                    key, default=ENOVAL, expire_time=True, tag=True
57
-                )
58
-
59
-                if result is ENOVAL:
60
-                    raise KeyError
61
-
62
-                now = time.time()
63
-                ttl = expire_time - now
64
-
65
-                if (-delta * math.log(random.random())) < ttl:
66
-                    return result
67
-
68
-            except KeyError:
69
-                pass
70
-
71
-            now = time.time()
72
-            result = func(*args, **kwargs)
73
-            delta = time.time() - now
74
-            cache.set(key, result, expire=expire, tag=delta)
75
-
76
-            return result
77
-
78
-        return wrapper

+ 7
- 2
ltcproxy.py View File

@@ -152,8 +152,13 @@ def get_live_chunk(key, ch, tail):
152 152
             stream_url = s[ch]
153 153
             mediaid= s["m"+ch]
154 154
         else:
155
-            print "No stream_url %s in cache" % path0
156
-            raise bottle.HTTPError(500)
155
+            refresh_live_chunklist_url(ch)
156
+            if ch in s:
157
+                stream_url = s[ch]
158
+                mediaid= s["m"+ch]
159
+            else:
160
+                print "No stream_url %s in cache" % path0
161
+                raise bottle.HTTPError(500)
157 162
         base0, rest0 = hls_base(stream_url)
158 163
         rest2 = "media_%s_%s.ts?resource_id=c-%s&auth_token=app_" % (mediaid, chunkid, chid)
159 164
         url = base0 + rest2 + token

BIN
picons/shortcut2.png View File


+ 4
- 0
playstreamproxy.py View File

@@ -135,10 +135,14 @@ class StreamHandler(BaseHTTPRequestHandler):
135 135
                     content = sources.get_streams(data)
136 136
                 elif cmd == "get_info":
137 137
                     content = sources.get_info(data)
138
+                elif cmd == "get_image":
139
+                    content = sources.get_image(data)
138 140
                 elif cmd == "is_video":
139 141
                     content = sources.is_video(data)
140 142
                 elif cmd == "options_read":
141 143
                     content = sources.options_read(data)
144
+                elif cmd == "options_write":
145
+                    content = sources.options_write(data)
142 146
                 else:
143 147
                     content = []
144 148
                 txt = json.dumps(content)

+ 928
- 919
project.wpr
File diff suppressed because it is too large
View File


+ 26
- 4
sources/lmt.py View File

@@ -130,7 +130,7 @@ Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
130 130
         r = requests.get(url,headers = headers)
131 131
         return r.content
132 132
         #result = self._http_request(url,params,headers=headers)
133
-        return result
133
+        # return result
134 134
 
135 135
     def get_streams(self,data):
136 136
         print "[lmt] get_streams:", data
@@ -150,9 +150,13 @@ Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
150 150
         if img.startswith("//"):
151 151
             img = "http:" + img
152 152
         desc = title + "\n" + re.search('<meta property="og:description" content="([^"]+)">', r, re.IGNORECASE | re.DOTALL).group(1)
153
+
153 154
         m = re.search('file: "([^"]+)"', r, re.IGNORECASE)
154 155
         if m:
156
+            #  LMT file
155 157
             data2 = m.group(1)
158
+            if not data2.startswith("http"):
159
+                data2 = "http:" + data2
156 160
             stream = util.item()
157 161
             stream["name"] = title
158 162
             stream["url"] = data2
@@ -160,9 +164,11 @@ Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
160 164
             stream["desc"] = desc
161 165
             stream["resolver"] = "lmt"
162 166
             return [stream]
163
-        elif re.search('src="http*://www.youtube.com/embed/([\w-]+).*"',r):
164
-            m = re.search('src="http*://www.youtube.com/embed/([\w-]+).*"',r)
165
-            video_id = m.group(1)
167
+
168
+        elif re.search(r'src="(http*:)*//www.youtube.com/embed/([\w-]+).*"', r):
169
+            #  Youtube
170
+            m = re.search(r'src="(http*:)*//www.youtube.com/embed/([\w-]+).*"', r)
171
+            video_id = m.group(2)
166 172
             #http://www.youtube.com/embed/RUyQ_JJ6A84?rel=0&fs=1&wmode=transparent
167 173
             data2 = YouTubeVideoUrl().extract(video_id)
168 174
             s = util.item()
@@ -219,6 +225,22 @@ Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8
219 225
             sdata = js["data"]["versions"][ver]
220 226
             s["url"] = "http:" + sdata[1]["src"]
221 227
             return [s]
228
+        elif re.search('src="(https://www.facebook.com/plugins/video.php[^"]+)', r):
229
+            m = re.search('src="(https://www.facebook.com/plugins/video.php[^"]+)', r)
230
+            url = m.group(1)
231
+            r2 = self._http_request(url)
232
+            if not r2:
233
+                raise Exception("No stream found")
234
+            streams = []
235
+            for it in re.findall('"(sd|hd)_src":"([^"]+)', r2):
236
+                s = util.item()
237
+                s["name"] = title
238
+                s["desc"] = desc
239
+                s["img"] = img
240
+                s["resolver"] = "lmt"
241
+                s["url"] = it[1].replace("\\", "")
242
+                s["quality"] = it[0]
243
+            return [s]
222 244
 
223 245
         else:
224 246
             raise Exception("No stream found")

+ 0
- 23
test_url.py View File

@@ -1,23 +0,0 @@
1
-#!/usr/bin/python
2
-# -*- coding: utf-8 -*-
3
-
4
-from util import streamproxy_encode3, streamproxy_decode3
5
-dt2ts = lambda dt: int(time.mktime(dt.timetuple()))
6
-ts2dt = lambda ts: datetime.datetime.fromtimestamp(ts)
7
-import time, datetime, pytz
8
-from datetime import datetime as dt
9
-
10
-# "20190302203000 +0200" panorāma
11
-print dt2ts(dt.now())
12
-print dt2ts(dt(2019, 3, 2, 20, 30))
13
-#print urlp
14
-# ltc::content/live-streams/101?include=quality
15
-print streamproxy_encode3("playstream", "ltc::content/catchup/101?start=1551551400")
16
-# http://localhost:8880/playstream/ltc%3A%3Acontent%2Fcatchup%2F101%3Fstart%3D1551551400/
17
-
18
-print streamproxy_encode3("playstream", "replay::tiesraide/ltv1")
19
-print streamproxy_encode3("playstream", "tvdom::tiesraides/ltv1/")
20
-print streamproxy_encode3("playstream", "tvplay::asset/10311641")
21
-print streamproxy_encode3("playstream", "xtv::rigatv24/video/Zw4pVPqr7OX-festivals_mainam_pasauli_sakam_ar_sevi")
22
-print streamproxy_encode3("playstream", "iplayer::episodes/b094f49s")
23
-#cmd, data, headers, qs = streamproxy_decode3(urlp)