Compare commits

...

17 Commits

Author SHA1 Message Date
Felix Stupp fb28eb99ea
Also update element lookup cache on mass extract collections with errors 11 months ago
Felix Stupp 414765f221
.preferences/init: Add __all__ entries 11 months ago
Felix Stupp f7e53f3097
Reformat code 11 months ago
Felix Stupp 70be1210d3
entities: Make {collection,media}_list public in Tag 11 months ago
Felix Stupp d901d3739e
tvmaze: Use extractor name in title dynamically 11 months ago
Felix Stupp 838840d1a0
common: Add (unused) all_same 11 months ago
Felix Stupp 6cfc1fcf3a
common: Add already used helpers fix_iter & iter_lookahead 11 months ago
Felix Stupp afe0ef0a56
tvmaze: Ignore missing language when generating tags 11 months ago
Felix Stupp 447785286b
Remove not needed import 11 months ago
Felix Stupp a3dd645cb7
Introduce gen_api_error for all API requests 11 months ago
Felix Stupp 0d9a3a54e1
app: Fix preference_from_base64 for generic PreferenceScore 11 months ago
Felix Stupp 029651821b
Decrease log level for included modules 11 months ago
Felix Stupp d608d0c98e
Improve logging format to include module_name & level 11 months ago
Felix Stupp 4779d7ba7f
vscode: Add settings for python linting & testing 11 months ago
Felix Stupp 6c5ca895f0
Reformat code 11 months ago
Felix Stupp b6fcb55fbc
tmdb: Fix retrieval of tags 11 months ago
Felix Stupp 1e27f7b3d1
UriHolder: Make uri & uri_set proper properties
- to avoid direct DB access
- to allow for getter & setter functions
- makes consistency checks on each lookup unnecessary
- propose async consistency checks (TODO)
11 months ago

@ -0,0 +1,21 @@
{
"python.analysis.typeCheckingMode": "off",
"python.linting.enabled": false,
"python.linting.mypyArgs": [
"--follow-imports=silent",
"--ignore-missing-imports",
"--show-column-numbers",
"--no-pretty",
"--warn-return-any",
"--disallow-untyped-calls",
"--disallow-untyped-defs",
"--disallow-incomplete-defs",
"--no-implicit-optional",
],
"python.linting.mypyEnabled": false,
"python.testing.pytestArgs": [
"tests",
],
"python.testing.pytestEnabled": true,
"python.testing.unittestEnabled": false,
}

@ -1,3 +1,7 @@
# TODO check that some queries check DB integrity like:
# SELECT e.uri FROM mediaelment e LEFT JOIN mediaurimapping m ON e.uri = m.uri WHERE m.uri IS NULL;
# SELECT e.uri FROM mediacollection e LEFT JOIN collectionurimapping m ON e.uri = m.uri WHERE m.uri IS NULL;
####
# Imports
####
@ -64,7 +68,10 @@ from entertainment_decider.extractors.media import (
media_extract_uri,
media_update,
)
from entertainment_decider.extras import remove_common_trails
from entertainment_decider.extras import (
gen_api_error,
remove_common_trails,
)
T = TypeVar("T")
@ -80,12 +87,29 @@ def adapt_score_list(
return o
def preference_from_base64(in_data: str) -> PreferenceScore[Tag]:
    """Deserialize a PreferenceScore[Tag] from its base64 representation.

    Tags are resolved from their serialized ids via primary-key lookup.
    """

    def _tag_by_id(tag_id):
        # pony-style primary key lookup on the Tag entity
        return Tag[tag_id]

    return PreferenceScore.from_base64(
        in_data=in_data,
        get_tag=_tag_by_id,
    )
####
# Logging Config
####
logging.basicConfig(format="%(asctime)s === %(message)s", level=logging.DEBUG)
logging.basicConfig(
format="%(asctime)s - %(name)s === %(levelname)s: %(message)s",
level=logging.DEBUG,
)
_LOG_LEVELS = {
"httpcore": logging.WARNING, # >INFO to hide outgoing requests
"httpx": logging.WARNING, # >DEBUG to hide outgoing SSL infos
"werkzeug": logging.INFO, # INFO so incoming requests show up
}
for module_name, log_level in _LOG_LEVELS.items():
logging.getLogger(module_name).setLevel(log_level)
####
@ -636,7 +660,7 @@ def recommend_adaptive() -> ResponseReturnValue:
preferences = request.cookies.get(
key=PREFERENCES_SCORE_NAME,
default=PreferenceScore(),
type=PreferenceScore.from_base64,
type=preference_from_base64,
) * (1 if score_adapt > 0 else -1 if score_adapt < 0 else 0)
if "max_length" not in request.args:
# ask for max length before calculating to save time
@ -684,7 +708,7 @@ def cookies_rating(negative: bool) -> ResponseReturnValue:
preferences = request.cookies.get(
key=PREFERENCES_SCORE_NAME,
default=PreferenceScore(),
type=PreferenceScore.from_base64,
type=preference_from_base64,
).adapt_score(element, score=3 if negative else -3)
resp = redirect_back_or_okay()
resp.set_cookie(PREFERENCES_SCORE_NAME, preferences.to_base64())
@ -730,11 +754,9 @@ def refresh_collections() -> ResponseReturnValue:
"collection": {
"id": coll.id,
"title": coll.title,
"uri": coll.uri,
},
"error": {
"args": repr(e.args),
"uri": coll.primary_uri,
},
"error": gen_api_error(e),
},
)
update_element_lookup_cache(changed_colls)
@ -914,12 +936,11 @@ def api_collection_extract_mass() -> ResponseReturnValue:
errors.append(
{
"uri": u,
"error": {
"type": repr(type(e)),
"args": repr(e.args),
},
"error": gen_api_error(e),
}
)
if coll_ids:
update_element_lookup_cache(coll_ids)
if errors:
return {
"status": False,
@ -929,8 +950,6 @@ def api_collection_extract_mass() -> ResponseReturnValue:
"data": errors,
},
}, 501
if coll_ids:
update_element_lookup_cache(coll_ids)
if coll_ids and environ_bool(data.get("redirect_to_overview", False)):
return redirect(
"/collection/overview?ids=" + ",".join(str(i) for i in coll_ids)
@ -1098,10 +1117,7 @@ def api_media_extract_mass() -> ResponseReturnValue:
errors.append(
{
"uri": u,
"error": {
"type": repr(type(e)),
"args": repr(e.args),
},
"error": gen_api_error(e),
}
)
if errors:

@ -5,15 +5,24 @@ import sys
from typing import (
IO,
Iterable,
Iterator,
List,
Literal,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
overload,
)
def all_same(iterable: Iterable[object]) -> bool:
    """Return True if every element of *iterable* compares equal.

    An empty iterable is vacuously True (consistent with all()).
    Previously the unguarded next() raised StopIteration for empty input.
    """
    it = iter(iterable)
    try:
        first = next(it)
    except StopIteration:
        # no elements -> nothing can differ
        return True
    return all(first == elem for elem in it)
def call(
args: Sequence[str],
check: bool = True,
@ -78,5 +87,75 @@ def limit_iter(iter: Iterable[T], limit: int) -> List[T]:
return list(itertools.islice(iter, limit))
class _IterFixer(Iterator[T]):
__it: Iterator[T]
def __init__(self, it: Iterator[T]) -> None:
super().__init__()
self.__it = it
def __iter__(self) -> Iterator[T]:
return self
def __next__(self) -> T:
return next(self.__it)
def fix_iter(iterable: Iterable[T]) -> Iterable[T]:
    """Return an iterator over *iterable* that also implements __iter__."""
    inner = iter(iterable)
    return _IterFixer(inner)
@overload
def iter_lookahead(
    iterable: Iterable[T],
    get_first: Literal[False] = False,
    get_last: Literal[False] = False,
) -> Iterable[Tuple[T, T]]:
    ...


@overload
def iter_lookahead(
    iterable: Iterable[T],
    get_first: Literal[True],
    get_last: Literal[False] = False,
) -> Iterable[Tuple[None, T] | Tuple[T, T]]:
    ...


@overload
def iter_lookahead(
    iterable: Iterable[T],
    get_first: Literal[False] = False,
    get_last: Literal[True] = True,  # <- default only to satisfy python
) -> Iterable[Tuple[T, T] | Tuple[T, None]]:
    ...


@overload
def iter_lookahead(
    iterable: Iterable[T],
    get_first: Literal[True],
    get_last: Literal[True],
) -> Iterable[Tuple[None, T] | Tuple[T, T] | Tuple[T, None]]:
    ...


def iter_lookahead(
    iterable: Iterable[T],
    get_first: bool = False,
    get_last: bool = False,
) -> Iterable[Tuple[None, T] | Tuple[T, T] | Tuple[T, None]]:
    """Yield overlapping (previous, current) pairs from *iterable*.

    With get_first, a leading (None, first) pair is yielded;
    with get_last, a trailing (last, None) pair is yielded.
    An empty iterable yields nothing; previously the unguarded next()
    escaped the generator as RuntimeError (PEP 479).
    """
    it = iter(iterable)
    try:
        last = next(it)
    except StopIteration:
        # empty input: nothing to pair up
        return
    if get_first:
        yield None, last
    for cur in it:
        yield last, cur
        last = cur
    if get_last:
        yield last, None
def date_to_datetime(d: date) -> datetime:
    """Convert *d* to a datetime at midnight of that day."""
    return datetime.combine(d, datetime.min.time())

@ -189,7 +189,9 @@ WEB_CHANNEL_PREFIX = f"{EXTRACTOR_KEY}/web_channel"
def get_show_tags(show: TvmazeShow) -> Iterable[Tag]:
yield predefined_series_tag()
yield get_show_type_tag(show["type"])
yield get_language_tag(show["language"])
language = show["language"]
if language is not None:
yield get_language_tag(language)
for genre in show["genres"]:
yield get_genre_tag(genre)
network = show["network"]

@ -43,7 +43,7 @@ def collection_update(
collection: MediaCollection,
check_cache_expired: bool = True,
) -> ChangedReport:
ex = collection_expect_extractor(collection.uri)
ex = collection_expect_extractor(collection.primary_uri)
return ex.update_object(
object=collection,
check_cache_expired=check_cache_expired,

@ -48,7 +48,7 @@ class AggregatedCollectionExtractor(CollectionExtractor[DataType]):
return True
def _cache_expired(self, object: MediaCollection) -> bool:
colls = self.__get_collections(object.uri)
colls = self.__get_collections(object.primary_uri)
for c in colls:
if c.last_updated is None or object.last_updated <= c.last_updated:
return True
@ -86,7 +86,7 @@ class AggregatedCollectionExtractor(CollectionExtractor[DataType]):
data: DataType,
) -> ChangedReport:
if object.title is None or "[aggregated]" not in object.title:
object.title = f"[aggregated] {object.uri}"
object.title = f"[aggregated] {object.primary_uri}"
object.creator = None
object.set_watch_in_order_auto(True)
all_links: Set[int] = set(
@ -101,6 +101,5 @@ class AggregatedCollectionExtractor(CollectionExtractor[DataType]):
episode=episode + 1,
)
orm.delete(link for link in object.media_links if link.element.id in all_links)
for uri_link in list(object.uris):
uri_link.delete()
object.set_as_only_uri(object.primary_uri)
return ChangedReport.ChangedSome # TODO improve

@ -37,14 +37,6 @@ class CollectionExtractor(GeneralExtractor[MediaCollection, T]):
mapping: CollectionUriMapping = CollectionUriMapping.get(uri=uri)
if mapping:
return mapping.element
elem: MediaCollection = MediaCollection.get(uri=uri)
if elem:
logging.warning(
f"Add missing URI mapping entry for uri {uri!r}, "
+ "this should not happen at this point and is considered a bug"
)
elem.add_single_uri(uri)
return elem
return None
@staticmethod
@ -116,7 +108,7 @@ class CollectionExtractor(GeneralExtractor[MediaCollection, T]):
element = extractor.inject_object(data)
except ExtractionError:
logging.warning(
f"Failed while extracting media {data.object_uri!r} while injecting from {collection.uri!r}",
f"Failed while extracting media {data.object_uri!r} while injecting from {collection.primary_uri!r}",
exc_info=True,
)
return None

@ -76,7 +76,7 @@ class RssCollectionExtractor(CollectionExtractor[RSSFeed]):
object.description = data.description
object.set_watch_in_order_auto(True)
object.add_single_uri(
self.__get_uri(object.uri)
self.__get_uri(object.primary_uri)
) # add url without prefix if required
for item in data.feed:
element = self._add_episode(

@ -80,11 +80,11 @@ class TtRssCollectionExtractor(CollectionExtractor[HeadlineList]):
data: HeadlineList,
) -> ChangedReport:
if not object.title:
object.title = object.uri
object.title = object.primary_uri
object.creator = None
object.set_watch_in_order_auto(True)
logging.debug(f"Got {len(data)} headlines")
rss_uri = self.__decode_uri(object.uri)
rss_uri = self.__decode_uri(object.primary_uri)
readed_headlines = list[int]()
for headline in data:
elem = self._add_episode(collection=object, uri=headline.url)

@ -120,7 +120,7 @@ class TvmazeCollectionExtractor(CollectionExtractor[TvmazeShowEmbedded]):
object: MediaCollection,
data: TvmazeShowEmbedded,
) -> ChangedReport:
object.title = f"[tvmaze] {data['name']}"
object.title = f"[{self.name}] {data['name']}"
object.description = data.get("summary", "")
object.release_date = datetime.strptime(data["premiered"], "%Y-%m-%d")
object.set_watch_in_order_auto(True)

@ -77,14 +77,14 @@ class ExtractedDataLight:
object_key: str
def create_media(self) -> MediaElement:
return MediaElement(
return MediaElement.new(
uri=self.object_uri,
extractor_name=self.extractor_name,
extractor_key=self.object_key,
)
def create_collection(self) -> MediaCollection:
return MediaCollection(
return MediaCollection.new(
uri=self.object_uri,
extractor_name=self.extractor_name,
extractor_key=self.object_key,
@ -219,7 +219,7 @@ class GeneralExtractor(Generic[E, T]):
return self._extract_online(data.object_uri)
def _update_object(self, object: E, data: ExtractedDataOnline[T]) -> ChangedReport:
object.uri = data.object_uri
object.primary_uri = data.object_uri
object.tag_list.add(self._get_extractor_tag())
self._update_object_raw(object, data.data)
self._update_hook(object, data)
@ -240,7 +240,7 @@ class GeneralExtractor(Generic[E, T]):
f"Skip info for element as already extracted and cache valid: {object.title!r}"
)
return ChangedReport.StayedSame
data = self._extract_online(object.uri)
data = self._extract_online(object.primary_uri)
logging.debug(f"Updating info for media: {data!r}")
return self._update_object(object, data)

@ -28,7 +28,7 @@ def media_expect_extractor(uri: str) -> MediaExtractor:
def media_update(element: MediaElement, check_cache_expired: bool = True) -> None:
ex = media_expect_extractor(element.uri)
ex = media_expect_extractor(element.primary_uri)
ex.update_object(
object=element,
check_cache_expired=check_cache_expired,

@ -1,6 +1,5 @@
from __future__ import annotations
import logging
from typing import Optional, TypeVar
from ...models import MediaCollection, MediaElement, MediaUriMapping
@ -17,7 +16,6 @@ T = TypeVar("T")
class MediaExtractor(GeneralExtractor[MediaElement, T]):
# abstract
def _get_author_data(self, data: T) -> Optional[AuthorExtractedData]:
@ -30,14 +28,6 @@ class MediaExtractor(GeneralExtractor[MediaElement, T]):
mapping: MediaUriMapping = MediaUriMapping.get(uri=uri)
if mapping:
return mapping.element
elem: MediaElement = MediaElement.get(uri=uri)
if elem:
logging.warning(
f"Add missing URI mapping entry for uri {uri!r}, "
+ "this should not happen at this point and is considered a bug"
)
elem.add_single_uri(uri)
return elem
return None
def _create_object(self, data: ExtractedDataOffline[T]) -> MediaElement:

@ -81,7 +81,7 @@ class TmdbMovieMediaExtractor(MediaExtractor[TmdbMovieData]):
# sanity check
if not data.was_released:
raise ExtractionError(
f"Could not extract {object.uri!r} because of missing data probably due to not being released yet"
f"Could not extract {object.primary_uri!r} because of missing data probably due to not being released yet"
)
# extract data
object.title = data.title
@ -98,6 +98,6 @@ class TmdbMovieMediaExtractor(MediaExtractor[TmdbMovieData]):
data.tmdb_short_uri,
)
)
for tag in data.get_tags(data):
for tag in data.get_tags():
object.tag_list.add(tag)
return ChangedReport.ChangedSome # TODO improve

@ -106,7 +106,7 @@ class TvmazeMediaExtractor(MediaExtractor[TvmazeEpisodeEmbedded]):
airstamp = data.get("airstamp")
if airstamp is None: # not released yet
raise ExtractionError(
f"Could not extract {object.uri!r} because of missing data probably due to not being released yet"
f"Could not extract {object.primary_uri!r} because of missing data probably due to not being released yet"
)
# extract data
show = data["_embedded"]["show"]

@ -112,7 +112,7 @@ class YoutubeMediaExtractor(MediaExtractor[YoutubeVideoData]):
object.length = int(data["duration"]["secondsText"])
for tag in get_video_tags(data):
object.tag_list.add(tag)
object.uri = f"https://www.youtube.com/watch?v={data['id']}"
object.primary_uri = f"https://www.youtube.com/watch?v={data['id']}"
object.add_uris(
(
f"https://youtu.be/{data['id']}",

@ -1,4 +1,5 @@
from .chain import Chain
from .errors import gen_api_error
from .strings import remove_common_trails
from .typing import LazyValue
@ -6,5 +7,6 @@ from .typing import LazyValue
__all__ = [
"Chain",
"LazyValue",
"gen_api_error",
"remove_common_trails",
]

@ -0,0 +1,12 @@
from __future__ import annotations
from traceback import format_exception
from typing import Dict
def gen_api_error(exc: Exception) -> Dict:
return {
"type": repr(type(exc)),
"args": repr(exc.args),
"traceback": list(format_exception(exc)),
}

@ -189,8 +189,8 @@ class Tag(db.Entity, Tagable, TagProto["Tag"]):
super_tag_list: Iterable[Tag] = orm.Set(lambda: Tag, reverse="sub_tag_list")
sub_tag_list: Iterable[Tag] = orm.Set(lambda: Tag, reverse="super_tag_list")
_collection_list: Iterable[MediaCollection] = orm.Set(lambda: MediaCollection)
_media_list: Iterable[MediaElement] = orm.Set(lambda: MediaElement)
collection_list: Iterable[MediaCollection] = orm.Set(lambda: MediaCollection)
media_list: Iterable[MediaElement] = orm.Set(lambda: MediaElement)
@property
def orm_assigned_tags(self) -> Query[Tag]:
@ -202,7 +202,6 @@ class Tag(db.Entity, Tagable, TagProto["Tag"]):
class TagKey(db.Entity):
num_id: int = orm.PrimaryKey(int, auto=True)
tag_key: str = orm.Required(str, unique=True)
"""Format: <domain>[/<kind>][/<id>]
@ -330,8 +329,9 @@ class MediaElement(db.Entity, UriHolder, Tagable):
int,
auto=True,
)
uri: str = orm.Required(
__uri: str = orm.Required(
str,
column="uri",
unique=True,
)
@ -377,7 +377,7 @@ class MediaElement(db.Entity, UriHolder, Tagable):
tag_list: Iterable[Tag] = orm.Set(
lambda: Tag,
)
uris: Iterable[MediaUriMapping] = orm.Set(
__uri_list: Iterable[MediaUriMapping] = orm.Set(
lambda: MediaUriMapping,
)
collection_links: Iterable[MediaCollectionLink] = orm.Set(
@ -393,21 +393,35 @@ class MediaElement(db.Entity, UriHolder, Tagable):
reverse="blocked_by",
)
@classmethod
def new(
    cls,
    *,
    extractor_name: str,
    extractor_key: str,
    uri: str,
) -> MediaElement:
    """Create a MediaElement, setting its private primary-uri column.

    The __uri attribute is name-mangled, so it cannot be passed as a
    plain keyword from outside the class; the mangled form is spelled
    out manually instead.
    """
    return cls(
        extractor_name=extractor_name,
        extractor_key=extractor_key,
        _MediaElement__uri=uri,  # manual mangling for MediaElement
    )
### for UriHolder
@property
def _primary_uri(self) -> str:
return self.uri
return self.__uri
def _set_primary_uri(self, uri: str) -> None:
self.uri = uri
self.__uri = uri
@property
def _get_uri_set(self) -> Set[str]:
return {m.uri for m in self.uris}
def _uri_set(self) -> Set[str]:
return {m.uri for m in self.__uri_list}
def _set_uri_set(self, uri_set: Set[str]) -> None:
self.uris = set()
self.__uri_list = set()
self.add_uris(uri_set)
### for Tagable
@ -530,7 +544,7 @@ class MediaElement(db.Entity, UriHolder, Tagable):
other.ignored = True
if self.progress >= 0 and other.progress <= 0:
other.progress = self.progress
for uri_map in self.uris:
for uri_map in self.__uri_list:
uri_map.element = other
for link in self.collection_links:
if not MediaCollectionLink.get(collection=link.collection, element=other):
@ -557,7 +571,7 @@ class MediaElement(db.Entity, UriHolder, Tagable):
self.before_update()
def before_update(self) -> None:
self.add_single_uri(self.uri)
self.add_single_uri(self.__uri)
class MediaThumbnail(db.Entity):
@ -640,8 +654,9 @@ class MediaCollection(db.Entity, UriHolder, Tagable):
int,
auto=True,
)
uri: str = orm.Required(
__uri: str = orm.Required(
str,
column="uri",
unique=True,
)
@ -697,7 +712,7 @@ class MediaCollection(db.Entity, UriHolder, Tagable):
tag_list: Iterable[Tag] = orm.Set(
lambda: Tag,
)
uris: Iterable[CollectionUriMapping] = orm.Set(
__uri_set: Iterable[CollectionUriMapping] = orm.Set(
lambda: CollectionUriMapping,
)
media_links: Iterable[MediaCollectionLink] = orm.Set(
@ -707,21 +722,35 @@ class MediaCollection(db.Entity, UriHolder, Tagable):
lambda: MediaCollection,
)
@classmethod
def new(
    cls,
    *,
    extractor_name: str,
    extractor_key: str,
    uri: str,
) -> MediaCollection:
    """Create a MediaCollection, setting its private primary-uri column.

    The __uri attribute is name-mangled, so it cannot be passed as a
    plain keyword from outside the class; the mangled form is spelled
    out manually instead.
    """
    return cls(
        extractor_name=extractor_name,
        extractor_key=extractor_key,
        _MediaCollection__uri=uri,  # manual mangling for MediaCollection
    )
### for UriHolder
@property
def _primary_uri(self) -> str:
return self.uri
return self.__uri
def _set_primary_uri(self, uri: str) -> None:
self.uri = uri
self.__uri = uri
@property
def _get_uri_set(self) -> Set[str]:
return {m.uri for m in self.uris}
def _uri_set(self) -> Set[str]:
return {m.uri for m in self.__uri_set}
def _set_uri_set(self, uri_set: Set[str]) -> None:
self.uris = set()
self.__uri_set = set()
self.add_uris(uri_set)
### for Tagable
@ -917,7 +946,7 @@ class MediaCollection(db.Entity, UriHolder, Tagable):
self.before_update()
def before_update(self) -> None:
self.add_single_uri(self.uri)
self.add_single_uri(self.__uri)
class CollectionUriMapping(db.Entity):

@ -5,7 +5,6 @@ from typing import Iterable, Optional, Set
class UriHolder:
### abstracted
@abstractproperty
@ -17,7 +16,7 @@ class UriHolder:
"""Sets the primary uri of this object in a naive way."""
@abstractproperty
def _get_uri_set(self) -> Set[str]:
def _uri_set(self) -> Set[str]:
"""Returns the uri set of this object in a naive way."""
@abstractmethod
@ -45,6 +44,17 @@ class UriHolder:
"""Returns the current primary uri of this object."""
return self._primary_uri
@primary_uri.setter
def primary_uri(self, uri: str) -> None:
    """Sets the primary uri by delegating to set_primary_uri()."""
    self.set_primary_uri(uri)
@property
def uri_set(self) -> Set[str]:
    """Returns the full set of uris associated with this object."""
    return self._uri_set

# uri_set has no setter due to the problem which uri then becomes primary
# instead, set_as_only_uri & add_uris should be used so the primary becomes obvious
def is_primary_uri(self, compare_uri: str) -> bool:
    """Tell whether *compare_uri* equals this object's current primary uri."""
    return compare_uri == self.primary_uri
@ -54,6 +64,8 @@ class UriHolder:
It will also add the uri to the uri set.
Returns True if the uri was not in the uri set before.
You may also just write the primary_uri property if you do not need the return value.
"""
ret = self._add_uri_to_set(uri) # might fail, so try first
self._set_primary_uri(uri)
@ -68,3 +80,9 @@ class UriHolder:
def add_uris(self, uri_list: Iterable[Optional[str]]) -> bool:
    """Adds every non-None uri from *uri_list* to the uri set.

    Returns True if at least one uri was newly added.
    """
    # list comprehension (not a generator) on purpose: any() would
    # short-circuit a generator and skip adding the remaining uris
    return any([self.add_single_uri(uri) for uri in set(uri_list) if uri])
def remove_single_uri(self, uri: str) -> bool:
    """Removes the given uri from the uri set.

    Returns the result of _remove_uri_from_set — presumably True when
    the uri was actually present and removed (TODO confirm in subclasses).
    """
    return self._remove_uri_from_set(uri)
def remove_uris(self, uri_list: Iterable[Optional[str]]) -> bool:
    """Removes every non-None uri from *uri_list* from the uri set.

    Returns True if any single removal reported True.
    """
    # list comprehension (not a generator) on purpose: any() would
    # short-circuit a generator and skip removing the remaining uris
    return any([self.remove_single_uri(uri) for uri in set(uri_list) if uri])

@ -8,3 +8,12 @@ from .tag_scoring import (
PreferenceScoreCompatible,
PreferenceScoreSuper,
)
__all__ = [
"PreferenceScore",
"PreferenceScoreAppender",
"PreferenceScoreCompatible",
"PreferenceScoreSuper",
"generate_preference_list",
]

@ -88,8 +88,8 @@
<pre>{{ collection.notes or "" }}</pre>
<h2>Links</h2>
<ul>
{% for link in collection.uris|sort(attribute="uri") %}
<li>{{ link.uri | as_link }} {% if collection.uri == link.uri %}*{% endif %}</li>
{% for link in collection.uri_set|sort %}
<li>{{ link | as_link }} {% if collection.is_primary_uri(link) %}*{% endif %}</li>
{% endfor %}
</ul>
{% if collection.created_collections %}

@ -358,7 +358,7 @@
{# TODO do not hardcode certain extractors here #}
{% if element.extractor_name in ["ytdl", "youtube"] %}
{%- set opts = {
"video_uri": element.uri,
"video_uri": element.primary_uri,
"start": element.progress,
} -%}
<a class="button play_button" href="entertainment-decider:///player/play?{{ opts | encode_options }}">{{ symbol | safe }}</a>

@ -77,8 +77,8 @@
</ul>
<h2>Links</h2>
<ul>
{% for link in element.uris|sort(attribute="uri") %}
<li>{{ link.uri | as_link }} {% if element.uri == link.uri %}*{% endif %}</li>
{% for link in element.uri_set|sort %}
<li>{{ link | as_link }} {% if element.is_primary_uri(link) %}*{% endif %}</li>
{% endfor %}
</ul>
<h2>Blocked By</h2>

Loading…
Cancel
Save