Compare commits

18 Commits

Author       SHA1        Message (Date)
Felix Stupp  7f2df84fe7  Implement media filter for raw media lists (1 year ago)
Felix Stupp  dc8fe05649  models/sql_helpers: add sql_condition_join (1 year ago)
Felix Stupp  ac9baab047  app: extract common code to prepare_media_sql (1 year ago)
Felix Stupp  d0bb9f538c  app.list_media: use kwarg explictly (1 year ago)
Felix Stupp  4554988b6a  MediaCollectionLink.sorted: add arg reverse (1 year ago)
Felix Stupp  4b448b364d  models/entities: reformat code (1 year ago)
Felix Stupp  ee0a77ca17  app.prepare_collection_episodes: invert order for channels (1 year ago)
Felix Stupp  c8767ead66  app.show_collection(_episodes): extract prepare_collection_episodes (1 year ago)
Felix Stupp  7d2b0675c0  app.show_collection(_episodes): use SQL based sorting (1 year ago)
Felix Stupp  5b94844b87  app.show_collection: rewrite if small collection part to using one if (1 year ago)
Felix Stupp  f311c53c9f  Add common types for natively Json objects (1 year ago)
Felix Stupp  e49789ede1  force_refresh_collection: Implement better API error handling (1 year ago)
Felix Stupp  0ab972bbf5  refresh_collections: Use coll.json_summary for error generation (1 year ago)
Felix Stupp  7f7971b9e6  Add Media(Element|Collection).json_summary (1 year ago)
Felix Stupp  a2e6dd87bf  Overhaul RSS collection extractor after update of library rss-parser to 1.1 (1 year ago)
Felix Stupp  2e81fc188f  models: Extract db into own file to prevent circular dependency (1 year ago)
Felix Stupp  c2c7cd69ed  Rename models/extras/exids_base to exidkeys (1 year ago)
Felix Stupp  bf8744a700  UriHolder._clear_uri_set: Change docstring to reflect clearing (1 year ago)

@@ -59,6 +59,9 @@ from entertainment_decider.models import (
setup_custom_tables,
update_element_lookup_cache,
)
from entertainment_decider.models.sql_helpers import (
sql_condition_join,
)
from entertainment_decider.preferences import PreferenceScore, generate_preference_list
from entertainment_decider.extractors.collection import (
collection_extract_uri,
@@ -440,18 +443,10 @@ def show_collection(collection_id: int) -> ResponseReturnValue:
collection: MediaCollection = MediaCollection.get(id=collection_id)
if collection is None:
return make_response(f"Not found", 404)
media_links = (
MediaCollectionLink.sorted(
MediaCollectionLink.select(lambda l: l.collection == collection)
)
if orm.count(collection.media_links) <= SMALL_COLLECTION_MAX_COUNT
else None
)
media_titles = (
remove_common_trails([link.element.title for link in media_links])
if media_links is not None
else None
)
media_links = None
media_titles = None
if orm.count(collection.media_links) <= SMALL_COLLECTION_MAX_COUNT:
media_links, media_titles = prepare_collection_episodes(collection)
return render_template(
"collection_element.htm",
collection=collection,
@@ -465,10 +460,7 @@ def show_collection_episodes(collection_id: int) -> ResponseReturnValue:
collection: MediaCollection = MediaCollection.get(id=collection_id)
if collection is None:
return make_response(f"Not found", 404)
media_links = MediaCollectionLink.sorted(
MediaCollectionLink.select(lambda l: l.collection == collection)
)
media_titles = remove_common_trails([link.element.title for link in media_links])
media_links, media_titles = prepare_collection_episodes(collection)
return render_template(
"collection_episodes.htm",
collection=collection,
@@ -477,43 +469,28 @@ def show_collection_episodes(collection_id: int) -> ResponseReturnValue:
)
@flask_app.route("/media")
def list_media() -> ResponseReturnValue:
media_list: Iterable[MediaElement] = get_all_considered(
"elem.release_date DESC, elem.id"
)
return render_template(
"media_list.htm",
media_list=common.limit_iter(media_list, 100),
check_considered=False,
)
@flask_app.route("/media/short")
@flask_app.route("/media/short/<int:seconds>")
def list_short_media(seconds: int = 10 * 60) -> ResponseReturnValue:
media_list: Iterable[MediaElement] = get_all_considered(
filter_by=f"(length - progress) <= {seconds}",
order_by="elem.release_date DESC, elem.id",
)
return render_template(
"media_list.htm",
media_list=list(itertools.islice(media_list, 100)),
check_considered=False,
def prepare_collection_episodes(
collection: MediaCollection,
) -> tuple[Iterable[MediaCollectionLink], Iterable[str]]:
media_links = MediaCollectionLink.select(
lambda l: l.collection == collection
).order_by(
MediaCollectionLink.desc_sort_key
if collection.is_creator
else MediaCollectionLink.sort_key
)
media_titles = remove_common_trails([link.element.title for link in media_links])
return media_links, media_titles
@flask_app.route("/media/long")
@flask_app.route("/media/long/<int:seconds>")
def list_long_media(seconds: int = 10 * 60) -> ResponseReturnValue:
media_list: Iterable[MediaElement] = get_all_considered(
filter_by=f"{seconds} <= (length - progress)",
order_by="elem.release_date DESC, elem.id",
)
@flask_app.route("/media")
def list_media() -> ResponseReturnValue:
media_list = prepare_media_sql()
return render_template(
"media_list.htm",
media_list=list(itertools.islice(media_list, 100)),
media_list=common.limit_iter(media_list, 100),
check_considered=False,
**pass_media_filter_vals(),
)
@@ -550,6 +527,33 @@ def list_unsorted_media() -> ResponseReturnValue:
)
def prepare_media_sql(
filter_by: str | None = None,
) -> Sequence[MediaElement]:
elem_len = "(elem.length - elem.progress)"
min_len = request.args.get("min_length", default=None, type=int)
max_len = request.args.get("max_length", default=None, type=int)
filter_str = sql_condition_join(
filter_by,
f"{min_len * 60} <= {elem_len}" if min_len else None,
f"{elem_len} <= {max_len * 60}" if max_len else None,
)
return get_all_considered(
filter_by=filter_str,
order_by="elem.release_date DESC, elem.id",
)
def pass_media_filter_vals() -> Mapping[str, Any]:
KEYS = [
"min_length",
"max_length",
]
return {
"show_filters": True,
} | {key: request.args.get(key) for key in KEYS}
@flask_app.route("/media/extract")
def extract_media() -> ResponseReturnValue:
return render_template("media_extract.htm")
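Illustration only (not part of the diff): a standalone sketch of the SQL condition prepare_media_sql() ends up building for a request such as GET /media?min_length=5. The values mirror the code above; sql_condition_join is the helper added in models/sql_helpers further down.

    # Sketch: what the length filter produces for min_length=5 and no max_length
    min_len = 5                    # request.args.get("min_length", type=int)
    max_len = None                 # request.args.get("max_length", type=int)
    elem_len = "(elem.length - elem.progress)"
    conditions = [
        f"{min_len * 60} <= {elem_len}" if min_len else None,
        f"{elem_len} <= {max_len * 60}" if max_len else None,
    ]
    # sql_condition_join wraps each non-None condition in parentheses, joins them
    # with AND and falls back to "TRUE" when no condition is given:
    filter_str = " AND ".join(f"({c})" for c in conditions if c is not None) or "TRUE"
    print(filter_str)  # (300 <= (elem.length - elem.progress))

The resulting string is passed to get_all_considered() as filter_by, while pass_media_filter_vals() simply echoes the raw query-string values back into the template so the form stays filled in.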
@@ -779,11 +783,7 @@ def refresh_collections() -> ResponseReturnValue:
orm.rollback()
errors.append(
{
"collection": {
"id": coll.id,
"title": coll.title,
"uri": coll.primary_uri,
},
"collection": coll.json_summary,
"error": gen_api_error(e),
},
)
@@ -807,7 +807,25 @@ def force_refresh_collection(collection_id: int) -> ResponseReturnValue:
coll: MediaCollection = MediaCollection.get(id=collection_id)
if coll is None:
return "404 Not Found", 404
state = collection_update(coll, check_cache_expired=False)
try:
state = collection_update(coll, check_cache_expired=False)
except Exception as e:
orm.rollback()
return (
{
"status": False,
"error": {
"msg": "Failed to update collection successfully",
"data": [
{
"collection": coll.json_summary,
"error": gen_api_error(e),
}
],
},
},
501,
)
if state.may_has_changed:
update_element_lookup_cache((coll.id,))
return redirect_back_or_okay()

@@ -20,9 +20,23 @@ from ._string import (
from ._subprocess import (
call,
)
from ._types import (
JsonContainer,
JsonLeaf,
JsonList,
JsonMapping,
JsonMappingKey,
JsonRepr,
)
__all__ = [
"JsonContainer",
"JsonLeaf",
"JsonList",
"JsonMapping",
"JsonMappingKey",
"JsonRepr",
"all_same",
"call",
"date_to_datetime",

@@ -0,0 +1,25 @@
from __future__ import annotations
from collections.abc import (
Sequence,
Mapping,
)
from typing import (
TypeAlias,
)
JsonMappingKey: TypeAlias = str | int | float
"""type for use in JSON mappings as key"""
JsonLeaf: TypeAlias = JsonMappingKey | bool | None
"""object natively mapping to JSON as values excluding containers"""
JsonMapping: TypeAlias = Mapping[JsonMappingKey, "JsonRepr"]
"""mapping natively mapping to JSON"""
JsonList: TypeAlias = Sequence["JsonRepr"]
"""list natively mapping to JSON"""
JsonContainer: TypeAlias = JsonList | JsonMapping
"""container natively mapping to JSON"""
JsonRepr: TypeAlias = JsonContainer | JsonLeaf
"""object natively mapping to JSON"""

@@ -3,7 +3,7 @@ from __future__ import annotations
from pony import orm # TODO remove
import requests
from rss_parser import Parser
from rss_parser.models import RSSFeed
from rss_parser.models.rss import RSS
from ...models import MediaCollection
from ..generic import (
@@ -15,7 +15,7 @@ from ..generic import (
from .base import CollectionExtractor
class RssCollectionExtractor(CollectionExtractor[RSSFeed]):
class RssCollectionExtractor(CollectionExtractor[RSS]):
PROTOCOL_PREFIX = "rss+"
SUPPORTED_PROTOCOLS = [
"http://",
@@ -47,20 +47,20 @@ class RssCollectionExtractor(CollectionExtractor[RSSFeed]):
def can_extract_offline(self, uri: str) -> bool:
return True
def _extract_offline(self, uri: str) -> ExtractedDataOffline[RSSFeed]:
def _extract_offline(self, uri: str) -> ExtractedDataOffline[RSS]:
cuted = self.__get_uri(uri)
return ExtractedDataOffline[RSSFeed](
return ExtractedDataOffline[RSS](
extractor_name=self.name,
object_key=cuted,
object_uri=uri,
)
def _extract_online(self, uri: str) -> ExtractedDataOnline[RSSFeed]:
def _extract_online(self, uri: str) -> ExtractedDataOnline[RSS]:
cuted = self.__get_uri(uri)
res = requests.get(cuted)
parser = Parser(xml=res.content)
data = parser.parse()
return ExtractedDataOnline[RSSFeed](
parser = Parser()
data = parser.parse(data=res.text)
return ExtractedDataOnline[RSS](
extractor_name=self.name,
object_key=cuted,
object_uri=uri,
@@ -70,18 +70,18 @@ class RssCollectionExtractor(CollectionExtractor[RSSFeed]):
def _update_object_raw(
self,
object: MediaCollection,
data: RSSFeed,
data: RSS,
) -> ChangedReport:
object.title = f"[rss] {data.title.strip()}"
object.description = data.description
object.title = f"[rss] {data.channel.title.content.strip()}"
object.description = data.channel.description.content
object.set_watch_in_order_auto(True)
object.add_single_uri(
self.__get_uri(object.primary_uri)
) # add url without prefix if required
for item in data.feed:
for item in data.channel.items:
element = self._add_episode(
collection=object,
uri=item.link,
uri=item.link.content,
)
if element:
orm.commit()
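For orientation, a hedged sketch of the rss-parser >= 1.1 call pattern the extractor now relies on. It uses only the calls and attribute accesses visible in the diff above (Parser().parse(data=...), data.channel, .content on fields); the feed URL is a placeholder.

    import requests
    from rss_parser import Parser
    from rss_parser.models.rss import RSS

    res = requests.get("https://example.org/feed.xml")   # placeholder URL
    data: RSS = Parser().parse(data=res.text)            # parse() now takes the XML text

    print(data.channel.title.content.strip())            # fields are wrapped, hence .content
    print(data.channel.description.content)
    for item in data.channel.items:
        print(item.link.content)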

@@ -8,6 +8,10 @@ from .custom_types import (
SafeStr,
)
from .db import (
db,
)
from .entities import (
CollectionStats,
CollectionUriMapping,
@@ -20,7 +24,6 @@ from .entities import (
Tag,
Tagable,
TagKey,
db,
)
from .predefined_tags import (

@@ -0,0 +1,4 @@
from pony import orm
db = orm.Database()

@@ -1,6 +1,9 @@
from __future__ import annotations
from abc import abstractproperty
from collections.abc import (
Mapping,
)
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
@@ -18,13 +21,13 @@ import requests
from pony import orm
from .custom_types import Query
from .db import db
from .thumbnails import THUMBNAIL_ALLOWED_TYPES, THUMBNAIL_HEADERS
from .extras import (
UriHolder,
)
from ..preferences.tag_protocol import TagableProto, TagProto
db = orm.Database()
T = TypeVar("T")
@@ -321,8 +324,15 @@ class MediaCollectionLink(db.Entity):
)
@staticmethod
def sorted(iterable: Iterable[MediaCollectionLink]) -> List[MediaCollectionLink]:
return sorted(iterable, key=MediaCollectionLink.sort_key)
def sorted(
iterable: Iterable[MediaCollectionLink],
reverse: bool = False,
) -> List[MediaCollectionLink]:
return sorted(
iterable,
key=MediaCollectionLink.sort_key,
reverse=reverse,
)
## Media Elements
@@ -578,6 +588,14 @@ class MediaElement(db.Entity, UriHolder, Tagable):
def info_link(self) -> str:
return f"/media/{self.id}"
@property
def json_summary(self) -> Mapping[str, str]:
return {
"id": self.id,
"title": self.title,
"uri": self.primary_uri,
}
### methods
def merge_to(self, other: MediaElement) -> None:
@@ -968,6 +986,14 @@ class MediaCollection(db.Entity, UriHolder, Tagable):
def info_link(self) -> str:
return f"/collection/{self.id}"
@property
def json_summary(self) -> Mapping[str, str]:
return {
"id": self.id,
"title": self.title,
"uri": self.primary_uri,
}
### methods
def set_watch_in_order_auto(self, watch_in_order: bool) -> None:

@@ -1,4 +1,4 @@
from .exids_base import (
from .exidkeys import (
ExIdKey_Columns,
ExIdKeyCompatible,
ExIdKeyData,

@@ -42,7 +42,7 @@ class UriHolder:
@abstractmethod
def _clear_uri_set(self) -> None:
"""Sets the uri set of this object in a naive way."""
"""Clears the uri set of this object in a naive way."""
@abstractmethod
def _add_uri_to_set(self, uri: str) -> bool:

@@ -13,5 +13,16 @@ def sql_cleanup(sql: str) -> str:
return SQL_WHITESPACE_PATTERN.sub(" ", sql).strip()
def sql_condition_join(
*sql_conditions: str | None,
joiner: str = "AND",
default: str = "TRUE",
) -> str:
return (
f" {joiner} ".join(f"({cond})" for cond in sql_conditions if cond is not None)
or default
)
def sql_where_in(id: str, id_list: Iterable[str | int]) -> str:
return f"{id} IN ({','.join(str(i) for i in id_list)})"
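For reference, a few expected results of the new helper, using the import path shown in the app diff above:

    from entertainment_decider.models.sql_helpers import sql_condition_join

    assert sql_condition_join("a = 1", None, "b < 2") == "(a = 1) AND (b < 2)"
    assert sql_condition_join("a = 1", "b < 2", joiner="OR") == "(a = 1) OR (b < 2)"
    assert sql_condition_join(None, None) == "TRUE"   # falls back to the default

None entries are skipped, which is what lets prepare_media_sql() pass optional min/max length conditions without special-casing missing query parameters.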

@@ -6,7 +6,7 @@ pycountry>=20
python-magic>=0.4.25
pyyaml>=5.4.1
requests>=2.26
rss-parser>=0.2.3
rss-parser>=1.1
tmdbsimple>=2.9.1
yt-dlp>=2022.6.29
git+https://git.banananet.work/zocker/python-jsoncache#egg=jsoncache

@@ -10,6 +10,21 @@
<body>
{{ macros.body_header() }}
<h1>{{ title }}</h1>
{% if show_filters | default(False) %}
<form class="form-single-button" method="get" action="{{ this_url() }}">
<label>
<span>Min Length:</span>
<input type="number" name="min_length" min="0" size="4" value="{{ min_length or '' }}"/>
<span>min</span>
</label>
<label>
<span>Max Length:</span>
<input type="number" name="max_length" min="0" size="4" value="{{ max_length or '' }}"/>
<span>min</span>
</label>
<button type="submit">Filter</button>
</form>
{% endif %}
{{ macros.media_thumbnail_list(
elements=media_list,
check_considered=check_considered|default(True),
