Use element_lookup_cache

- caches which elements are blocked by which other elements (sketched below)
- resolves lookups for ordered playlists
master
Felix Stupp 2 years ago committed by Felix Stupp
parent 05fcd9fb6a
commit 7ea87a1e0d
Signed by: zocker
GPG Key ID: 93E1BD26F6B02FB7
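
The queries added below read the cache as a plain pair table: a row (element1, element2) means element1 still blocks element2 until element1 is watched or ignored. A minimal sketch of that shape, with hypothetical DDL inferred only from the column names used in this commit (the real table is created elsewhere in the project):

    # Hypothetical sketch only, not the project's actual schema;
    # column names are taken from the cache query this commit adds.
    ELEMENT_LOOKUP_CACHE_DDL = """
        CREATE TABLE element_lookup_cache (
            element1 INTEGER NOT NULL,  -- blocking element (e.g. an earlier episode)
            element2 INTEGER NOT NULL,  -- element that stays hidden until element1 is watched/ignored
            PRIMARY KEY (element1, element2)
        )
    """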

@@ -45,6 +45,7 @@ from entertainment_decider.models import (
     MediaCollectionLink,
     MediaElement,
     generate_preference_list,
+    get_all_considered,
     setup_custom_tables,
     update_element_lookup_cache,
 )
@@ -273,17 +274,16 @@ def dashboard():
     )[:pinned_limit]
     already_listed.update(link.element for link in links_from_pinned_collections)
     # for media
-    media_list: Iterable[MediaElement] = orm.select(m for m in MediaElement if not (m.ignored or m.watched)).order_by(orm.desc(MediaElement.release_date), MediaElement.id)
-    def get_considerable():
-        for element in media_list:
-            if element not in episodes_from_pinned_collections and element.can_considered:
-                yield element
+    media_list: Iterable[MediaElement] = get_all_considered(
+        order_by="elem.release_date DESC, elem.id",
+    )
+    limited_media = common.limit_iter(media_list, media_limit)
     # render
     return render_template(
         "dashboard.htm",
         began_videos=began_videos,
         links_from_pinned_collections = links_from_pinned_collections,
-        media_list = common.limit_iter(get_considerable(), media_limit),
+        media_list=limited_media,
     )
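
`common.limit_iter` itself is not part of this diff; judging by the `list(itertools.islice(...))` calls it replaces, it is presumably a thin wrapper along these lines (a sketch under that assumption, not the project's actual helper):

    import itertools
    from typing import Iterable, List, TypeVar

    T = TypeVar("T")

    def limit_iter(iterable: Iterable[T], limit: int) -> List[T]:
        # Materialize at most `limit` items from a (possibly lazy) iterable.
        return list(itertools.islice(iterable, limit))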
@@ -344,28 +344,25 @@ def show_collection_episodes(collection_id):
 @flask_app.route("/media")
 def list_media():
-    media_list: Iterable[MediaElement] = orm.select(m for m in MediaElement if not (m.ignored or m.watched)).order_by(orm.desc(MediaElement.release_date), MediaElement.id)
-    def get_considerable():
-        for element in media_list:
-            if element.can_considered:
-                yield element
+    media_list: Iterable[MediaElement] = get_all_considered(
+        "elem.release_date DESC, elem.id"
+    )
     return render_template(
         "media_list.htm",
-        media_list=list(itertools.islice(get_considerable(), 100))
+        media_list=common.limit_iter(media_list, 100),
     )


 @flask_app.route("/media/short")
 @flask_app.route("/media/short/<int:seconds>")
 def list_short_media(seconds: int = 10 * 60):
-    media_list: Iterable[MediaElement] = orm.select(m for m in MediaElement).order_by(orm.desc(MediaElement.release_date), MediaElement.id)
-    def get_considerable():
-        for element in media_list:
-            if element.left_length <= seconds and element.can_considered:
-                yield element
+    media_list: Iterable[MediaElement] = get_all_considered(
+        filter_by=f"(length - progress) <= {seconds}",
+        order_by="elem.release_date DESC, elem.id",
+    )
     return render_template(
         "media_list.htm",
-        media_list=list(itertools.islice(get_considerable(), 100))
+        media_list=list(itertools.islice(media_list, 100)),
     )


 @flask_app.route("/media/unsorted")

@@ -488,6 +488,9 @@ class MediaElement(db.Entity, Tagable):
     @property
     def can_considered(self) -> bool:
+        DIRECT_SQL = True
+        if DIRECT_SQL:
+            return is_considered(self.id)
         if self.skip_over:
             return False
         if self.release_date > datetime.now():
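
With `DIRECT_SQL` enabled, every access to `can_considered` now issues its own EXISTS query. For whole listings the commit also adds `are_multiple_considered` further down, which answers the same question for many ids in one round trip. A usage sketch, assuming an active Pony ORM `db_session`; `element` and `elements` are placeholder variables holding `MediaElement` rows:

    # One query per element (what can_considered now does internally):
    single_ok = is_considered(element.id)

    # One query for a whole listing:
    status = are_multiple_considered([e.id for e in elements])
    considerable = [e for e in elements if status[e.id]]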
@@ -735,6 +738,101 @@ def sql_cleanup(sql: str) -> str:
     return SQL_WHITESPACE_PATTERN.sub(" ", sql).strip()


 def sql_where_in(id: str, id_list: Iterable[str | int]) -> str:
     return f"{id} IN ({','.join(str(i) for i in id_list)})"


+# TODO reduce cache table to only contain videos not watched/ignored (not a huge speedup)
+# TODO add bool for (not)? blocking to direct dependencies (similar to above) (not a huge speedup)
+def sql_is_considered(elem_id: str, use_cache: bool = True) -> str:
+    # NOT EXISTS seems worlds better than making an OUTER JOIN
+    return sql_cleanup(
+        f"""
+            NOT EXISTS (
+        """
+        + (
+            f"""
+                SELECT c.element2
+                FROM element_lookup_cache c
+                INNER JOIN mediaelement m2 ON c.element1 = m2.id
+                WHERE c.element2 = {elem_id} AND NOT (m2.watched OR m2.ignored)
+            """
+            if use_cache
+            else f"""
+                SELECT *
+                FROM mediaelement look_elem
+                INNER JOIN mediacollectionlink link ON link.element = look_elem.id
+                INNER JOIN mediacollection coll ON coll.id = link.collection
+                INNER JOIN mediacollectionlink coll_link ON coll_link.collection = coll.id
+                INNER JOIN mediaelement coll_elem ON coll_elem.id = coll_link.element
+                WHERE look_elem.id = {elem_id}
+                    AND coll.watch_in_order
+                    AND NOT (coll_elem.watched OR coll_elem.ignored)
+                    AND (coll_link.season, coll_link.episode, coll_elem.release_date, coll_elem.id) < (link.season, link.episode, look_elem.release_date, look_elem.id)
+            """
+        )
+        + f"""
+            ) AND NOT EXISTS (
+                SELECT *
+                FROM mediaelement_mediaelement m_m
+                INNER JOIN mediaelement m ON m_m.mediaelement = m.id
+                WHERE m_m.mediaelement_2 = {elem_id} AND NOT (m.watched OR m.ignored)
+            )
+        """
+    )
+
+
+def is_considered(elem_id: int) -> bool:
+    return db.exists(
+        sql_cleanup(
+            f"""
+                SELECT elem.id
+                FROM mediaelement elem
+                WHERE elem.id = {elem_id}
+                    AND NOT (elem.watched OR elem.ignored)
+                    AND elem.release_date <= NOW()
+                    AND ({sql_is_considered('elem.id')})
+            """
+        )
+    )
+
+
+def are_multiple_considered(elem_ids: Iterable[int]) -> Mapping[int, bool]:
+    res = {
+        r[0]
+        for r in db.execute(
+            sql_cleanup(
+                f"""
+                    SELECT elem.id
+                    FROM mediaelement elem
+                    WHERE NOT (elem.watched OR elem.ignored)
+                        AND elem.release_date <= NOW()
+                        AND ({sql_is_considered("elem.id")})
+                """
+            )
+        )
+    }
+    return {elem_id: elem_id in res for elem_id in elem_ids}
+
+
+def get_all_considered(
+    order_by: str = "NULL", filter_by: str = "true"
+) -> List[MediaElement]:
+    return MediaElement.select_by_sql(
+        sql_cleanup(
+            f"""
+                SELECT elem.*
+                FROM mediaelement elem
+                WHERE NOT (elem.watched OR elem.ignored)
+                    AND elem.release_date <= NOW()
+                    AND {filter_by}
+                    AND ({sql_is_considered("elem.id")})
+                ORDER BY {order_by}
+            """
+        )
+    )
+
+
 def update_element_lookup_cache(collection_ids: List[int] = []):
     logging.info(
         f"Rebuild element_lookup_cache for {len(collection_ids) if collection_ids else 'all'} collections …"
