Skip to content

Commit

Permalink
Merge pull request #27 from LimeDrive:develop
Browse files Browse the repository at this point in the history
v1.1.0
  • Loading branch information
LimeDrive authored Sep 10, 2024
2 parents 9a68dbc + f356f79 commit 20d39b3
Show file tree
Hide file tree
Showing 14 changed files with 1,074 additions and 1,128 deletions.
8 changes: 6 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
FROM python:3.11.9-slim-bullseye
FROM python:3.11-alpine

ENV PYTHONUNBUFFERED=1
ENV PIP_NO_CACHE_DIR=1

RUN pip install --no-cache-dir poetry

RUN pip install poetry
WORKDIR /app

COPY pyproject.toml poetry.lock ./
Expand Down
59 changes: 18 additions & 41 deletions deploy/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,14 +8,14 @@ services:
environment:
SECRET_API_KEY: 'superkey_that_can_be_changed'
TMDB_API_KEY: 'REDACTED'
FLARESOLVERR_HOST: flaresolverr
JACKETT_HOST: jackett
JACKETT_API_KEY: 'REDACTED'
ZILEAN_URL: 'http://zilean:8181'
REDIS_HOST: redis
REDIS_PORT: 6379
LOG_LEVEL: DEBUG
LOG_REDACTED: False
USE_HTTPS: True
TZ: Europe/London
ports:
- 8080:8080
Expand All @@ -25,7 +25,7 @@ services:
- redis
- zilean
- jackett
- flaresolverr
- postgres
restart: unless-stopped

redis:
Expand All @@ -47,50 +47,30 @@ services:
deploy:
resources:
limits:
memory: 1g
memory: 3g
volumes:
- ./zilean_data:/app/data
environment:
- Zilean__ElasticSearch__Url=http://elasticsearch:9200
Zilean__Database__ConnectionString: "Host=postgres;Port=5432;Database=zilean;Username=zilean;Password=zilean"
Zilean__Dmm__ImportBatched: "true"
Zilean__Dmm__MaxFilteredResults: 200
Zilean__Dmm__MinimumScoreMatch: 0.85
depends_on:
elasticsearch:
condition: service_healthy
- postgres


elasticsearch:
image: elasticsearch:8.14.3@sha256:1ddbb1ae0754278f3ab53edc24fcc5c790ebc2422cc47abea760b24abee2d88a
container_name: elasticsearch
postgres:
image: postgres:16.3-alpine3.20
container_name: postgres
restart: unless-stopped
environment:
ES_SETTING_DISCOVERY_TYPE: single-node
ES_SETTING_XPACK_SECURITY_ENABLED: false
ES_SETTING_BOOTSTRAP_MEMORY__LOCK: true
ES_JAVA_OPTS: "-Xms512m -Xmx512m"
PGDATA: /var/lib/postgresql/data/pgdata
POSTGRES_USER: zilean
POSTGRES_PASSWORD: zilean
POSTGRES_DB: zilean
expose:
- 9200
deploy:
resources:
limits:
memory: 2g
healthcheck:
test: ["CMD-SHELL", "curl -s http://localhost:9200 | grep -q 'You Know, for Search'"]
interval: 10s
timeout: 10s
retries: 5
- 5432
volumes:
- elastic_data:/usr/share/elasticsearch/data:rw

flaresolverr:
image: ghcr.io/flaresolverr/flaresolverr:latest
container_name: flaresolverr
environment:
- LOG_LEVEL=${LOG_LEVEL:-info}
- LOG_HTML=${LOG_HTML:-false}
- CAPTCHA_SOLVER=${CAPTCHA_SOLVER:-none}
- TZ=Europe/London
expose:
- 8191
restart: unless-stopped
- ./zilean_postgresql:/var/lib/postgresql/data/pgdata

jackett:
image: lscr.io/linuxserver/jackett:latest
Expand All @@ -104,7 +84,4 @@ services:
- ./blackhole:/downloads
ports:
- 9117:9117
restart: unless-stopped

volumes:
elastic_data:
restart: unless-stopped
1,885 changes: 932 additions & 953 deletions poetry.lock

Large diffs are not rendered by default.

5 changes: 3 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "stream-fusion"
version = "1.0.0"
version = "1.1.0"
description = "StreamFusion is an advanced plugin for Stremio that significantly enhances its streaming capabilities with debrid services."
authors = ["LimeDrive <[email protected]>"]
readme = "README.md"
Expand All @@ -15,7 +15,6 @@ bencode-py = "^4.0.0"
jinja2 = "^3.1.4"
aiocron = "^1.8"
python-dotenv = "^1.0.1"
rank-torrent-name = "^0.2.21"
cachetools = "^5.3.3"
redis = "^5.0.7"
aiohttp = "^3.9.5"
Expand All @@ -35,6 +34,8 @@ timeout-decorator = "^0.5.0"
tenacity = "^8.5.0"
aiohttp-socks = "^0.8.4"
tmdbv3api = "^1.9.0"
rank-torrent-name = "^1.0.4"
orjson = "^3.10.7"


[build-system]
Expand Down
4 changes: 2 additions & 2 deletions stream_fusion/utils/cache/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,8 +103,8 @@ def cache_public(torrents: List[TorrentItem], media):
cache_item["hash"] = torrent.info_hash
cache_item["indexer"] = torrent.indexer
cache_item["quality"] = (
torrent.parsed_data.resolution[0]
if len(torrent.parsed_data.resolution) > 0
torrent.parsed_data.resolution
if torrent.parsed_data.resolution
else "Unknown"
)
cache_item["qualitySpec"] = ";".join(torrent.parsed_data.quality)
Expand Down
14 changes: 7 additions & 7 deletions stream_fusion/utils/filter_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@

def sort_quality(item: TorrentItem):
logger.debug(f"Evaluating quality for item: {item.raw_title}")
if len(item.parsed_data.resolution) == 0:
if not item.parsed_data.resolution:
return float("inf"), True
resolution = item.parsed_data.resolution[0]
resolution = item.parsed_data.resolution
priority = quality_order.get(resolution, float("inf"))
return priority, item.parsed_data.resolution is None

Expand Down Expand Up @@ -85,19 +85,19 @@ def filter_out_non_matching_series(items, season, episode):

for item in items:
logger.debug(f"Checking item: {item.raw_title}")
if len(item.parsed_data.season) == 0 and len(item.parsed_data.episode) == 0:
if len(item.parsed_data.seasons) == 0 and len(item.parsed_data.episodes) == 0:
logger.debug("Item with no season and episode, skipped")
continue
if (
len(item.parsed_data.episode) == 0
and numeric_season in item.parsed_data.season
len(item.parsed_data.episodes) == 0
and numeric_season in item.parsed_data.seasons
):
logger.debug("Season match found, episode not specified")
filtered_items.append(item)
continue
if (
numeric_season in item.parsed_data.season
and numeric_episode in item.parsed_data.episode
numeric_season in item.parsed_data.seasons
and numeric_episode in item.parsed_data.episodes
):
logger.debug("Exact season and episode match found")
filtered_items.append(item)
Expand Down
2 changes: 1 addition & 1 deletion stream_fusion/utils/general.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ def season_episode_in_filename(filename, season, episode):

parsed_name = parse(filename)

return season in parsed_name.season and episode in parsed_name.episode
return season in parsed_name.seasons and episode in parsed_name.episodes


def get_info_hash_from_magnet(magnet: str):
Expand Down
3 changes: 2 additions & 1 deletion stream_fusion/utils/torrent/torrent_item.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from RTN import parse
from RTN.models import ParsedData
from urllib.parse import quote

from stream_fusion.utils.models.media import Media
Expand Down Expand Up @@ -30,7 +31,7 @@ def __init__(self, raw_title, size, magnet, info_hash, link, seeders, languages,

self.availability = False # If it's instantly available on the debrid service

self.parsed_data = parsed_data # Ranked result
self.parsed_data: ParsedData = parsed_data # Ranked result

def to_debrid_stream_query(self, media: Media) -> dict:
return {
Expand Down
4 changes: 2 additions & 2 deletions stream_fusion/utils/torrent/torrent_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ def __process_torrent(self, result: TorrentItem, torrent_file):
result.files = metadata["info"]["files"]

if result.type == "series":
file_details = self.__find_episode_file(result.files, result.parsed_data.season, result.parsed_data.episode)
file_details = self.__find_episode_file(result.files, result.parsed_data.seasons, result.parsed_data.episodes)

if file_details is not None:
self.logger.debug("File details")
Expand Down Expand Up @@ -244,7 +244,7 @@ def __find_episode_file(self, file_structure, season, episode):

parsed_file = parse(file)

if season[0] in parsed_file.season and episode[0] in parsed_file.episode:
if season[0] in parsed_file.seasons and episode[0] in parsed_file.episodes:
episode_files.append({
"file_index": file_index,
"title": file,
Expand Down
Loading

0 comments on commit 20d39b3

Please sign in to comment.