refactor: Implement cdndania downloader with asyncio, type hints, and subprocess log streaming

This commit is contained in:
2025-12-31 16:01:15 +09:00
parent 88c342b3b6
commit 6a1b30510c
6 changed files with 562 additions and 353 deletions

View File

@@ -64,4 +64,4 @@
1. **FlaskFarm 웹 > 플러그인 > Anime Downloader > 설정**으로 이동합니다. 1. **FlaskFarm 웹 > 플러그인 > Anime Downloader > 설정**으로 이동합니다.
2. **Proxy URL**: 필요한 경우 `http://IP:PORT` 형식으로 입력 (기본값: 공란). 2. **Proxy URL**: 필요한 경우 `http://IP:PORT` 형식으로 입력 (기본값: 공란).
3. **저장 경로**: 다운로드된 파일이 저장될 경로 설정. 3. **저장 경로**: 다운로드된 파일이 저장될 경로 설정.
4. **다운로드 방법**: `ffmpeg` (기본) 추천. 4. **다운로드 방법**: `yt-dlp` (기본) 추천.

View File

@@ -4,6 +4,8 @@ cdndania.com CDN 전용 다운로더 (curl_cffi 사용)
- CDN 보안 검증 우회 - CDN 보안 검증 우회
- subprocess로 분리 실행하여 Flask 블로킹 방지 - subprocess로 분리 실행하여 Flask 블로킹 방지
""" """
from __future__ import annotations
import os import os
import sys import sys
import time import time
@@ -12,6 +14,7 @@ import logging
import subprocess import subprocess
import tempfile import tempfile
import threading import threading
from typing import Callable, Optional, Tuple, Any, IO
from urllib.parse import urljoin, urlparse from urllib.parse import urljoin, urlparse
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -20,24 +23,33 @@ logger = logging.getLogger(__name__)
class CdndaniaDownloader: class CdndaniaDownloader:
"""cdndania.com 전용 다운로더 (세션 기반 보안 우회)""" """cdndania.com 전용 다운로더 (세션 기반 보안 우회)"""
def __init__(self, iframe_src, output_path, referer_url=None, callback=None, proxy=None, threads=16, on_download_finished=None): def __init__(
self.iframe_src = iframe_src # cdndania.com 플레이어 iframe URL self,
self.output_path = output_path iframe_src: str,
self.referer_url = referer_url or "https://ani.ohli24.com/" output_path: str,
self.callback = callback referer_url: Optional[str] = None,
self.proxy = proxy callback: Optional[Callable[[int, int, int, str, str], None]] = None,
self.threads = threads proxy: Optional[str] = None,
self.on_download_finished = on_download_finished threads: int = 16,
self.cancelled = False on_download_finished: Optional[Callable[[], None]] = None
self.released = False # 조기 반환 여부 ) -> None:
self.iframe_src: str = iframe_src # cdndania.com 플레이어 iframe URL
self.output_path: str = output_path
self.referer_url: str = referer_url or "https://ani.ohli24.com/"
self.callback: Optional[Callable[[int, int, int, str, str], None]] = callback
self.proxy: Optional[str] = proxy
self.threads: int = threads
self.on_download_finished: Optional[Callable[[], None]] = on_download_finished
self.cancelled: bool = False
self.released: bool = False # 조기 반환 여부
# 진행 상황 추적 # 진행 상황 추적
self.start_time = None self.start_time: Optional[float] = None
self.total_bytes = 0 self.total_bytes: int = 0
self.current_speed = 0 self.current_speed: float = 0
self.process = None self.process: Optional[subprocess.Popen[str]] = None
def download(self): def download(self) -> Tuple[bool, str]:
"""subprocess로 다운로드 실행 (Flask 블로킹 방지)""" """subprocess로 다운로드 실행 (Flask 블로킹 방지)"""
try: try:
# 현재 파일 경로 (subprocess에서 실행할 스크립트) # 현재 파일 경로 (subprocess에서 실행할 스크립트)
@@ -71,6 +83,20 @@ class CdndaniaDownloader:
text=True text=True
) )
# Subprocess 로그 실시간 출력용 스레드
def log_reader(pipe: IO[str]) -> None:
try:
for line in iter(pipe.readline, ''):
if line:
logger.info(f"[Worker] {line.strip()}")
else:
break
except ValueError:
pass
log_thread = threading.Thread(target=log_reader, args=(self.process.stderr,), daemon=True)
log_thread.start()
self.start_time = time.time() self.start_time = time.time()
last_callback_time = 0 last_callback_time = 0
@@ -148,24 +174,59 @@ class CdndaniaDownloader:
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
return False, str(e) return False, str(e)
def cancel(self): def cancel(self) -> None:
"""다운로드 취소""" """다운로드 취소"""
self.cancelled = True self.cancelled = True
if self.process: if self.process:
self.process.terminate() self.process.terminate()
def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path, threads=16): def _download_worker(
"""실제 다운로드 작업 (subprocess에서 실행)""" iframe_src: str,
output_path: str,
referer_url: Optional[str],
proxy: Optional[str],
progress_path: str,
threads: int = 16
) -> None:
"""실제 다운로드 작업 (subprocess에서 실행) - AsyncIO Wrapper"""
import sys
import asyncio
# Windows/Mac 등에서 loop 정책 설정이 필요할 수 있음
if sys.platform == 'win32':
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
try:
asyncio.run(_download_worker_async(iframe_src, output_path, referer_url, proxy, progress_path, threads))
except KeyboardInterrupt:
pass
except Exception as e:
import traceback
import logging
logging.getLogger(__name__).error(f"AsyncIO Loop Error: {e}")
traceback.print_exc()
sys.exit(1)
async def _download_worker_async(
iframe_src: str,
output_path: str,
referer_url: Optional[str],
proxy: Optional[str],
progress_path: str,
threads: int = 16
) -> None:
"""실제 다운로드 작업 (AsyncIO)"""
import sys import sys
import os import os
import time import time
import json import json
import tempfile import tempfile
from urllib.parse import urljoin import logging
from urllib.parse import urljoin, urlparse
import asyncio
# 로깅 설정 (subprocess용) # 로깅 설정 (subprocess용)
import logging
logging.basicConfig( logging.basicConfig(
level=logging.INFO, level=logging.INFO,
format='[%(asctime)s|%(levelname)s|%(name)s] %(message)s', format='[%(asctime)s|%(levelname)s|%(name)s] %(message)s',
@@ -173,10 +234,26 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
) )
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
def update_progress(percent, current, total, speed, elapsed, status=None): # curl_cffi 임포트
"""진행 상황을 파일에 저장"""
try: try:
data = { from curl_cffi.requests import AsyncSession
except ImportError:
import subprocess
subprocess.run([sys.executable, "-m", "pip", "install", "curl_cffi", "-q"],
timeout=120, check=True)
from curl_cffi.requests import AsyncSession
# Progress Update Helper
def update_progress(
percent: int,
current: int,
total: int,
speed: str,
elapsed: str,
status: Optional[str] = None
) -> None:
try:
data: dict[str, Any] = {
'percent': percent, 'percent': percent,
'current': current, 'current': current,
'total': total, 'total': total,
@@ -191,7 +268,7 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
except: except:
pass pass
def format_speed(bytes_per_sec): def format_speed(bytes_per_sec: float) -> str:
if bytes_per_sec < 1024: if bytes_per_sec < 1024:
return f"{bytes_per_sec:.0f} B/s" return f"{bytes_per_sec:.0f} B/s"
elif bytes_per_sec < 1024 * 1024: elif bytes_per_sec < 1024 * 1024:
@@ -199,7 +276,7 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
else: else:
return f"{bytes_per_sec / (1024 * 1024):.2f} MB/s" return f"{bytes_per_sec / (1024 * 1024):.2f} MB/s"
def format_time(seconds): def format_time(seconds: float) -> str:
seconds = int(seconds) seconds = int(seconds)
if seconds < 60: if seconds < 60:
return f"{seconds}" return f"{seconds}"
@@ -209,21 +286,14 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
return f"{seconds // 3600}시간 {(seconds % 3600) // 60}" return f"{seconds // 3600}시간 {(seconds % 3600) // 60}"
try: try:
# curl_cffi 임포트
try:
from curl_cffi import requests as cffi_requests
except ImportError:
subprocess.run([sys.executable, "-m", "pip", "install", "curl_cffi", "-q"],
timeout=120, check=True)
from curl_cffi import requests as cffi_requests
# 세션 생성 (Chrome 120 TLS 핑거프린트 사용)
session = cffi_requests.Session(impersonate="chrome120")
proxies = None proxies = None
if proxy: if proxy:
proxies = {"http": proxy, "https": proxy} proxies = {"http": proxy, "https": proxy}
# --- Async Session Context ---
# impersonate="chrome110"으로 변경 (TLS Fingerprint 변경, Safari 이슈 회피)
async with AsyncSession(impersonate="chrome110", proxies=proxies) as session:
# 1. iframe URL에서 video_id 추출 # 1. iframe URL에서 video_id 추출
video_id = None video_id = None
if "/video/" in iframe_src: if "/video/" in iframe_src:
@@ -232,85 +302,77 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
video_id = iframe_src.split("/v/")[1].split("?")[0].split("&")[0] video_id = iframe_src.split("/v/")[1].split("?")[0].split("&")[0]
if not video_id: if not video_id:
print(f"Could not extract video ID from: {iframe_src}", file=sys.stderr) log.error(f"Could not extract video ID from: {iframe_src}")
sys.exit(1) sys.exit(1)
log.info(f"Extracted video_id: {video_id}") log.info(f"Extracted video_id: {video_id}")
# 2. 플레이어 페이지 먼저 방문 (세션/쿠키 획득) # 2. 플레이어 페이지 먼저 방문 (세션/쿠키 획득)
headers = { headers = {
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", # "user-agent": "...", # impersonate가 알아서 설정함
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8", "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
"accept-language": "ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7",
"referer": referer_url, "referer": referer_url,
"sec-ch-ua": '"Not_A Brand";v="8", "Chromium";v="120", "Google Chrome";v="120"', # "sec-ch-ua": ..., # 제거
"sec-ch-ua-mobile": "?0", # "sec-ch-ua-mobile": "?0",
"sec-ch-ua-platform": '"macOS"', # "sec-ch-ua-platform": '"macOS"',
"sec-fetch-dest": "iframe", "sec-fetch-dest": "iframe",
"sec-fetch-mode": "navigate", "sec-fetch-mode": "navigate",
"sec-fetch-site": "cross-site", "sec-fetch-site": "cross-site",
} }
log.info(f"Visiting iframe page: {iframe_src}") log.info(f"Visiting iframe page: {iframe_src}")
resp = session.get(iframe_src, headers=headers, proxies=proxies, timeout=30) resp = await session.get(iframe_src, headers=headers)
log.info(f"Iframe page status: {resp.status_code}") log.info(f"Iframe page status: {resp.status_code}")
parsed_iframe = urlparse(iframe_src)
cdn_base_url = f"{parsed_iframe.scheme}://{parsed_iframe.netloc}"
# 3. getVideo API 호출 # 3. getVideo API 호출
api_url = f"https://cdndania.com/player/index.php?data={video_id}&do=getVideo" api_url = f"{cdn_base_url}/player/index.php?data={video_id}&do=getVideo"
api_headers = { api_headers = {
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", # "user-agent": ...,
"x-requested-with": "XMLHttpRequest", "x-requested-with": "XMLHttpRequest",
"content-type": "application/x-www-form-urlencoded; charset=UTF-8", "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
"referer": iframe_src, "referer": iframe_src,
"origin": "https://cdndania.com", "origin": cdn_base_url,
"accept": "application/json, text/javascript, */*; q=0.01", "accept": "application/json, text/javascript, */*; q=0.01",
} }
post_data = { post_data = {"hash": video_id, "r": referer_url}
"hash": video_id,
"r": referer_url
}
log.info(f"Calling video API: {api_url}") log.info(f"Calling video API: {api_url}")
api_resp = session.post(api_url, headers=api_headers, data=post_data, api_resp = await session.post(api_url, headers=api_headers, data=post_data)
proxies=proxies, timeout=30)
if api_resp.status_code != 200: if api_resp.status_code != 200:
print(f"API request failed: HTTP {api_resp.status_code}", file=sys.stderr) log.error(f"API request failed: HTTP {api_resp.status_code}")
sys.exit(1) sys.exit(1)
try: try:
data = api_resp.json() data = api_resp.json()
except: except:
print(f"Failed to parse API response: {api_resp.text[:200]}", file=sys.stderr) log.error("Failed to parse API response")
sys.exit(1) sys.exit(1)
video_url = data.get("videoSource") or data.get("securedLink") video_url = data.get("videoSource") or data.get("securedLink")
if not video_url: if not video_url:
print(f"No video URL in API response: {data}", file=sys.stderr) log.error(f"No video URL in API response: {data}")
sys.exit(1) sys.exit(1)
log.info(f"Got video URL: {video_url}") log.info(f"Got video URL: {video_url}")
# 4. m3u8 다운로드 (동일 세션 유지!) # 4. m3u8 다운로드
m3u8_headers = { m3u8_headers = {
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", # "user-agent": ...,
"referer": iframe_src, "referer": iframe_src,
"origin": "https://cdndania.com", "origin": cdn_base_url,
"accept": "*/*", "accept": "*/*",
} }
log.info(f"Fetching m3u8: {video_url}") log.info(f"Fetching m3u8: {video_url}")
m3u8_resp = session.get(video_url, headers=m3u8_headers, proxies=proxies, timeout=30) m3u8_resp = await session.get(video_url, headers=m3u8_headers)
if m3u8_resp.status_code != 200:
print(f"m3u8 fetch failed: HTTP {m3u8_resp.status_code}", file=sys.stderr)
sys.exit(1)
m3u8_content = m3u8_resp.text m3u8_content = m3u8_resp.text
# Master playlist 확인 # Master playlist 확인 및 미디어 플레이리스트 추적
if "#EXT-X-STREAM-INF" in m3u8_content: if "#EXT-X-STREAM-INF" in m3u8_content:
# 가장 높은 품질의 미디어 플레이리스트 URL 추출
base = video_url.rsplit('/', 1)[0] + '/' base = video_url.rsplit('/', 1)[0] + '/'
last_url = None last_url = None
for line in m3u8_content.strip().split('\n'): for line in m3u8_content.strip().split('\n'):
@@ -323,11 +385,11 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
if last_url: if last_url:
log.info(f"Following media playlist: {last_url}") log.info(f"Following media playlist: {last_url}")
m3u8_resp = session.get(last_url, headers=m3u8_headers, proxies=proxies, timeout=30) m3u8_resp = await session.get(last_url, headers=m3u8_headers)
m3u8_content = m3u8_resp.text m3u8_content = m3u8_resp.text
video_url = last_url video_url = last_url
# 5. 세그먼트 URL 파싱 # 5. 세그먼트 파싱
base = video_url.rsplit('/', 1)[0] + '/' base = video_url.rsplit('/', 1)[0] + '/'
segments = [] segments = []
for line in m3u8_content.strip().split('\n'): for line in m3u8_content.strip().split('\n'):
@@ -339,153 +401,135 @@ def _download_worker(iframe_src, output_path, referer_url, proxy, progress_path,
segments.append(urljoin(base, line)) segments.append(urljoin(base, line))
if not segments: if not segments:
print("No segments found in m3u8", file=sys.stderr) log.error("No segments found")
sys.exit(1) sys.exit(1)
log.info(f"Found {len(segments)} segments") log.info(f"Found {len(segments)} segments. Starting AsyncIO download...")
# 6. 세그먼트 다운로드 (병렬 처리) # 6. Async Segment Download
start_time = time.time() # 쿠키 유지: session.cookies는 이미 이전 요청들로 인해 채워져 있음 (자동 관리)
total_bytes = 0
current_speed = 0
# 진행 상황 공유 변수 (Thread-safe하게 관리 필요) # 출력 디렉토리
completed_segments = 0
lock = threading.Lock()
# 출력 디렉토리 미리 생성 (임시 폴더 생성을 위해)
output_dir = os.path.dirname(output_path) output_dir = os.path.dirname(output_path)
if output_dir and not os.path.exists(output_dir): if output_dir and not os.path.exists(output_dir):
os.makedirs(output_dir) os.makedirs(output_dir)
with tempfile.TemporaryDirectory(dir=output_dir) as temp_dir: with tempfile.TemporaryDirectory(dir=output_dir) as temp_dir:
segment_files = [None] * len(segments) # 순서 보장을 위해 미리 할당
total_segments = len(segments)
log.info(f"Temp directory: {temp_dir}") log.info(f"Temp directory: {temp_dir}")
# 다운로드 worker
log.info(f"Starting optimized download: Binary Merge Mode (Threads: {threads})")
# 세그먼트 다운로드 함수 start_time = time.time()
def download_segment(index, url): total_segments = len(segments)
completed_segments = 0
total_bytes = 0
segment_files = [None] * total_segments
# Semaphore로 동시성 제어 - 설정값 사용 (UI에서 1~16 선택 가능)
actual_threads = threads # 설정에서 전달된 값 사용
log.info(f"Concurrency set to {actual_threads} (from settings)")
sem = asyncio.Semaphore(actual_threads)
async def download_one(idx: int, url: str) -> None:
nonlocal completed_segments, total_bytes nonlocal completed_segments, total_bytes
try: async with sem:
# 재시도 로직 outfile = os.path.join(temp_dir, f"segment_{idx:05d}.ts")
for retry in range(3): for retry in range(3):
try: try:
seg_resp = session.get(url, headers=m3u8_headers, proxies=proxies, timeout=30) # 스트림 방식으로 다운로드하면 메모리 절약 가능하지만, TS는 작으므로 그냥 read
if seg_resp.status_code == 200: # log.debug(f"Req Seg {idx}...")
content = seg_resp.content # 타임아웃 강제 적용 (asyncio.wait_for) - Hang 방지
if len(content) < 100: resp = await asyncio.wait_for(
if retry == 2: session.get(url, headers=m3u8_headers),
raise Exception(f"Segment data too small ({len(content)}B)") timeout=20
time.sleep(1) )
continue
# 파일 저장 if resp.status_code == 200:
filename = f"segment_{index:05d}.ts" content = resp.content
filepath = os.path.join(temp_dir, filename) if len(content) < 500:
with open(filepath, 'wb') as f: # HTML/에러 체크
head = content[:100].decode('utf-8', errors='ignore').lower()
if "<html" in head or "<!doctype" in head:
if retry == 2:
log.warning(f"Seg {idx} is HTML garbage. Retrying...")
raise Exception("HTML content received")
# Write File (Sync write is fine for tmpfs/SSD usually, otherwise aiofiles)
with open(outfile, 'wb') as f:
f.write(content) f.write(content)
# 결과 기록 segment_files[idx] = f"segment_{idx:05d}.ts"
with lock:
segment_files[index] = filename
total_bytes += len(content)
completed_segments += 1 completed_segments += 1
total_bytes += len(content)
# 진행률 업데이트 (너무 자주는 말고 10개마다) # Log Progress
if completed_segments % 10 == 0 or completed_segments == total_segments: if completed_segments == 1 or completed_segments % 10 == 0 or completed_segments == total_segments:
pct = int((completed_segments / total_segments) * 100) pct = int((completed_segments / total_segments) * 100)
elapsed = time.time() - start_time elapsed = time.time() - start_time
speed = total_bytes / elapsed if elapsed > 0 else 0 speed = total_bytes / elapsed if elapsed > 0 else 0
log.info(f"Progress: {pct}% ({completed_segments}/{total_segments}) Speed: {format_speed(speed)}") log.info(f"Progress: {pct}% ({completed_segments}/{total_segments}) Speed: {format_speed(speed)}")
update_progress(pct, completed_segments, total_segments, format_speed(speed), format_time(elapsed)) update_progress(pct, completed_segments, total_segments, format_speed(speed), format_time(elapsed))
return True return
except asyncio.TimeoutError:
if retry == 2:
log.error(f"Seg {idx} TIMEOUT.")
# else:
# log.debug(f"Seg {idx} timeout, retrying...")
pass
except Exception as e: except Exception as e:
if retry == 2: if retry == 2:
log.error(f"Seg {index} failed after retries: {e}") log.error(f"Seg {idx} failed: {e}")
raise e else:
time.sleep(0.5) log.warning(f"Seg {idx} error: {e}. Retrying in 5s...")
except Exception as e: await asyncio.sleep(5) # Backoff increased to 5s
return False
# 스레드 풀 실행 # Create Tasks
from concurrent.futures import ThreadPoolExecutor tasks = [download_one(i, url) for i, url in enumerate(segments)]
await asyncio.gather(*tasks)
# 설정된 스레드 수로 병렬 다운로드 # Check Results
with ThreadPoolExecutor(max_workers=threads) as executor:
futures = []
for i, seg_url in enumerate(segments):
futures.append(executor.submit(download_segment, i, seg_url))
# 모든 작업 완료 대기
for future in futures:
try:
future.result()
except Exception as e:
log.error(f"Thread error: {e}")
print(f"Download thread failed: {e}", file=sys.stderr)
sys.exit(1)
# 다운로드 완료 확인
if completed_segments != total_segments: if completed_segments != total_segments:
print(f"Incomplete download: {completed_segments}/{total_segments}", file=sys.stderr) log.error(f"Download incomplete: {completed_segments}/{total_segments}")
sys.exit(1) sys.exit(1)
log.info("All segments downloaded successfully.") log.info("All segments downloaded. Merging...")
# 조기 반환 신호 (merging 상태 기록)
update_progress(100, total_segments, total_segments, "", "", status="merging") update_progress(100, total_segments, total_segments, "", "", status="merging")
# 7. ffmpeg로 합치기 # Merge
log.info("Concatenating segments with ffmpeg...") concat_list_path = os.path.join(temp_dir, "concat.txt")
concat_file = os.path.join(temp_dir, "concat.txt") with open(concat_list_path, 'w') as f:
with open(concat_file, 'w') as f: for sf in segment_files:
for seg_file in segment_files: if sf:
if seg_file: f.write(f"file '{sf}'\n")
f.write(f"file '{seg_file}'\n")
# 출력 디렉토리 생성
output_dir = os.path.dirname(output_path)
if output_dir and not os.path.exists(output_dir):
os.makedirs(output_dir)
cmd = [ cmd = [
'ffmpeg', '-y', 'ffmpeg', '-y', '-f', 'concat', '-safe', '0',
'-f', 'concat', '-i', 'concat.txt', '-c', 'copy', os.path.abspath(output_path)
'-safe', '0',
'-i', 'concat.txt',
'-c', 'copy',
os.path.abspath(output_path)
] ]
result = subprocess.run(cmd, capture_output=True, text=True, # ffmpeg는 sync subprocess로 실행 (block이어도 상관없음, 마지막 단계라)
timeout=600, cwd=temp_dir) # 하지만 asyncio 환경이므로 run_in_executor 혹은 create_subprocess_exec 권장
# 여기선 간단히 create_subprocess_exec 사용
proc = await asyncio.create_subprocess_exec(
*cmd,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
cwd=temp_dir
)
stdout, stderr = await proc.communicate()
if result.returncode != 0: if proc.returncode != 0:
print(f"FFmpeg concat failed: {result.stderr[:200]}", file=sys.stderr) log.error(f"FFmpeg failed: {stderr.decode()}")
sys.exit(1) sys.exit(1)
# 출력 파일 확인 if os.path.exists(output_path) and os.path.getsize(output_path) > 10000:
if not os.path.exists(output_path): log.info(f"Download Success: {output_path}")
print("Output file not created", file=sys.stderr) else:
log.error("Output file invalid")
sys.exit(1) sys.exit(1)
file_size = os.path.getsize(output_path)
if file_size < 10000:
print(f"Output file too small: {file_size}B", file=sys.stderr)
sys.exit(1)
log.info(f"Download completed: {output_path} ({file_size / 1024 / 1024:.1f}MB)")
update_progress(100, total_segments, total_segments, "", format_time(time.time() - start_time))
sys.exit(0)
except Exception as e: except Exception as e:
log.error(f"Critical Error: {e}")
import traceback import traceback
print(f"Error: {e}", file=sys.stderr) log.error(traceback.format_exc())
traceback.print_exc(file=sys.stderr)
sys.exit(1) sys.exit(1)
@@ -510,7 +554,7 @@ if __name__ == "__main__":
referer = sys.argv[3] if len(sys.argv) > 3 else None referer = sys.argv[3] if len(sys.argv) > 3 else None
proxy = sys.argv[4] if len(sys.argv) > 4 else None proxy = sys.argv[4] if len(sys.argv) > 4 else None
def progress_callback(percent, current, total, speed, elapsed): def progress_callback(percent: int, current: int, total: int, speed: str, elapsed: str) -> None:
print(f"\r[{percent:3d}%] {current}/{total} segments - {speed} - {elapsed}", end="", flush=True) print(f"\r[{percent:3d}%] {current}/{total} segments - {speed} - {elapsed}", end="", flush=True)
downloader = CdndaniaDownloader( downloader = CdndaniaDownloader(
@@ -525,5 +569,4 @@ if __name__ == "__main__":
print() print()
print(f"Result: {'SUCCESS' if success else 'FAILED'} - {message}") print(f"Result: {'SUCCESS' if success else 'FAILED'} - {message}")
else: else:
print("Usage: python cdndania_downloader.py <iframe_url> <output_path> [referer_url] [proxy]") print("Usage: python cdndania_downloader.py <iframe_url> <output_path> [referer] [proxy] [progress_path] [threads]")
sys.exit(1)

View File

@@ -261,13 +261,12 @@ class FfmpegQueue(object):
if not download_threads: if not download_threads:
download_threads = 16 download_threads = 16
# cdndania.com 감지 시 CdndaniaDownloader 사용 (curl_cffi로 세션 기반 보안 우회) # cdndania.com 감지 로직 제거 - 이제 설정에서 직접 선택
# [주의] cdndania는 yt-dlp로 받으면 14B 가짜 파일(보안 차단)이 받아지므로 # 사용자가 ohli24_download_method 설정에서 cdndania 선택 가능
# aria2c 선택 여부와 무관하게 전용 다운로더(CdndaniaDownloader)를 써야 함. # if getattr(entity, 'need_special_downloader', False) or 'cdndania.com' in video_url or 'michealcdn.com' in video_url:
# 대신 CdndaniaDownloader 내부에 멀티스레드(16)를 구현하여 속도를 해결함. # logger.info(f"Detected special CDN requirement - using Optimized CdndaniaDownloader")
if getattr(entity, 'need_special_downloader', False) or 'cdndania.com' in video_url or 'michealcdn.com' in video_url: # download_method = "cdndania"
logger.info(f"Detected special CDN requirement (flag={getattr(entity, 'need_special_downloader', False)}) - using Optimized CdndaniaDownloader") pass # 이제 설정값(download_method) 그대로 사용
download_method = "cdndania"
logger.info(f"Download method: {download_method}") logger.info(f"Download method: {download_method}")

View File

@@ -5,6 +5,7 @@
# @Site : # @Site :
# @File : logic_ohli24 # @File : logic_ohli24
# @Software: PyCharm # @Software: PyCharm
from __future__ import annotations
import asyncio import asyncio
import hashlib import hashlib
@@ -18,6 +19,7 @@ import threading
import traceback import traceback
import urllib import urllib
from datetime import datetime, date from datetime import datetime, date
from typing import Any, Dict, List, Optional, Tuple, Union, Callable, TYPE_CHECKING
from urllib import parse from urllib import parse
# third-party # third-party
@@ -60,12 +62,12 @@ name = "ohli24"
class LogicOhli24(PluginModuleBase): class LogicOhli24(PluginModuleBase):
current_headers = None current_headers: Optional[Dict[str, str]] = None
current_data = None current_data: Optional[Dict[str, Any]] = None
referer = None referer: Optional[str] = None
origin_url = None origin_url: Optional[str] = None
episode_url = None episode_url: Optional[str] = None
cookies = None cookies: Optional[requests.cookies.RequestsCookieJar] = None
# proxy = "http://192.168.0.2:3138" # proxy = "http://192.168.0.2:3138"
# proxies = { # proxies = {
@@ -74,11 +76,11 @@ class LogicOhli24(PluginModuleBase):
# } # }
@classmethod @classmethod
def get_proxy(cls): def get_proxy(cls) -> str:
return P.ModelSetting.get("ohli24_proxy_url") return P.ModelSetting.get("ohli24_proxy_url")
@classmethod @classmethod
def get_proxies(cls): def get_proxies(cls) -> Optional[Dict[str, str]]:
proxy = cls.get_proxy() proxy = cls.get_proxy()
if proxy: if proxy:
return {"http": proxy, "https": proxy} return {"http": proxy, "https": proxy}
@@ -104,13 +106,14 @@ class LogicOhli24(PluginModuleBase):
download_thread = None download_thread = None
current_download_count = 0 current_download_count = 0
def __init__(self, P): def __init__(self, P: Any) -> None:
super(LogicOhli24, self).__init__(P, "setting", scheduler_desc="ohli24 자동 다운로드") super(LogicOhli24, self).__init__(P, "setting", scheduler_desc="ohli24 자동 다운로드")
self.name = name self.name: str = name
self.db_default = { self.db_default = {
"ohli24_db_version": "1", "ohli24_db_version": "1",
"ohli24_proxy_url": "", "ohli24_proxy_url": "",
"ohli24_discord_webhook_url": "",
"ohli24_url": "https://ani.ohli24.com", "ohli24_url": "https://ani.ohli24.com",
"ohli24_download_path": os.path.join(path_data, P.package_name, "ohli24"), "ohli24_download_path": os.path.join(path_data, P.package_name, "ohli24"),
"ohli24_auto_make_folder": "True", "ohli24_auto_make_folder": "True",
@@ -118,8 +121,8 @@ class LogicOhli24(PluginModuleBase):
"ohli24_auto_make_season_folder": "True", "ohli24_auto_make_season_folder": "True",
"ohli24_finished_insert": "[완결]", "ohli24_finished_insert": "[완결]",
"ohli24_max_ffmpeg_process_count": "1", "ohli24_max_ffmpeg_process_count": "1",
f"{self.name}_download_method": "ffmpeg", # ffmpeg or ytdlp f"{self.name}_download_method": "cdndania", # cdndania (default), ffmpeg, ytdlp, aria2c
"ohli24_download_threads": "16", "ohli24_download_threads": "2", # 기본값 2 (안정성 권장)
"ohli24_order_desc": "False", "ohli24_order_desc": "False",
"ohli24_auto_start": "False", "ohli24_auto_start": "False",
"ohli24_interval": "* 5 * * *", "ohli24_interval": "* 5 * * *",
@@ -135,8 +138,32 @@ class LogicOhli24(PluginModuleBase):
# default_route_socketio(P, self) # default_route_socketio(P, self)
default_route_socketio_module(self, attach="/queue") default_route_socketio_module(self, attach="/queue")
def cleanup_stale_temps(self) -> None:
"""서버 시작 시 잔여 tmp 폴더 정리"""
try:
download_path = P.ModelSetting.get("ohli24_download_path")
if not download_path or not os.path.exists(download_path):
return
logger.info(f"Checking for stale temp directories in: {download_path}")
# 다운로드 경로 순회 (1 depth만 확인해도 충분할 듯 하나, 시즌 폴더 고려하여 recursively)
for root, dirs, files in os.walk(download_path):
for dir_name in dirs:
if dir_name.startswith("tmp") and len(dir_name) > 3:
full_path = os.path.join(root, dir_name)
try:
import shutil
logger.info(f"Removing stale temp directory: {full_path}")
shutil.rmtree(full_path)
except Exception as e:
logger.error(f"Failed to remove stale temp dir {full_path}: {e}")
except Exception as e:
logger.error(f"Error during stale temp cleanup: {e}")
@staticmethod @staticmethod
def db_init(): def db_init() -> None:
pass pass
# try: # try:
# for key, value in P.Logic.db_default.items(): # for key, value in P.Logic.db_default.items():
@@ -147,7 +174,7 @@ class LogicOhli24(PluginModuleBase):
# logger.error('Exception:%s', e) # logger.error('Exception:%s', e)
# logger.error(traceback.format_exc()) # logger.error(traceback.format_exc())
def process_menu(self, sub, req): def process_menu(self, sub: str, req: Any) -> str:
arg = P.ModelSetting.to_dict() arg = P.ModelSetting.to_dict()
arg["sub"] = self.name arg["sub"] = self.name
if sub in ["setting", "queue", "list", "category", "request", "search"]: if sub in ["setting", "queue", "list", "category", "request", "search"]:
@@ -166,7 +193,7 @@ class LogicOhli24(PluginModuleBase):
return render_template("sample.html", title="%s - %s" % (P.package_name, sub)) return render_template("sample.html", title="%s - %s" % (P.package_name, sub))
# @staticmethod # @staticmethod
def process_ajax(self, sub, req): def process_ajax(self, sub: str, req: Any) -> Any:
try: try:
data = [] data = []
cate = request.form.get("type", None) cate = request.form.get("type", None)
@@ -458,7 +485,7 @@ class LogicOhli24(PluginModuleBase):
# db 에서 다운로드 완료 유무 체크 # db 에서 다운로드 완료 유무 체크
@staticmethod @staticmethod
async def get_data(url) -> str: async def get_data(url: str) -> str:
async with aiohttp.ClientSession() as session: async with aiohttp.ClientSession() as session:
async with session.get(url) as response: async with session.get(url) as response:
content = await response.text() content = await response.text()
@@ -466,12 +493,12 @@ class LogicOhli24(PluginModuleBase):
return content return content
@staticmethod @staticmethod
async def main(url_list: list): async def main(url_list: List[str]) -> List[str]:
input_coroutines = [LogicOhli24.get_data(url_) for url_ in url_list] input_coroutines = [LogicOhli24.get_data(url_) for url_ in url_list]
res = await asyncio.gather(*input_coroutines) res = await asyncio.gather(*input_coroutines)
return res return res
def get_series_info(self, code, wr_id, bo_table): def get_series_info(self, code: str, wr_id: Optional[str], bo_table: Optional[str]) -> Dict[str, Any]:
code_type = "c" code_type = "c"
code = urllib.parse.quote(code) code = urllib.parse.quote(code)
@@ -810,7 +837,7 @@ class LogicOhli24(PluginModuleBase):
return {"ret": "exception", "log": str(e)} return {"ret": "exception", "log": str(e)}
# @staticmethod # @staticmethod
def plugin_load(self): def plugin_load(self) -> None:
try: try:
# SupportFfmpeg.initialize(ffmpeg_modelsetting.get('ffmpeg_path'), os.path.join(F.config['path_data'], 'tmp'), # SupportFfmpeg.initialize(ffmpeg_modelsetting.get('ffmpeg_path'), os.path.join(F.config['path_data'], 'tmp'),
# self.callback_function, ffmpeg_modelsetting.get_int('max_pf_count')) # self.callback_function, ffmpeg_modelsetting.get_int('max_pf_count'))
@@ -836,12 +863,15 @@ class LogicOhli24(PluginModuleBase):
self.current_data = None self.current_data = None
self.queue.queue_start() self.queue.queue_start()
# 잔여 Temp 폴더 정리
self.cleanup_stale_temps()
except Exception as e: except Exception as e:
logger.error("Exception:%s", e) logger.error("Exception:%s", e)
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
# @staticmethod # @staticmethod
def plugin_unload(self): def plugin_unload(self) -> None:
try: try:
logger.debug("%s plugin_unload", P.package_name) logger.debug("%s plugin_unload", P.package_name)
scheduler.remove_job("%s_recent" % P.package_name) scheduler.remove_job("%s_recent" % P.package_name)
@@ -856,7 +886,16 @@ class LogicOhli24(PluginModuleBase):
return True return True
@staticmethod @staticmethod
def get_html(url, headers=None, referer=None, stream=False, timeout=60, stealth=False, data=None, method='GET'): def get_html(
url: str,
headers: Optional[Dict[str, str]] = None,
referer: Optional[str] = None,
stream: bool = False,
timeout: int = 60,
stealth: bool = False,
data: Optional[Dict[str, Any]] = None,
method: str = 'GET'
) -> str:
"""별도 스레드에서 curl_cffi 실행하여 gevent SSL 충돌 및 Cloudflare 우회""" """별도 스레드에서 curl_cffi 실행하여 gevent SSL 충돌 및 Cloudflare 우회"""
from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeoutError from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeoutError
import time import time
@@ -940,7 +979,7 @@ class LogicOhli24(PluginModuleBase):
return response_data return response_data
######################################################### #########################################################
def add(self, episode_info): def add(self, episode_info: Dict[str, Any]) -> str:
if self.is_exist(episode_info): if self.is_exist(episode_info):
return "queue_exist" return "queue_exist"
else: else:
@@ -951,7 +990,7 @@ class LogicOhli24(PluginModuleBase):
# logger.debug("db_entity.status ::: %s", db_entity.status) # logger.debug("db_entity.status ::: %s", db_entity.status)
if db_entity is None: if db_entity is None:
entity = Ohli24QueueEntity(P, self, episode_info) entity = Ohli24QueueEntity(P, self, episode_info)
entity.proxy = self.proxy entity.proxy = LogicOhli24.get_proxy()
logger.debug("entity:::> %s", entity.as_dict()) logger.debug("entity:::> %s", entity.as_dict())
ModelOhli24Item.append(entity.as_dict()) ModelOhli24Item.append(entity.as_dict())
# # logger.debug("entity:: type >> %s", type(entity)) # # logger.debug("entity:: type >> %s", type(entity))
@@ -970,7 +1009,7 @@ class LogicOhli24(PluginModuleBase):
return "enqueue_db_append" return "enqueue_db_append"
elif db_entity.status != "completed": elif db_entity.status != "completed":
entity = Ohli24QueueEntity(P, self, episode_info) entity = Ohli24QueueEntity(P, self, episode_info)
entity.proxy = self.proxy entity.proxy = LogicOhli24.get_proxy()
logger.debug("entity:::> %s", entity.as_dict()) logger.debug("entity:::> %s", entity.as_dict())
# P.logger.debug(F.config['path_data']) # P.logger.debug(F.config['path_data'])
@@ -988,7 +1027,7 @@ class LogicOhli24(PluginModuleBase):
else: else:
return "db_completed" return "db_completed"
def is_exist(self, info): def is_exist(self, info: Dict[str, Any]) -> bool:
# print(self.queue) # print(self.queue)
# print(self.queue.entity_list) # print(self.queue.entity_list)
for en in self.queue.entity_list: for en in self.queue.entity_list:
@@ -996,7 +1035,7 @@ class LogicOhli24(PluginModuleBase):
return True return True
return False return False
def callback_function(self, **args): def callback_function(self, **args: Any) -> None:
logger.debug(f"callback_function invoked with args: {args}") logger.debug(f"callback_function invoked with args: {args}")
if 'status' in args: if 'status' in args:
logger.debug(f"Status: {args['status']}") logger.debug(f"Status: {args['status']}")
@@ -1111,38 +1150,144 @@ class LogicOhli24(PluginModuleBase):
elif args["type"] == "normal": elif args["type"] == "normal":
if args["status"] == SupportFfmpeg.Status.DOWNLOADING: if args["status"] == SupportFfmpeg.Status.DOWNLOADING:
refresh_type = "status" refresh_type = "status"
# Discord Notification
try:
title = args['data'].get('title', 'Unknown Title')
filename = args['data'].get('filename', 'Unknown File')
poster_url = entity.info.get('image_link', '') if entity and entity.info else ''
msg = "다운로드를 시작합니다."
self.send_discord_notification(msg, title, filename, poster_url)
except Exception as e:
logger.error(f"Failed to send discord notification: {e}")
# P.logger.info(refresh_type) # P.logger.info(refresh_type)
self.socketio_callback(refresh_type, args["data"]) self.socketio_callback(refresh_type, args["data"])
def send_discord_notification(
    self,
    title: str,
    desc: str,
    filename: str,
    image_url: str = ""
) -> None:
    """Send a download-start notification embed to the configured Discord webhook.

    :param title: embed title. NOTE(review): existing callers pass the short
        status message here and the episode title as ``desc``, so the
        season/episode badge is extracted from both fields.
    :param desc: embed description (episode title from the callers).
    :param filename: file name shown as an embed field.
    :param image_url: optional thumbnail URL (poster image).

    Best-effort: any failure is logged and swallowed so notification
    problems never break the download pipeline.
    """
    try:
        webhook_url = P.ModelSetting.get("ohli24_discord_webhook_url")
        if not webhook_url:
            logger.debug("Discord webhook URL is empty.")
            return

        logger.info(f"Sending Discord notification to: {webhook_url}")

        # Extract season/episode info for the author badge. Callers pass
        # the episode title in either parameter, so scan both instead of
        # only ``title`` (which previously received the status message
        # and therefore never matched).
        import re
        season_ep_str = ""
        patterns = (
            r"(?P<season>\d+)기\s*(?P<episode>\d+)화",
            r"(?P<season>\d+)기",
            r"(?P<episode>\d+)화",
        )
        match = None
        for text in (title, desc):
            for pattern in patterns:
                match = re.search(pattern, text)
                if match:
                    break
            if match:
                break
        if match:
            parts = []
            gd = match.groupdict()
            if gd.get("season"):
                parts.append(f"S{int(gd['season']):02d}")
            if gd.get("episode"):
                parts.append(f"E{int(gd['episode']):02d}")
            if parts:
                season_ep_str = " | ".join(parts)

        author_name = "Ohli24 Downloader"
        if season_ep_str:
            author_name = f"{season_ep_str} • Ohli24"

        embed = {
            "title": title,
            "description": desc,
            "color": 5763719,  # Green (0x57F287)
            "author": {
                "name": author_name,
                "icon_url": "https://i.imgur.com/4M34hi2.png"  # Optional generic icon
            },
            "fields": [
                {
                    "name": "파일명",
                    "value": filename if filename else "알 수 없음",
                    "inline": False
                }
            ],
            "footer": {
                "text": f"FlaskFarm Ohli24 • {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
            }
        }
        if image_url:
            embed["thumbnail"] = {
                "url": image_url
            }

        message = {
            "username": "Ohli24 Downloader",
            "embeds": [embed]
        }

        import requests
        headers = {"Content-Type": "application/json"}
        # Timeout keeps a slow/hung webhook endpoint from blocking the
        # download worker thread indefinitely.
        response = requests.post(webhook_url, json=message, headers=headers, timeout=10)

        # Discord answers 204 (no wait) or 200 (with ?wait=true).
        if response.status_code in (200, 204):
            logger.info("Discord notification sent successfully.")
        else:
            logger.error(f"Failed to send Discord notification. Status Code: {response.status_code}, Response: {response.text}")
    except Exception as e:
        logger.error(f"Exception in send_discord_notification: {e}")
        logger.error(traceback.format_exc())
class Ohli24QueueEntity(FfmpegQueueEntity): class Ohli24QueueEntity(FfmpegQueueEntity):
def __init__(self, P: Any, module_logic: LogicOhli24, info: Dict[str, Any]) -> None:
    """Queue entity representing a single Ohli24 episode download."""
    super(Ohli24QueueEntity, self).__init__(P, module_logic, info)
    # Stream/source details filled in later by make_episode_info().
    self._vi: Optional[Any] = None
    self.url: Optional[str] = None
    self.epi_queue: Optional[str] = None
    # Destination/file naming state.
    self.filepath: Optional[str] = None
    self.savepath: Optional[str] = None
    self.quality: Optional[str] = None
    self.filename: Optional[str] = None
    # Subtitle sources.
    self.vtt: Optional[str] = None
    self.srt_url: Optional[str] = None
    self.season: int = 1
    self.content_title: Optional[str] = None
    self.headers: Optional[Dict[str, str]] = None
    # Path of a CDN session cookie file consumed by yt-dlp.
    self.cookies_file: Optional[str] = None
    # Whether the CDN-security-bypass downloader must be used.
    self.need_special_downloader: bool = False
    # Guard so the Discord notification is only sent once per entity.
    self._discord_sent: bool = False
    # TODO: temporarily kept enabled (original note: 임시 주석 처리).
    self.make_episode_info()
def refresh_status(self): def refresh_status(self) -> None:
# ffmpeg_queue_v1.py에서 실패 처리(-1)된 경우 DB 업데이트 트리거 # ffmpeg_queue_v1.py에서 실패 처리(-1)된 경우 DB 업데이트 트리거
if getattr(self, 'ffmpeg_status', 0) == -1: if getattr(self, 'ffmpeg_status', 0) == -1:
reason = getattr(self, 'ffmpeg_status_kor', 'Unknown Error') reason = getattr(self, 'ffmpeg_status_kor', 'Unknown Error')
self.download_failed(reason) self.download_failed(reason)
self.module_logic.socketio_callback("status", self.as_dict()) self.module_logic.socketio_callback("status", self.as_dict())
# Discord Notification Trigger (All downloaders)
try:
if getattr(self, 'ffmpeg_status', 0) == 5: # DOWNLOADING
if not getattr(self, '_discord_sent', False):
self._discord_sent = True
title = self.info.get('title', 'Unknown Title')
filename = getattr(self, 'filename', 'Unknown File')
# 썸네일 이미지 - image_link 또는 thumbnail 필드에서 가져옴
poster_url = self.info.get('image_link', '') or self.info.get('thumbnail', '')
logger.debug(f"Discord poster_url: {poster_url}")
self.module_logic.send_discord_notification("다운로드 시작", title, filename, poster_url)
except Exception as e:
logger.error(f"Failed to check/send discord notification in refresh_status: {e}")
# 추가: /queue 네임스페이스로도 명시적으로 전송 # 추가: /queue 네임스페이스로도 명시적으로 전송
try: try:
from framework import socketio from framework import socketio
@@ -1151,7 +1296,7 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
except: except:
pass pass
def info_dict(self, tmp): def info_dict(self, tmp: Dict[str, Any]) -> Dict[str, Any]:
# logger.debug('self.info::> %s', self.info) # logger.debug('self.info::> %s', self.info)
for key, value in self.info.items(): for key, value in self.info.items():
tmp[key] = value tmp[key] = value
@@ -1162,7 +1307,7 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
tmp["epi_queue"] = self.epi_queue tmp["epi_queue"] = self.epi_queue
return tmp return tmp
def download_completed(self): def download_completed(self) -> None:
logger.debug("download completed.......!!") logger.debug("download completed.......!!")
db_entity = ModelOhli24Item.get_by_ohli24_id(self.info["_id"]) db_entity = ModelOhli24Item.get_by_ohli24_id(self.info["_id"])
if db_entity is not None: if db_entity is not None:
@@ -1170,7 +1315,7 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
db_entity.completed_time = datetime.now() db_entity.completed_time = datetime.now()
db_entity.save() db_entity.save()
def download_failed(self, reason): def download_failed(self, reason: str) -> None:
logger.debug(f"download failed.......!! reason: {reason}") logger.debug(f"download failed.......!! reason: {reason}")
db_entity = ModelOhli24Item.get_by_ohli24_id(self.info["_id"]) db_entity = ModelOhli24Item.get_by_ohli24_id(self.info["_id"])
if db_entity is not None: if db_entity is not None:
@@ -1293,8 +1438,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
iframe_src = iframe.get("src") iframe_src = iframe.get("src")
logger.info(f"Found cdndania iframe: {iframe_src}") logger.info(f"Found cdndania iframe: {iframe_src}")
self.iframe_src = iframe_src self.iframe_src = iframe_src
# CDN 보안 우회 다운로더 사용 플래그 설정 (도메인 무관하게 모듈 강제 선택) # CDN 보안 우회 다운로더 필요 여부 - 설정에 따름
self.need_special_downloader = True # self.need_special_downloader = True # 설정값 존중 (ffmpeg/ytdlp/aria2c 테스트 가능)
self.need_special_downloader = False
# Step 2: cdndania.com 페이지에서 m3u8 URL 추출 # Step 2: cdndania.com 페이지에서 m3u8 URL 추출
video_url, vtt_url, cookies_file = self.extract_video_from_cdndania(iframe_src, url) video_url, vtt_url, cookies_file = self.extract_video_from_cdndania(iframe_src, url)
@@ -1348,7 +1494,8 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
cookies_file = None cookies_file = None
try: try:
import cloudscraper
from curl_cffi import requests
import tempfile import tempfile
import json import json
@@ -1365,12 +1512,11 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
logger.error(f"Could not find video ID in iframe URL: {iframe_src}") logger.error(f"Could not find video ID in iframe URL: {iframe_src}")
return video_url, vtt_url, cookies_file return video_url, vtt_url, cookies_file
# cloudscraper 세션 생성 (쿠키 유지용) # curl_cffi 세션 생성 (Chrome 120 TLS Fingerprint)
scraper = cloudscraper.create_scraper( scraper = requests.Session(impersonate="chrome120")
browser={'browser': 'chrome', 'platform': 'darwin', 'mobile': False},
delay=10
)
proxies = LogicOhli24.get_proxies() proxies = LogicOhli24.get_proxies()
if proxies:
scraper.proxies = {"http": proxies["http"], "https": proxies["https"]}
# getVideo API 호출 # getVideo API 호출
# iframe 도메인 자동 감지 (cdndania.com -> michealcdn.com 등) # iframe 도메인 자동 감지 (cdndania.com -> michealcdn.com 등)
@@ -1555,6 +1701,9 @@ class Ohli24QueueEntity(FfmpegQueueEntity):
# self.socketio_callback(refresh_type, args['data']) # self.socketio_callback(refresh_type, args['data'])
class ModelOhli24Item(ModelBase): class ModelOhli24Item(ModelBase):
P = P P = P
__tablename__ = "{package_name}_ohli24_item".format(package_name=P.package_name) __tablename__ = "{package_name}_ohli24_item".format(package_name=P.package_name)

View File

@@ -20,11 +20,25 @@
background-image: radial-gradient(circle at top right, #1e293b 0%, transparent 60%), radial-gradient(circle at bottom left, #1e293b 0%, transparent 60%); background-image: radial-gradient(circle at top right, #1e293b 0%, transparent 60%), radial-gradient(circle at bottom left, #1e293b 0%, transparent 60%);
color: var(--text-color); color: var(--text-color);
font-family: 'Inter', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif; font-family: 'Inter', -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif;
overflow: hidden; /* 외부 스크롤 방지 - 흔들림 해결 */
} }
/* Container & Typography */ /* Container & Typography */
.container-fluid { .container-fluid {
padding: 40px; padding: 8px; /* 최소 여백 */
}
@media (max-width: 768px) {
.container-fluid {
padding: 4px; /* 모바일 더 작은 여백 */
}
.tab-pane {
padding: 8px;
}
.dashboard-card {
margin-top: 8px;
border-radius: 6px;
}
} }
h1, h2, h3, h4, h5, h6 { h1, h2, h3, h4, h5, h6 {
@@ -93,6 +107,9 @@
width: 100%; width: 100%;
box-shadow: inset 0 2px 4px 0 rgba(0, 0, 0, 0.5); box-shadow: inset 0 2px 4px 0 rgba(0, 0, 0, 0.5);
resize: none; /* Disable manual resize */ resize: none; /* Disable manual resize */
overscroll-behavior: contain; /* 스크롤 체인 방지 */
transform: translateZ(0); /* GPU 가속화 */
will-change: scroll-position;
} }
textarea#log:focus, textarea#add:focus { textarea#log:focus, textarea#add:focus {

View File

@@ -26,10 +26,11 @@
{{ macros.setting_input_text('ohli24_download_path', '저장 폴더', value=arg['ohli24_download_path'], desc='정상적으로 다운 완료 된 파일이 이동할 폴더 입니다. ') }} {{ macros.setting_input_text('ohli24_download_path', '저장 폴더', value=arg['ohli24_download_path'], desc='정상적으로 다운 완료 된 파일이 이동할 폴더 입니다. ') }}
{{ macros.setting_input_int('ohli24_max_ffmpeg_process_count', '동시 다운로드 수', value=arg['ohli24_max_ffmpeg_process_count'], desc='동시에 다운로드 할 에피소드 갯수입니다.') }} {{ macros.setting_input_int('ohli24_max_ffmpeg_process_count', '동시 다운로드 수', value=arg['ohli24_max_ffmpeg_process_count'], desc='동시에 다운로드 할 에피소드 갯수입니다.') }}
{{ macros.setting_input_text('ohli24_proxy_url', 'Proxy URL', value=arg.get('ohli24_proxy_url', ''), desc=['프록시 서버 URL (예: http://192.168.0.2:3138)', '비어있으면 사용 안 함']) }} {{ macros.setting_input_text('ohli24_proxy_url', 'Proxy URL', value=arg.get('ohli24_proxy_url', ''), desc=['프록시 서버 URL (예: http://192.168.0.2:3138)', '비어있으면 사용 안 함']) }}
{{ macros.setting_select('ohli24_download_method', '다운로드 방법', [['ffmpeg', 'ffmpeg (기본)'], ['ytdlp', 'yt-dlp'], ['aria2c', 'aria2c (yt-dlp)']], value=arg.get('ohli24_download_method', 'ffmpeg'), desc='m3u8 다운로드에 사용할 도구를 선택합니다.') }} {{ macros.setting_input_text('ohli24_discord_webhook_url', 'Discord Webhook URL', value=arg.get('ohli24_discord_webhook_url', ''), desc=['디스코드 알림을 받을 웹후크 주소입니다.', '다운로드 시작 시 알림을 보냅니다.']) }}
{{ macros.setting_select('ohli24_download_method', '다운로드 방법', [['cdndania', 'cdndania (최적화, 기본)'], ['ffmpeg', 'ffmpeg'], ['ytdlp', 'yt-dlp'], ['aria2c', 'aria2c (yt-dlp)']], value=arg.get('ohli24_download_method', 'cdndania'), desc='m3u8 다운로드에 사용할 도구를 선택합니다.') }}
<div id="ohli24_download_threads_div"> <div id="ohli24_download_threads_div">
{{ macros.setting_select('ohli24_download_threads', '다운로드 속도', [['1', '1배속 (1개)'], ['2', '2배속 (2개)'], ['4', '4배속 (4개)'], ['8', '8배속 (8개)'], ['16', '16배속 (16개 MAX)']], value=arg.get('ohli24_download_threads', '16'), desc='yt-dlp/aria2c 모드에서 사용할 병렬 다운로드 스레드 수입니다.') }} {{ macros.setting_select('ohli24_download_threads', '다운로드 속도', [['1', '1배속 (1개, 안정)'], ['2', '2배속 (2개, 권장)'], ['4', '4배속 (4개)'], ['8', '8배속 (8개)'], ['16', '16배속 (16개, 불안정)']], value=arg.get('ohli24_download_threads', '2'), desc='cdndania/yt-dlp/aria2c 모드에서 사용할 동시 다운로드 수입니다. CDN 차단 시 1-2개 권장.') }}
</div> </div>
{{ macros.setting_checkbox('ohli24_order_desc', '요청 화면 최신순 정렬', value=arg['ohli24_order_desc'], desc='On : 최신화부터, Off : 1화부터') }} {{ macros.setting_checkbox('ohli24_order_desc', '요청 화면 최신순 정렬', value=arg['ohli24_order_desc'], desc='On : 최신화부터, Off : 1화부터') }}
{{ macros.setting_checkbox('ohli24_auto_make_folder', '제목 폴더 생성', value=arg['ohli24_auto_make_folder'], desc='제목으로 폴더를 생성하고 폴더 안에 다운로드합니다.') }} {{ macros.setting_checkbox('ohli24_auto_make_folder', '제목 폴더 생성', value=arg['ohli24_auto_make_folder'], desc='제목으로 폴더를 생성하고 폴더 안에 다운로드합니다.') }}
@@ -290,7 +291,7 @@ $('#ani365_auto_make_folder').change(function() {
function toggle_download_threads() { function toggle_download_threads() {
var method = $('#ohli24_download_method').val(); var method = $('#ohli24_download_method').val();
if (method == 'ytdlp' || method == 'aria2c') { if (method == 'cdndania' || method == 'ytdlp' || method == 'aria2c') {
$('#ohli24_download_threads_div').slideDown(); $('#ohli24_download_threads_div').slideDown();
} else { } else {
$('#ohli24_download_threads_div').slideUp(); $('#ohli24_download_threads_div').slideUp();