Refactor logging to use parameterized messages

√(noham)²
2025-12-20 12:52:35 +01:00
parent debd884e2d
commit 31f87a6a99
7 changed files with 116 additions and 79 deletions
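
Parameterized (printf-style) messages let the logging framework defer string interpolation until it knows a record will actually be emitted, and they keep the format string constant, which log aggregators can group on. A minimal before/after sketch, not taken from the repository (the `track_id` value is illustrative):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("OqeeRewind")
    track_id = 42  # hypothetical value, for illustration only

    # Before: the f-string is built eagerly, even though DEBUG is disabled
    logger.debug(f"Track ID: {track_id}")

    # After: interpolation happens inside the handler, and only if the
    # DEBUG level is enabled for this logger
    logger.debug("Track ID: %s", track_id)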

main.py

@@ -173,15 +173,15 @@ if __name__ == "__main__":
             logger.error("Error during stream selection.")
             sys.exit(1)
-        logger.info(f"Start date: {start_date}")
-        logger.info(f"End date: {end_date}")
-        logger.info(f"Channel ID: {args.channel_id}")
-        logger.info(f"Video quality: {args.video}")
-        logger.info(f"Audio track: {args.audio}")
-        logger.info(f"Title: {title}")
-        logger.info(f"DRM keys: {keys}")
-        logger.info(f"Output dir: {args.output_dir}")
-        logger.info(f"Widevine device: {args.widevine_device}")
+        logger.debug("Start date: %s", start_date)
+        logger.debug("End date: %s", end_date)
+        logger.debug("Channel ID: %s", args.channel_id)
+        logger.debug("Video quality: %s", args.video)
+        logger.debug("Audio track: %s", args.audio)
+        logger.debug("Title: %s", title)
+        logger.debug("DRM keys: %s", keys)
+        logger.debug("Output dir: %s", args.output_dir)
+        logger.debug("Widevine device: %s", args.widevine_device)
     else:
         # Interactive mode
@@ -255,7 +255,7 @@ if __name__ == "__main__":
         )
         rep_nb = (end_tick - start_tick) // DURATION + 1
-        logger.info(f"Total segments to fetch for {content_type}: {rep_nb}")
+        logger.info("Total segments to fetch for %s: %d", content_type, rep_nb)
         data = {
             "start_tick": start_tick,
             "rep_nb": rep_nb,
@@ -296,7 +296,7 @@ if __name__ == "__main__":
                 key = k
                 break
         if not key:
-            logger.info(f"No key found for KID {kid}, need to fetch it.")
+            logger.info("No key found for KID %s, need to fetch it.", kid)
             missing_keys.append(kid)
     if len(missing_keys) > 0:
@@ -317,7 +317,7 @@ if __name__ == "__main__":
         }
         fetched_keys = get_keys(kids=missing_keys, method=method)
-        logger.info(f"Fetched keys: {fetched_keys}")
+        logger.info("Fetched keys: %s", fetched_keys)
         keys = keys + fetched_keys
     for content_type, data in [("video", video_data), ("audio", audio_data)]:
@@ -347,7 +347,7 @@ if __name__ == "__main__":
         f'ffmpeg -i "concat:{output_dir}/segments_{track_id_video}/init.mp4|'
         f'{output_dir}/dec_video.mp4" -c copy {output_dir}/video.mp4'
     )
-    logger.debug(f"FFmpeg command: {command_ffmpeg}")
+    logger.debug("FFmpeg command: %s", command_ffmpeg)
     subprocess.run(
         command_ffmpeg,
         shell=True,
@@ -360,7 +360,7 @@ if __name__ == "__main__":
         f'{output_dir}/dec_audio.mp4" -c copy {output_dir}/audio.mp4'
     )
-    logger.debug(f"FFmpeg command: {command_ffmpeg}")
+    logger.debug("FFmpeg command: %s", command_ffmpeg)
    subprocess.run(
         command_ffmpeg,
         shell=True,
@@ -373,7 +373,7 @@ if __name__ == "__main__":
         f"ffmpeg -i {output_dir}/video.mp4 -itsoffset {diff_start_sec} "
         f"-i {output_dir}/audio.mp4 -c copy -map 0:v -map 1:a {output_dir}/output.mp4"
     )
-    logger.debug(f"Merge command: {COMMAND_MERGE}")
+    logger.debug("Merge command: %s", COMMAND_MERGE)
     subprocess.run(
         COMMAND_MERGE,
         shell=True,
@@ -384,7 +384,7 @@ if __name__ == "__main__":
     FINAL_OUTPUT = f"{output_dir}/{title}.mp4"
     shutil.move(f"{output_dir}/output.mp4", FINAL_OUTPUT)
-    logger.info(f"Final output saved to {FINAL_OUTPUT}")
+    logger.info("Final output saved to %s", FINAL_OUTPUT)
     os.remove(f"{output_dir}/dec_video.mp4")
     os.remove(f"{output_dir}/dec_audio.mp4")


@@ -76,7 +76,7 @@ def get_date_input():
     start_date = datetime.datetime.strptime(
         start_date_result["datetime"], "%Y-%m-%d %H:%M:%S"
     )
-    logger.debug(f"Start date/time: {start_date}")
+    logger.debug("Start date/time: %s", start_date)
     question_end_date = [
         {
@@ -116,7 +116,7 @@ def get_date_input():
             h, m, s = map(int, duration_str.split(":"))
             duration_td = datetime.timedelta(hours=h, minutes=m, seconds=s)
             end_date = start_date + duration_td
-            logger.debug(f"End date/time: {end_date}")
+            logger.debug("End date/time: %s", end_date)
         except (ValueError, TypeError):
             logger.error("Unable to parse the provided duration string.")
@@ -125,7 +125,7 @@ def get_date_input():
             end_date = datetime.datetime.strptime(
                 end_date_result["datetime"], "%Y-%m-%d %H:%M:%S"
             )
-            logger.debug(f"End date/time: {end_date}")
+            logger.debug("End date/time: %s", end_date)
         except (ValueError, TypeError):
             logger.error("Unable to parse the provided date/time string.")
     return start_date, end_date
@@ -155,7 +155,7 @@ def select_oqee_channel():
         choices.sort(key=lambda x: x["name"])
     except requests.exceptions.RequestException as e:
-        logger.error(f"A network error occurred: {e}")
+        logger.error("A network error occurred: %s", e)
         return None
     except ValueError:
         logger.error("Error parsing JSON response.")
@@ -179,9 +179,9 @@ def select_oqee_channel():
         selected_channel_details = channels_data.get(selected_channel_id)
         if selected_channel_details:
             logger.info("You have selected:")
-            logger.info(f" - Name: {selected_channel_details.get('name')}")
-            logger.info(f" - ID: {selected_channel_details.get('id')}")
-            logger.info(f" - Freebox ID: {selected_channel_details.get('freebox_id')}")
+            logger.info(" - Name: %s", selected_channel_details.get("name"))
+            logger.info(" - ID: %s", selected_channel_details.get("id"))
+            logger.info(" - Freebox ID: %s", selected_channel_details.get("freebox_id"))
         else:
             logger.warning("Unable to find details for the selected channel.")
         return selected_channel_details
@@ -190,7 +190,7 @@ def select_oqee_channel():
         logger.info("Operation cancelled by user.")
         return None
     except (ValueError, KeyError, IndexError) as e:
-        logger.error(f"An unexpected error occurred: {e}")
+        logger.error("An unexpected error occurred: %s", e)
         return None
@@ -276,8 +276,8 @@ def stream_selection():
         return None
     logger.debug("Selected channel:")
-    logger.debug(f" - Name: {selected_channel.get('name')}")
-    logger.debug(f" - ID: {selected_channel.get('id')}")
+    logger.debug(" - Name: %s", selected_channel.get("name"))
+    logger.debug(" - ID: %s", selected_channel.get("id"))
     dash_id = selected_channel.get("streams", {}).get("dash")
     if not dash_id:
@@ -302,8 +302,8 @@ def stream_selection():
             bitrate = details.get("bitrate_kbps")
             track_id = details.get("track_id")
             logger.info(
-                f" - {stream_type.capitalize()}: "
-                f"Bitrate {bitrate} kbps (ID: {track_id})"
+                " - %s: Bitrate %s kbps (ID: %s)",
+                stream_type.capitalize(), bitrate, track_id
             )
     logger.info("----------------------------------------")
@@ -351,18 +351,19 @@ def get_selection(channel_id, video_quality="best", audio_quality="best"):
         channels_data = data["result"]["channels"]
         selected_channel_details = channels_data.get(str(channel_id))
         if not selected_channel_details:
-            logger.error(f"Channel with ID {channel_id} not found.")
+            logger.error("Channel with ID %s not found.", channel_id)
             return None
     except requests.exceptions.RequestException as e:
-        logger.error(f"Network error: {e}")
+        logger.error("Network error: %s", e)
         return None
     except ValueError:
         logger.error("Error parsing JSON response.")
         return None
     logger.info(
-        f"Selected channel: {selected_channel_details.get('name')} (ID: {channel_id})"
+        "Selected channel: %s (ID: %s)",
+        selected_channel_details.get("name"), channel_id
     )
     dash_id = selected_channel_details.get("streams", {}).get("dash")
@@ -417,7 +418,7 @@ def select_track(content_dict, quality_spec, content_type):
             candidates.extend(tracks)
     if not candidates:
-        logger.warning(f"No {content_type} track found for '{quality_spec}'.")
+        logger.warning("No %s track found for '%s'.", content_type, quality_spec)
         return None
     if pref == "best":
@@ -429,7 +430,8 @@ def select_track(content_dict, quality_spec, content_type):
         selected = max(candidates, key=lambda x: x["bandwidth"])
     logger.info(
-        f"{content_type.capitalize()} selected: {selected['track_id']}, {selected['bitrate_kbps']} kbps"
+        "%s selected: %s, %d kbps",
+        content_type.capitalize(), selected["track_id"], selected["bitrate_kbps"]
     )
     return selected
@@ -454,7 +456,7 @@ def get_epg_data_at(dt: datetime.datetime):
     dt_aligned = dt.replace(minute=0, second=0, microsecond=0)
     unix_time = int(dt_aligned.timestamp())
-    logger.info(f"Fetching EPG for aligned time: {dt_aligned} (unix={unix_time})")
+    logger.info("Fetching EPG for aligned time: %s (unix=%d)", dt_aligned, unix_time)
     try:
         response = requests.get(EPG_API_URL.format(unix=unix_time), timeout=10)
@@ -464,7 +466,7 @@ def get_epg_data_at(dt: datetime.datetime):
         return data.get("result")
     except requests.exceptions.RequestException as e:
-        logger.error(f"A network error occurred: {e}")
+        logger.error("A network error occurred: %s", e)
         return None
     except ValueError:
         logger.error("Error parsing JSON response.")
@@ -560,9 +562,9 @@ def select_program_from_epg(programs, original_start_date, original_end_date):
     program_title = live_data.get("title", "Untitled")
     logger.info("Selected program:")
-    logger.info(f" - Title: {program_title}")
-    logger.info(f" - Start: {program_start.strftime('%Y-%m-%d %H:%M:%S')}")
-    logger.info(f" - End: {program_end.strftime('%Y-%m-%d %H:%M:%S')}")
+    logger.info(" - Title: %s", program_title)
+    logger.info(" - Start: %s", program_start.strftime("%Y-%m-%d %H:%M:%S"))
+    logger.info(" - End: %s", program_end.strftime("%Y-%m-%d %H:%M:%S"))
     return {
         "start_date": program_start,


@@ -1,9 +1,11 @@
+"""Logging configuration for OqeeRewind."""
+
 import logging
 import sys


 class ColoredFormatter(logging.Formatter):
     """Custom logging formatter to add colors to log levels."""

     grey = "\x1b[38;20m"
     yellow = "\x1b[33;20m"
     red = "\x1b[31;20m"
@@ -27,8 +29,8 @@ class ColoredFormatter(logging.Formatter):
 def setup_logging(level=logging.INFO):
     """Set up logging configuration."""
-    logger = logging.getLogger("OqeeRewind")
-    logger.setLevel(level)
+    log = logging.getLogger("OqeeRewind")
+    log.setLevel(level)

     # Create console handler
     console_handler = logging.StreamHandler(sys.stdout)
@@ -39,10 +41,10 @@ def setup_logging(level=logging.INFO):
     console_handler.setFormatter(formatter)

     # Add handler to logger
-    if not logger.handlers:
-        logger.addHandler(console_handler)
-    return logger
+    if not log.handlers:
+        log.addHandler(console_handler)
+    return log


 # Create a default logger instance
 logger = logging.getLogger("OqeeRewind")
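
As a usage note: `setup_logging` returns the configured "OqeeRewind" logger, so callers can combine it with the parameterized style introduced in this commit. A hedged sketch, assuming the module above is importable as `logger_config` (its real name is not visible in this diff):

    import logging
    from logger_config import setup_logging  # hypothetical module name

    log = setup_logging(level=logging.DEBUG)
    log.debug("Start date: %s", "2025-01-01 20:00:00")  # formatted lazily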


@@ -24,7 +24,11 @@ class OqeeClient:  # pylint: disable=too-many-instance-attributes
         # Base headers template for API requests
         self._headers_template = {
-            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
+            "accept": (
+                "text/html,application/xhtml+xml,application/xml;q=0.9,"
+                "image/avif,image/webp,image/apng,*/*;q=0.8,"
+                "application/signed-exchange;v=b3;q=0.7"
+            ),
             "accept-language": "en-GB,en-US;q=0.9,en;q=0.8",
             "cache-control": "no-cache",
             "pragma": "no-cache",
@@ -37,7 +41,11 @@ class OqeeClient:  # pylint: disable=too-many-instance-attributes
             "sec-fetch-site": "none",
             "sec-fetch-user": "?1",
             "upgrade-insecure-requests": "1",
-            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/144.0.0.0 Safari/537.36",
+            "user-agent": (
+                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
+                "AppleWebKit/537.36 (KHTML, like Gecko) "
+                "Chrome/144.0.0.0 Safari/537.36"
+            ),
         }
         self.headers_base = self._build_headers()
@@ -128,7 +136,11 @@ class OqeeClient:  # pylint: disable=too-many-instance-attributes
                 "pragma": "no-cache",
                 "priority": "u=1, i",
                 "sec-ch-ua": '"Brave";v="131", "Chromium";v="131", "Not_A Brand";v="24"',
-                "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+                "user-agent": (
+                    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
+                    "AppleWebKit/537.36 (KHTML, like Gecko) "
+                    "Chrome/131.0.0.0 Safari/537.36"
+                ),
                 "x-oqee-customization": "0",
             }
         )
@@ -143,7 +155,10 @@ class OqeeClient:  # pylint: disable=too-many-instance-attributes
         state = r["state"][0]
         headers = {
-            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9, image/avif,image/webp,image/apng,*/*;q=0.8",
+            "Accept": (
+                "text/html,application/xhtml+xml,application/xml;q=0.9, "
+                "image/avif,image/webp,image/apng,*/*;q=0.8"
+            ),
             "Accept-Language": "fr-FR,fr;q=0.7",
             "Cache-Control": "max-age=0",
             "Connection": "keep-alive",
@@ -156,7 +171,11 @@ class OqeeClient:  # pylint: disable=too-many-instance-attributes
             "Sec-Fetch-User": "?1",
             "Sec-GPC": "1",
             "Upgrade-Insecure-Requests": "1",
-            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36",
+            "user-agent": (
+                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
+                "AppleWebKit/537.36 (KHTML, like Gecko) "
+                "Chrome/131.0.0.0 Safari/537.36"
+            ),
             "sec-ch-ua": '"Brave";v="123", "Not:A-Brand";v="8", "Chromium";v="123"',
             "sec-ch-ua-mobile": "?0",
             "sec-ch-ua-platform": '"macOS"',
@@ -216,7 +235,9 @@ class OqeeClient:  # pylint: disable=too-many-instance-attributes
         try:
             self.access_token = self.login_cred(username, password)
         except ValueError as e:
-            logger.warning(f"Credential login failed: {e}. Falling back to IP login.")
+            logger.warning(
+                "Credential login failed: %s. Falling back to IP login.", e
+            )
             self.access_token = self.login_ip()

         logger.info("Fetching rights token")


@@ -5,7 +5,6 @@ import base64
 import os
 import asyncio
 import time
-import subprocess
 from typing import Dict, Any

 import requests
@@ -317,12 +316,16 @@ def get_manifest(manifest_id):
         "sec-fetch-dest": "empty",
         "sec-fetch-mode": "cors",
         "sec-fetch-site": "cross-site",
-        "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36",
+        "user-agent": (
+            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
+            "AppleWebKit/537.36 (KHTML, like Gecko) "
+            "Chrome/143.0.0.0 Safari/537.36"
+        ),
     }
     format_id = 1
     url = (
-        f"https://api-proxad.dc2.oqee.net/playlist/v1/live/"
+        "https://api-proxad.dc2.oqee.net/playlist/v1/live/"
         f"{manifest_id}/{format_id}/live.mpd"
     )
     response = requests.get(url, headers=headers, timeout=10)
@@ -344,7 +347,11 @@ async def fetch_segment(session, ticks, track_id):
     headers = {
         "Accept": "*/*",
         "Referer": "https://tv.free.fr/",
-        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36",
+        "User-Agent": (
+            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
+            "AppleWebKit/537.36 (KHTML, like Gecko) "
+            "Chrome/143.0.0.0 Safari/537.36"
+        ),
     }
     try:
         async with session.get(url, headers=headers) as resp:
@@ -366,7 +373,11 @@ def get_init(output_folder, track_id):
     headers = {
         "Accept": "*/*",
         "Referer": "https://tv.free.fr/",
-        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36",
+        "User-Agent": (
+            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
+            "AppleWebKit/537.36 (KHTML, like Gecko) "
+            "Chrome/143.0.0.0 Safari/537.36"
+        ),
     }
     response = requests.get(url, headers=headers, timeout=10)
     if response.status_code == 200:
@@ -374,7 +385,7 @@ def get_init(output_folder, track_id):
         init_path = f"{output_folder}/segments_{track_id}/init.mp4"
         with open(init_path, "wb") as f:
             f.write(response.content)
-        logger.debug(f"Saved initialization segment to {init_path}")
+        logger.debug("Saved initialization segment to %s", init_path)
         return init_path
@@ -395,7 +406,11 @@ async def save_segments(output_folder, track_id, start_tick, rep_nb, duration):
     headers = {
         "Accept": "*/*",
         "Referer": "https://tv.free.fr/",
-        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36",
+        "User-Agent": (
+            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
+            "AppleWebKit/537.36 (KHTML, like Gecko) "
+            "Chrome/143.0.0.0 Safari/537.36"
+        ),
     }
     try:
         async with session.get(url, headers=headers) as resp:
@@ -406,21 +421,19 @@ async def save_segments(output_folder, track_id, start_tick, rep_nb, duration):
                         f.write(content)
                     return True
                 logger.error(
-                    f"Failed to download segment {rep} (tick {tick}): "
-                    f"HTTP {resp.status}"
+                    "Failed to download segment %d (tick %d): HTTP %d",
+                    rep, tick, resp.status
                 )
                 return False
         except aiohttp.ClientError as e:
-            logger.warning(f"Error downloading segment {rep} (tick {tick}): {e}")
+            logger.warning("Error downloading segment %d (tick %d): %s", rep, tick, e)
             return False

-    logger.info(f"Starting download of {rep_nb} segments...")
-    logger.debug(f"Track ID: {track_id}")
-    logger.debug(f"Base tick: {start_tick}")
+    logger.info("Starting download of %d segments...", rep_nb)
+    logger.debug("Track ID: %s", track_id)
+    logger.debug("Base tick: %d", start_tick)
     start_time = time.time()
-    successful = 0
-    failed = 0

     async with aiohttp.ClientSession() as session:
         tasks = []
@@ -438,13 +451,12 @@ async def save_segments(output_folder, track_id, start_tick, rep_nb, duration):
         result = await coro
         results.append(result)

     successful = sum(1 for r in results if r is True)
-    failed = rep_nb - successful
     end_time = time.time()
     elapsed = end_time - start_time
-    logger.debug(f"Download completed in {elapsed:.2f}s")
-    logger.info(f"Files saved to {output_folder}/segments_{track_id}/")
+    logger.debug("Download completed in %.2fs", elapsed)
+    logger.info("Files saved to %s/segments_%s/", output_folder, track_id)


 def get_kid(output_folder, track_id):
@@ -461,7 +473,7 @@ def get_kid(output_folder, track_id):
     for filename in os.listdir(folder):
         if filename.endswith(".m4s"):
             filepath = os.path.join(folder, filename)
-            logger.debug(f"Checking file: {filepath}")
+            logger.debug("Checking file: %s", filepath)
             with open(filepath, "rb") as f:
                 data = f.read()
                 # Pattern before KID


@@ -57,7 +57,7 @@ async def bruteforce(track_id, date):
     total_requests = 288000
     batch_size = 20000

-    logger.debug(f"Starting bruteforce for {track_id}")
+    logger.debug("Starting bruteforce for %s", track_id)

     start_time = time.time()
@@ -86,16 +86,16 @@ async def bruteforce(track_id, date):
             # Stop if we found valid ticks
             if valid_ticks:
-                logger.debug(f"Found valid ticks: {valid_ticks}, stopping bruteforce.")
+                logger.debug("Found valid ticks: %s, stopping bruteforce.", valid_ticks)
                 break
     except KeyboardInterrupt:
         logger.error("Interrupted by user (Ctrl+C)")

     elapsed = time.time() - start_time
-    logger.debug(f"Completed in {elapsed:.2f}s")
-    logger.debug(f"Speed: {total_requests / elapsed if elapsed > 0 else 0:.2f} req/s")
-    logger.debug(f"Total checked: {total_requests}")
+    logger.debug("Completed in %.2fs", elapsed)
+    logger.debug("Speed: %.2f req/s", total_requests / elapsed if elapsed > 0 else 0)
+    logger.debug("Total checked: %d", total_requests)

     return valid_ticks
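
One caveat of the parameterized style shown above: only the final string formatting is deferred. The argument expressions themselves (e.g. `total_requests / elapsed if elapsed > 0 else 0`) are still evaluated at call time. A small illustration, with `expensive_rate_calc` as a hypothetical stand-in for a costly computation:

    import logging

    logger = logging.getLogger("OqeeRewind")

    def expensive_rate_calc() -> float:  # hypothetical, for illustration
        return 288000 / 60.0

    # This call still runs expensive_rate_calc() even when DEBUG is off;
    # only the "%.2f" formatting is skipped.
    logger.debug("Speed: %.2f req/s", expensive_rate_calc())

    # Guard explicitly when the argument itself is expensive:
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Speed: %.2f req/s", expensive_rate_calc())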


@@ -20,10 +20,10 @@ def verify_mp4ff():
 def verify_cmd(path: str) -> bool:
     """Verify if the file provided at path is valid and exists, otherwise log error and exit."""
     if not os.path.exists(path):
-        logger.error(f"File does not exist: {path}")
+        logger.error("File does not exist: %s", path)
         sys.exit(1)
     if not os.path.isfile(path):
-        logger.error(f"Path is not a file: {path}")
+        logger.error("Path is not a file: %s", path)
         sys.exit(1)
     return True
@@ -40,7 +40,7 @@ def merge_segments(input_folder: str, track_id: str, output_file: str):
         for fname in segment_files:
             with open(f"{segment_folder}/{fname}", "rb") as infile:
                 outfile.write(infile.read())
     logger.info("Merged segments into %s", output_file)
-    logger.info(f"Merged segments into {output_file}")
+    logger.info("Merged segments into %s", output_file)


 def decrypt(input_file, init_path, output_file, key):
@@ -63,7 +63,7 @@ def decrypt(input_file, init_path, output_file, key):
         check=False,
     )
     if result.returncode == 0:
-        logger.info(f"Decrypted {input_file} to {output_file}")
+        logger.info("Decrypted %s to %s", input_file, output_file)
         return True
-    logger.error(f"Decryption failed: {result.stderr}")
+    logger.error("Decryption failed: %s", result.stderr)
     return False