chore: translation handling for log messages (#787)

This commit is contained in:
Jens
2026-01-24 15:27:46 +01:00
committed by GitHub
parent 9b75a4047a
commit 08385fa01d
9 changed files with 108 additions and 96 deletions

View File

@@ -1,3 +1,4 @@
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
# CodeRabbit Configuration for Kleinanzeigen Bot # CodeRabbit Configuration for Kleinanzeigen Bot
# Maintains project-specific rules for English code and translation system # Maintains project-specific rules for English code and translation system
@@ -6,7 +7,8 @@
# ============================================================================= # =============================================================================
language: "en" language: "en"
tone_instructions: "Be strict about English-only code and translation system usage. Focus on simple, maintainable solutions. Avoid unnecessary complexity and abstractions." tone_instructions: "Be strict about English-only code and translation system usage. Non-code may be in German. Focus on simple, maintainable solutions. Avoid unnecessary complexity and abstractions."
enable_free_tier: true
# ============================================================================= # =============================================================================
# REVIEWS # REVIEWS
@@ -25,6 +27,7 @@ reviews:
related_prs: true related_prs: true
suggested_labels: false suggested_labels: false
suggested_reviewers: true suggested_reviewers: true
in_progress_fortune: false
poem: false poem: false
# Path filters to focus on important files # Path filters to focus on important files
@@ -83,7 +86,8 @@ reviews:
11. Test business logic separately from web scraping 11. Test business logic separately from web scraping
12. Include SPDX license headers on all Python files 12. Include SPDX license headers on all Python files
13. Use type hints for all function parameters and return values 13. Use type hints for all function parameters and return values
14. Use structured logging with context 14. Use structured logging with context and appropriate log levels
15. Log message strings should be plain English without `_()` (TranslatingLogger handles translation); wrap non-log user-facing strings with `_()` and add translations
- path: "tests/**/*.py" - path: "tests/**/*.py"
instructions: | instructions: |
TESTING RULES: TESTING RULES:
@@ -143,6 +147,12 @@ reviews:
yamllint: yamllint:
enabled: true enabled: true
finishing_touches:
docstrings:
enabled: false
unit_tests:
enabled: false
# ============================================================================= # =============================================================================
# KNOWLEDGE BASE # KNOWLEDGE BASE
# ============================================================================= # =============================================================================
@@ -172,5 +182,3 @@ knowledge_base:
scope: "auto" scope: "auto"
pull_requests: pull_requests:
scope: "auto" scope: "auto"

View File

@@ -302,6 +302,8 @@ See the [LICENSE.txt](LICENSE.txt) file for our project's licensing. All source
- All user-facing output (log messages, print statements, CLI help, etc.) must be written in **English**. - All user-facing output (log messages, print statements, CLI help, etc.) must be written in **English**.
- For every user-facing message, a **German translation** must be added to `src/kleinanzeigen_bot/resources/translations.de.yaml`. - For every user-facing message, a **German translation** must be added to `src/kleinanzeigen_bot/resources/translations.de.yaml`.
- Log messages are auto-translated by `TranslatingLogger`; do not wrap `LOG.*`/`logger.*` message strings with `_()`.
- Non-log user-facing strings (e.g., `print`, `ainput`, exceptions, validation messages) should use `_()`.
- Use the translation system for all output—**never hardcode German or other languages** in the code. - Use the translation system for all output—**never hardcode German or other languages** in the code.
- If you add or change a user-facing message, update the translation file and ensure that translation completeness tests pass (`tests/unit/test_translations.py`). - If you add or change a user-facing message, update the translation file and ensure that translation completeness tests pass (`tests/unit/test_translations.py`).
- Review the translation guidelines and patterns in the codebase for correct usage. - Review the translation guidelines and patterns in the codebase for correct usage.

View File

@@ -55,7 +55,7 @@ def _repost_cycle_ready(ad_cfg:Ad, ad_file_relative:str) -> bool:
if total_reposts <= delay_reposts: if total_reposts <= delay_reposts:
remaining = (delay_reposts + 1) - total_reposts remaining = (delay_reposts + 1) - total_reposts
LOG.info( LOG.info(
_("Auto price reduction delayed for [%s]: waiting %s more reposts (completed %s, applied %s reductions)"), "Auto price reduction delayed for [%s]: waiting %s more reposts (completed %s, applied %s reductions)",
ad_file_relative, ad_file_relative,
max(remaining, 1), # Clamp to 1 to avoid showing "0 more reposts" when at threshold max(remaining, 1), # Clamp to 1 to avoid showing "0 more reposts" when at threshold
total_reposts, total_reposts,
@@ -64,9 +64,7 @@ def _repost_cycle_ready(ad_cfg:Ad, ad_file_relative:str) -> bool:
return False return False
if eligible_cycles <= applied_cycles: if eligible_cycles <= applied_cycles:
LOG.debug( LOG.debug("Auto price reduction already applied for [%s]: %s reductions match %s eligible reposts", ad_file_relative, applied_cycles, eligible_cycles)
_("Auto price reduction already applied for [%s]: %s reductions match %s eligible reposts"), ad_file_relative, applied_cycles, eligible_cycles
)
return False return False
return True return True
@@ -86,7 +84,7 @@ def _day_delay_elapsed(ad_cfg:Ad, ad_file_relative:str) -> bool:
reference = ad_cfg.updated_on or ad_cfg.created_on reference = ad_cfg.updated_on or ad_cfg.created_on
if not reference: if not reference:
LOG.info(_("Auto price reduction delayed for [%s]: waiting %s days but publish timestamp missing"), ad_file_relative, delay_days) LOG.info("Auto price reduction delayed for [%s]: waiting %s days but publish timestamp missing", ad_file_relative, delay_days)
return False return False
# Note: .days truncates to whole days (e.g., 1.9 days -> 1 day) # Note: .days truncates to whole days (e.g., 1.9 days -> 1 day)
@@ -94,7 +92,7 @@ def _day_delay_elapsed(ad_cfg:Ad, ad_file_relative:str) -> bool:
# Both misc.now() and stored timestamps use UTC (via misc.now()), ensuring consistent calculations # Both misc.now() and stored timestamps use UTC (via misc.now()), ensuring consistent calculations
elapsed_days = (misc.now() - reference).days elapsed_days = (misc.now() - reference).days
if elapsed_days < delay_days: if elapsed_days < delay_days:
LOG.info(_("Auto price reduction delayed for [%s]: waiting %s days (elapsed %s)"), ad_file_relative, delay_days, elapsed_days) LOG.info("Auto price reduction delayed for [%s]: waiting %s days (elapsed %s)", ad_file_relative, delay_days, elapsed_days)
return False return False
return True return True
@@ -116,11 +114,11 @@ def apply_auto_price_reduction(ad_cfg:Ad, _ad_cfg_orig:dict[str, Any], ad_file_r
base_price = ad_cfg.price base_price = ad_cfg.price
if base_price is None: if base_price is None:
LOG.warning(_("Auto price reduction is enabled for [%s] but no price is configured."), ad_file_relative) LOG.warning("Auto price reduction is enabled for [%s] but no price is configured.", ad_file_relative)
return return
if ad_cfg.auto_price_reduction.min_price is not None and ad_cfg.auto_price_reduction.min_price == base_price: if ad_cfg.auto_price_reduction.min_price is not None and ad_cfg.auto_price_reduction.min_price == base_price:
LOG.warning(_("Auto price reduction is enabled for [%s] but min_price equals price (%s) - no reductions will occur."), ad_file_relative, base_price) LOG.warning("Auto price reduction is enabled for [%s] but min_price equals price (%s) - no reductions will occur.", ad_file_relative, base_price)
return return
if not _repost_cycle_ready(ad_cfg, ad_file_relative): if not _repost_cycle_ready(ad_cfg, ad_file_relative):
@@ -140,10 +138,10 @@ def apply_auto_price_reduction(ad_cfg:Ad, _ad_cfg_orig:dict[str, Any], ad_file_r
if effective_price == base_price: if effective_price == base_price:
# Still increment counter so small fractional reductions can accumulate over multiple cycles # Still increment counter so small fractional reductions can accumulate over multiple cycles
ad_cfg.price_reduction_count = next_cycle ad_cfg.price_reduction_count = next_cycle
LOG.info(_("Auto price reduction kept price %s after attempting %s reduction cycles"), effective_price, next_cycle) LOG.info("Auto price reduction kept price %s after attempting %s reduction cycles", effective_price, next_cycle)
return return
LOG.info(_("Auto price reduction applied: %s -> %s after %s reduction cycles"), base_price, effective_price, next_cycle) LOG.info("Auto price reduction applied: %s -> %s after %s reduction cycles", base_price, effective_price, next_cycle)
ad_cfg.price = effective_price ad_cfg.price = effective_price
ad_cfg.price_reduction_count = next_cycle ad_cfg.price_reduction_count = next_cycle
# Note: price_reduction_count is persisted to ad_cfg_orig only after successful publish # Note: price_reduction_count is persisted to ad_cfg_orig only after successful publish
@@ -225,7 +223,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
if self.installation_mode is None: if self.installation_mode is None:
# First run - prompt user # First run - prompt user
LOG.info(_("First run detected, prompting user for installation mode")) LOG.info("First run detected, prompting user for installation mode")
self.installation_mode = xdg_paths.prompt_installation_mode() self.installation_mode = xdg_paths.prompt_installation_mode()
# Set config path based on detected mode # Set config path based on detected mode
@@ -242,8 +240,8 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
# Log installation mode and config location (INFO level for user visibility) # Log installation mode and config location (INFO level for user visibility)
mode_display = "portable (current directory)" if self.installation_mode == "portable" else "system-wide (XDG directories)" mode_display = "portable (current directory)" if self.installation_mode == "portable" else "system-wide (XDG directories)"
LOG.info(_("Installation mode: %s"), mode_display) LOG.info("Installation mode: %s", mode_display)
LOG.info(_("Config file: %s"), self.config_file_path) LOG.info("Config file: %s", self.config_file_path)
async def run(self, args:list[str]) -> None: async def run(self, args:list[str]) -> None:
self.parse_args(args) self.parse_args(args)
@@ -357,7 +355,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
# Default to all ads if no selector provided # Default to all ads if no selector provided
if not re.compile(r"\d+[,\d+]*").search(self.ads_selector): if not re.compile(r"\d+[,\d+]*").search(self.ads_selector):
LOG.info(_("Extending all ads within 8-day window...")) LOG.info("Extending all ads within 8-day window...")
self.ads_selector = "all" self.ads_selector = "all"
if ads := self.load_ads(): if ads := self.load_ads():
@@ -802,7 +800,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
LOG.info("Checking if already logged in...") LOG.info("Checking if already logged in...")
await self.web_open(f"{self.root_url}") await self.web_open(f"{self.root_url}")
if getattr(self, "page", None) is not None: if getattr(self, "page", None) is not None:
LOG.debug(_("Current page URL after opening homepage: %s"), self.page.url) LOG.debug("Current page URL after opening homepage: %s", self.page.url)
if await self.is_logged_in(): if await self.is_logged_in():
LOG.info("Already logged in as [%s]. Skipping login.", self.config.login.username) LOG.info("Already logged in as [%s]. Skipping login.", self.config.login.username)
@@ -816,14 +814,14 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
# Sometimes a second login is required # Sometimes a second login is required
if not await self.is_logged_in(): if not await self.is_logged_in():
LOG.debug(_("First login attempt did not succeed, trying second login attempt")) LOG.debug("First login attempt did not succeed, trying second login attempt")
await self.fill_login_data_and_send() await self.fill_login_data_and_send()
await self.handle_after_login_logic() await self.handle_after_login_logic()
if await self.is_logged_in(): if await self.is_logged_in():
LOG.debug(_("Second login attempt succeeded")) LOG.debug("Second login attempt succeeded")
else: else:
LOG.warning(_("Second login attempt also failed - login may not have succeeded")) LOG.warning("Second login attempt also failed - login may not have succeeded")
async def fill_login_data_and_send(self) -> None: async def fill_login_data_and_send(self) -> None:
LOG.info("Logging in as [%s]...", self.config.login.username) LOG.info("Logging in as [%s]...", self.config.login.username)
@@ -869,27 +867,27 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
login_check_timeout = self._timeout("login_detection") login_check_timeout = self._timeout("login_detection")
effective_timeout = self._effective_timeout("login_detection") effective_timeout = self._effective_timeout("login_detection")
username = self.config.login.username.lower() username = self.config.login.username.lower()
LOG.debug(_("Starting login detection (timeout: %.1fs base, %.1fs effective with multiplier/backoff)"), login_check_timeout, effective_timeout) LOG.debug("Starting login detection (timeout: %.1fs base, %.1fs effective with multiplier/backoff)", login_check_timeout, effective_timeout)
# Try to find the standard element first # Try to find the standard element first
try: try:
user_info = await self.web_text(By.CLASS_NAME, "mr-medium", timeout = login_check_timeout) user_info = await self.web_text(By.CLASS_NAME, "mr-medium", timeout = login_check_timeout)
if username in user_info.lower(): if username in user_info.lower():
LOG.debug(_("Login detected via .mr-medium element")) LOG.debug("Login detected via .mr-medium element")
return True return True
except TimeoutError: except TimeoutError:
LOG.debug(_("Timeout waiting for .mr-medium element after %.1fs"), effective_timeout) LOG.debug("Timeout waiting for .mr-medium element after %.1fs", effective_timeout)
# If standard element not found or didn't contain username, try the alternative # If standard element not found or didn't contain username, try the alternative
try: try:
user_info = await self.web_text(By.ID, "user-email", timeout = login_check_timeout) user_info = await self.web_text(By.ID, "user-email", timeout = login_check_timeout)
if username in user_info.lower(): if username in user_info.lower():
LOG.debug(_("Login detected via #user-email element")) LOG.debug("Login detected via #user-email element")
return True return True
except TimeoutError: except TimeoutError:
LOG.debug(_("Timeout waiting for #user-email element after %.1fs"), effective_timeout) LOG.debug("Timeout waiting for #user-email element after %.1fs", effective_timeout)
LOG.debug(_("No login detected - neither .mr-medium nor #user-email found with username")) LOG.debug("No login detected - neither .mr-medium nor #user-email found with username")
return False return False
async def delete_ads(self, ad_cfgs:list[tuple[str, Ad, dict[str, Any]]]) -> None: async def delete_ads(self, ad_cfgs:list[tuple[str, Ad, dict[str, Any]]]) -> None:
@@ -946,24 +944,24 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
for ad_file, ad_cfg, ad_cfg_orig in ad_cfgs: for ad_file, ad_cfg, ad_cfg_orig in ad_cfgs:
# Skip unpublished ads (no ID) # Skip unpublished ads (no ID)
if not ad_cfg.id: if not ad_cfg.id:
LOG.info(_(" -> SKIPPED: ad '%s' is not published yet"), ad_cfg.title) LOG.info(" -> SKIPPED: ad '%s' is not published yet", ad_cfg.title)
continue continue
# Find ad in published list # Find ad in published list
published_ad = next((ad for ad in published_ads if ad["id"] == ad_cfg.id), None) published_ad = next((ad for ad in published_ads if ad["id"] == ad_cfg.id), None)
if not published_ad: if not published_ad:
LOG.warning(_(" -> SKIPPED: ad '%s' (ID: %s) not found in published ads"), ad_cfg.title, ad_cfg.id) LOG.warning(" -> SKIPPED: ad '%s' (ID: %s) not found in published ads", ad_cfg.title, ad_cfg.id)
continue continue
# Skip non-active ads # Skip non-active ads
if published_ad.get("state") != "active": if published_ad.get("state") != "active":
LOG.info(_(" -> SKIPPED: ad '%s' is not active (state: %s)"), ad_cfg.title, published_ad.get("state")) LOG.info(" -> SKIPPED: ad '%s' is not active (state: %s)", ad_cfg.title, published_ad.get("state"))
continue continue
# Check if ad is within 8-day extension window using API's endDate # Check if ad is within 8-day extension window using API's endDate
end_date_str = published_ad.get("endDate") end_date_str = published_ad.get("endDate")
if not end_date_str: if not end_date_str:
LOG.warning(_(" -> SKIPPED: ad '%s' has no endDate in API response"), ad_cfg.title) LOG.warning(" -> SKIPPED: ad '%s' has no endDate in API response", ad_cfg.title)
continue continue
# Intentionally parsing naive datetime from kleinanzeigen API's German date format, timezone not relevant for date-only comparison # Intentionally parsing naive datetime from kleinanzeigen API's German date format, timezone not relevant for date-only comparison
@@ -972,33 +970,33 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
# Magic value 8 is kleinanzeigen.de's platform policy: extensions only possible within 8 days of expiry # Magic value 8 is kleinanzeigen.de's platform policy: extensions only possible within 8 days of expiry
if days_until_expiry <= 8: # noqa: PLR2004 if days_until_expiry <= 8: # noqa: PLR2004
LOG.info(_(" -> ad '%s' expires in %d days, will extend"), ad_cfg.title, days_until_expiry) LOG.info(" -> ad '%s' expires in %d days, will extend", ad_cfg.title, days_until_expiry)
ads_to_extend.append((ad_file, ad_cfg, ad_cfg_orig, published_ad)) ads_to_extend.append((ad_file, ad_cfg, ad_cfg_orig, published_ad))
else: else:
LOG.info(_(" -> SKIPPED: ad '%s' expires in %d days (can only extend within 8 days)"), ad_cfg.title, days_until_expiry) LOG.info(" -> SKIPPED: ad '%s' expires in %d days (can only extend within 8 days)", ad_cfg.title, days_until_expiry)
if not ads_to_extend: if not ads_to_extend:
LOG.info(_("No ads need extension at this time.")) LOG.info("No ads need extension at this time.")
LOG.info("############################################") LOG.info("############################################")
LOG.info(_("DONE: No ads extended.")) LOG.info("DONE: No ads extended.")
LOG.info("############################################") LOG.info("############################################")
return return
# Process extensions # Process extensions
success_count = 0 success_count = 0
for idx, (ad_file, ad_cfg, ad_cfg_orig, _published_ad) in enumerate(ads_to_extend, start = 1): for idx, (ad_file, ad_cfg, ad_cfg_orig, _published_ad) in enumerate(ads_to_extend, start = 1):
LOG.info(_("Processing %s/%s: '%s' from [%s]..."), idx, len(ads_to_extend), ad_cfg.title, ad_file) LOG.info("Processing %s/%s: '%s' from [%s]...", idx, len(ads_to_extend), ad_cfg.title, ad_file)
if await self.extend_ad(ad_file, ad_cfg, ad_cfg_orig): if await self.extend_ad(ad_file, ad_cfg, ad_cfg_orig):
success_count += 1 success_count += 1
await self.web_sleep() await self.web_sleep()
LOG.info("############################################") LOG.info("############################################")
LOG.info(_("DONE: Extended %s"), pluralize("ad", success_count)) LOG.info("DONE: Extended %s", pluralize("ad", success_count))
LOG.info("############################################") LOG.info("############################################")
async def extend_ad(self, ad_file:str, ad_cfg:Ad, ad_cfg_orig:dict[str, Any]) -> bool: async def extend_ad(self, ad_file:str, ad_cfg:Ad, ad_cfg_orig:dict[str, Any]) -> bool:
"""Extends a single ad listing.""" """Extends a single ad listing."""
LOG.info(_("Extending ad '%s' (ID: %s)..."), ad_cfg.title, ad_cfg.id) LOG.info("Extending ad '%s' (ID: %s)...", ad_cfg.title, ad_cfg.id)
try: try:
# Navigate to ad management page # Navigate to ad management page
@@ -1010,7 +1008,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
try: try:
await self.web_click(By.XPATH, extend_button_xpath) await self.web_click(By.XPATH, extend_button_xpath)
except TimeoutError: except TimeoutError:
LOG.error(_(" -> FAILED: Could not find extend button for ad ID %s"), ad_cfg.id) LOG.error(" -> FAILED: Could not find extend button for ad ID %s", ad_cfg.id)
return False return False
# Handle confirmation dialog # Handle confirmation dialog
@@ -1024,21 +1022,21 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
await self.web_click(By.CSS_SELECTOR, 'button[aria-label="Schließen"]', timeout = dialog_close_timeout) await self.web_click(By.CSS_SELECTOR, 'button[aria-label="Schließen"]', timeout = dialog_close_timeout)
LOG.debug(" -> Closed confirmation dialog") LOG.debug(" -> Closed confirmation dialog")
except TimeoutError: except TimeoutError:
LOG.warning(_(" -> No confirmation dialog found, extension may have completed directly")) LOG.warning(" -> No confirmation dialog found, extension may have completed directly")
# Update metadata in YAML file # Update metadata in YAML file
# Update updated_on to track when ad was extended # Update updated_on to track when ad was extended
ad_cfg_orig["updated_on"] = misc.now().isoformat(timespec = "seconds") ad_cfg_orig["updated_on"] = misc.now().isoformat(timespec = "seconds")
dicts.save_dict(ad_file, ad_cfg_orig) dicts.save_dict(ad_file, ad_cfg_orig)
LOG.info(_(" -> SUCCESS: ad extended with ID %s"), ad_cfg.id) LOG.info(" -> SUCCESS: ad extended with ID %s", ad_cfg.id)
return True return True
except TimeoutError as ex: except TimeoutError as ex:
LOG.error(_(" -> FAILED: Timeout while extending ad '%s': %s"), ad_cfg.title, ex) LOG.error(" -> FAILED: Timeout while extending ad '%s': %s", ad_cfg.title, ex)
return False return False
except OSError as ex: except OSError as ex:
LOG.error(_(" -> FAILED: Could not persist extension for ad '%s': %s"), ad_cfg.title, ex) LOG.error(" -> FAILED: Could not persist extension for ad '%s': %s", ad_cfg.title, ex)
return False return False
async def __check_publishing_result(self) -> bool: async def __check_publishing_result(self) -> bool:
@@ -1072,10 +1070,10 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
raise # Respect task cancellation raise # Respect task cancellation
except (TimeoutError, ProtocolException) as ex: except (TimeoutError, ProtocolException) as ex:
if attempt < max_retries: if attempt < max_retries:
LOG.warning(_("Attempt %s/%s failed for '%s': %s. Retrying..."), attempt, max_retries, ad_cfg.title, ex) LOG.warning("Attempt %s/%s failed for '%s': %s. Retrying...", attempt, max_retries, ad_cfg.title, ex)
await self.web_sleep(2) # Wait before retry await self.web_sleep(2) # Wait before retry
else: else:
LOG.error(_("All %s attempts failed for '%s': %s. Skipping ad."), max_retries, ad_cfg.title, ex) LOG.error("All %s attempts failed for '%s': %s. Skipping ad.", max_retries, ad_cfg.title, ex)
failed_count += 1 failed_count += 1
# Check publishing result separately (no retry - ad is already submitted) # Check publishing result separately (no retry - ad is already submitted)
@@ -1084,16 +1082,16 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
publish_timeout = self._timeout("publishing_result") publish_timeout = self._timeout("publishing_result")
await self.web_await(self.__check_publishing_result, timeout = publish_timeout) await self.web_await(self.__check_publishing_result, timeout = publish_timeout)
except TimeoutError: except TimeoutError:
LOG.warning(_(" -> Could not confirm publishing for '%s', but ad may be online"), ad_cfg.title) LOG.warning(" -> Could not confirm publishing for '%s', but ad may be online", ad_cfg.title)
if success and self.config.publishing.delete_old_ads == "AFTER_PUBLISH" and not self.keep_old_ads: if success and self.config.publishing.delete_old_ads == "AFTER_PUBLISH" and not self.keep_old_ads:
await self.delete_ad(ad_cfg, published_ads, delete_old_ads_by_title = False) await self.delete_ad(ad_cfg, published_ads, delete_old_ads_by_title = False)
LOG.info("############################################") LOG.info("############################################")
if failed_count > 0: if failed_count > 0:
LOG.info(_("DONE: (Re-)published %s (%s failed after retries)"), pluralize("ad", count - failed_count), failed_count) LOG.info("DONE: (Re-)published %s (%s failed after retries)", pluralize("ad", count - failed_count), failed_count)
else: else:
LOG.info(_("DONE: (Re-)published %s"), pluralize("ad", count)) LOG.info("DONE: (Re-)published %s", pluralize("ad", count))
LOG.info("############################################") LOG.info("############################################")
async def publish_ad( async def publish_ad(
@@ -1316,7 +1314,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
try: try:
await self.web_input(By.ID, "pstad-zip", contact.zipcode) await self.web_input(By.ID, "pstad-zip", contact.zipcode)
except TimeoutError: except TimeoutError:
LOG.warning(_("Could not set contact zipcode: %s"), contact.zipcode) LOG.warning("Could not set contact zipcode: %s", contact.zipcode)
zipcode_set = False zipcode_set = False
# Set city if location is specified # Set city if location is specified
if contact.location and zipcode_set: if contact.location and zipcode_set:
@@ -1336,9 +1334,9 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
found = True found = True
break break
if not found: if not found:
LOG.warning(_("No city dropdown option matched location: %s"), contact.location) LOG.warning("No city dropdown option matched location: %s", contact.location)
except TimeoutError: except TimeoutError:
LOG.warning(_("Could not set contact location: %s"), contact.location) LOG.warning("Could not set contact location: %s", contact.location)
############################# #############################
# set contact street # set contact street
@@ -1350,7 +1348,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
await self.web_sleep() await self.web_sleep()
await self.web_input(By.ID, "pstad-street", contact.street) await self.web_input(By.ID, "pstad-street", contact.street)
except TimeoutError: except TimeoutError:
LOG.warning(_("Could not set contact street.")) LOG.warning("Could not set contact street.")
############################# #############################
# set contact name # set contact name
@@ -1360,7 +1358,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
if not await self.web_check(By.ID, "postad-contactname", Is.READONLY): if not await self.web_check(By.ID, "postad-contactname", Is.READONLY):
await self.web_input(By.ID, "postad-contactname", contact.name) await self.web_input(By.ID, "postad-contactname", contact.name)
except TimeoutError: except TimeoutError:
LOG.warning(_("Could not set contact name.")) LOG.warning("Could not set contact name.")
############################# #############################
# set contact phone # set contact phone
@@ -1378,10 +1376,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
await self.web_input(By.ID, "postad-phonenumber", contact.phone) await self.web_input(By.ID, "postad-phonenumber", contact.phone)
except TimeoutError: except TimeoutError:
LOG.warning( LOG.warning(
_( "Phone number field not present on page. This is expected for many private accounts; commercial accounts may still support phone numbers."
"Phone number field not present on page. This is expected for many private accounts; "
"commercial accounts may still support phone numbers."
)
) )
async def update_ads(self, ad_cfgs:list[tuple[str, Ad, dict[str, Any]]]) -> None: async def update_ads(self, ad_cfgs:list[tuple[str, Ad, dict[str, Any]]]) -> None:
@@ -1495,25 +1490,25 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
try: try:
special_attr_elem = await self.web_find(By.ID, special_attribute_key) special_attr_elem = await self.web_find(By.ID, special_attribute_key)
except TimeoutError as ex: except TimeoutError as ex:
LOG.debug(_("Attribute field '%s' could not be found."), special_attribute_key) LOG.debug("Attribute field '%s' could not be found.", special_attribute_key)
raise TimeoutError(_("Failed to set attribute '%s'") % special_attribute_key) from ex raise TimeoutError(_("Failed to set attribute '%s'") % special_attribute_key) from ex
try: try:
elem_id:str = str(special_attr_elem.attrs.id) elem_id:str = str(special_attr_elem.attrs.id)
if special_attr_elem.local_name == "select": if special_attr_elem.local_name == "select":
LOG.debug(_("Attribute field '%s' seems to be a select..."), special_attribute_key) LOG.debug("Attribute field '%s' seems to be a select...", special_attribute_key)
await self.web_select(By.ID, elem_id, special_attribute_value_str) await self.web_select(By.ID, elem_id, special_attribute_value_str)
elif special_attr_elem.attrs.type == "checkbox": elif special_attr_elem.attrs.type == "checkbox":
LOG.debug(_("Attribute field '%s' seems to be a checkbox..."), special_attribute_key) LOG.debug("Attribute field '%s' seems to be a checkbox...", special_attribute_key)
await self.web_click(By.ID, elem_id) await self.web_click(By.ID, elem_id)
elif special_attr_elem.attrs.type == "text" and special_attr_elem.attrs.get("role") == "combobox": elif special_attr_elem.attrs.type == "text" and special_attr_elem.attrs.get("role") == "combobox":
LOG.debug(_("Attribute field '%s' seems to be a Combobox (i.e. text input with filtering dropdown)..."), special_attribute_key) LOG.debug("Attribute field '%s' seems to be a Combobox (i.e. text input with filtering dropdown)...", special_attribute_key)
await self.web_select_combobox(By.ID, elem_id, special_attribute_value_str) await self.web_select_combobox(By.ID, elem_id, special_attribute_value_str)
else: else:
LOG.debug(_("Attribute field '%s' seems to be a text input..."), special_attribute_key) LOG.debug("Attribute field '%s' seems to be a text input...", special_attribute_key)
await self.web_input(By.ID, elem_id, special_attribute_value_str) await self.web_input(By.ID, elem_id, special_attribute_value_str)
except TimeoutError as ex: except TimeoutError as ex:
LOG.debug(_("Failed to set attribute field '%s' via known input types."), special_attribute_key) LOG.debug("Failed to set attribute field '%s' via known input types.", special_attribute_key)
raise TimeoutError(_("Failed to set attribute '%s'") % special_attribute_key) from ex raise TimeoutError(_("Failed to set attribute '%s'") % special_attribute_key) from ex
LOG.debug("Successfully set attribute field [%s] to [%s]...", special_attribute_key, special_attribute_value_str) LOG.debug("Successfully set attribute field [%s] to [%s]...", special_attribute_key, special_attribute_value_str)
@@ -1673,7 +1668,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
# Wait for all images to be processed and thumbnails to appear # Wait for all images to be processed and thumbnails to appear
expected_count = len(ad_cfg.images) expected_count = len(ad_cfg.images)
LOG.info(_(" -> waiting for %s to be processed..."), pluralize("image", ad_cfg.images)) LOG.info(" -> waiting for %s to be processed...", pluralize("image", ad_cfg.images))
async def check_thumbnails_uploaded() -> bool: async def check_thumbnails_uploaded() -> bool:
try: try:
@@ -1684,7 +1679,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
) )
current_count = len(thumbnails) current_count = len(thumbnails)
if current_count < expected_count: if current_count < expected_count:
LOG.debug(_(" -> %d of %d images processed"), current_count, expected_count) LOG.debug(" -> %d of %d images processed", current_count, expected_count)
return current_count == expected_count return current_count == expected_count
except TimeoutError: except TimeoutError:
# No thumbnails found yet, continue polling # No thumbnails found yet, continue polling
@@ -1707,7 +1702,7 @@ class KleinanzeigenBot(WebScrapingMixin): # noqa: PLR0904
% {"expected": expected_count, "found": current_count} % {"expected": expected_count, "found": current_count}
) from ex ) from ex
LOG.info(_(" -> all images uploaded successfully")) LOG.info(" -> all images uploaded successfully")
async def download_ads(self) -> None: async def download_ads(self) -> None:
""" """

View File

@@ -51,7 +51,7 @@ class AdExtractor(WebScrapingMixin):
# create sub-directory for ad(s) to download (if necessary): # create sub-directory for ad(s) to download (if necessary):
download_dir = xdg_paths.get_downloaded_ads_path(self.installation_mode) download_dir = xdg_paths.get_downloaded_ads_path(self.installation_mode)
LOG.info(_("Using download directory: %s"), download_dir) LOG.info("Using download directory: %s", download_dir)
# Note: xdg_paths.get_downloaded_ads_path() already creates the directory # Note: xdg_paths.get_downloaded_ads_path() already creates the directory
# Extract ad info and determine final directory path # Extract ad info and determine final directory path
@@ -447,7 +447,7 @@ class AdExtractor(WebScrapingMixin):
return f"{category_ids[0]}/{category_ids[0]}" return f"{category_ids[0]}/{category_ids[0]}"
# Fallback to legacy selectors in case the breadcrumb structure is unexpected. # Fallback to legacy selectors in case the breadcrumb structure is unexpected.
LOG.debug(_("Falling back to legacy breadcrumb selectors; collected ids: %s"), category_ids) LOG.debug("Falling back to legacy breadcrumb selectors; collected ids: %s", category_ids)
fallback_timeout = self._effective_timeout() fallback_timeout = self._effective_timeout()
try: try:
category_first_part = await self.web_find(By.CSS_SELECTOR, "a:nth-of-type(2)", parent = category_line) category_first_part = await self.web_find(By.CSS_SELECTOR, "a:nth-of-type(2)", parent = category_line)

View File

@@ -95,9 +95,20 @@ class DownloadConfig(ContextualModel):
default = False, default = False,
description = "if true, all shipping options matching the package size will be included", description = "if true, all shipping options matching the package size will be included",
) )
excluded_shipping_options:list[str] = Field(default_factory = list, description = "list of shipping options to exclude, e.g. ['DHL_2', 'DHL_5']") excluded_shipping_options:list[str] = Field(
folder_name_max_length:int = Field(default = 100, ge = 10, le = 255, description = "maximum length for folder names when downloading ads (default: 100)") default_factory = list,
rename_existing_folders:bool = Field(default = False, description = "if true, rename existing folders without titles to include titles (default: false)") description = "list of shipping options to exclude, e.g. ['DHL_2', 'DHL_5']",
)
folder_name_max_length:int = Field(
default = 100,
ge = 10,
le = 255,
description = "maximum length for folder names when downloading ads (default: 100)",
)
rename_existing_folders:bool = Field(
default = False,
description = "if true, rename existing folders without titles to include titles (default: false)",
)
class BrowserConfig(ContextualModel): class BrowserConfig(ContextualModel):

View File

@@ -328,7 +328,6 @@ kleinanzeigen_bot/utils/dicts.py:
################################################# #################################################
load_dict_if_exists: load_dict_if_exists:
"Loading %s[%s]...": "Lade %s[%s]..." "Loading %s[%s]...": "Lade %s[%s]..."
" from ": " von "
"Unsupported file type. The filename \"%s\" must end with *.json, *.yaml, or *.yml": "Nicht unterstützter Dateityp. Der Dateiname \"%s\" muss mit *.json, *.yaml oder *.yml enden" "Unsupported file type. The filename \"%s\" must end with *.json, *.yaml, or *.yml": "Nicht unterstützter Dateityp. Der Dateiname \"%s\" muss mit *.json, *.yaml oder *.yml enden"
save_dict: save_dict:
"Saving [%s]...": "Speichere [%s]..." "Saving [%s]...": "Speichere [%s]..."

View File

@@ -6,7 +6,6 @@ from __future__ import annotations
import logging import logging
from datetime import datetime from datetime import datetime
from gettext import gettext as _
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
import colorama import colorama
@@ -93,7 +92,7 @@ class UpdateChecker:
commit_hash = str(sha) if sha else None commit_hash = str(sha) if sha else None
return commit_hash, commit_date return commit_hash, commit_date
except Exception as e: except Exception as e:
logger.warning(_("Could not resolve commit '%s': %s"), commitish, e) logger.warning("Could not resolve commit '%s': %s", commitish, e)
return None, None return None, None
def _get_short_commit_hash(self, commit:str) -> str: def _get_short_commit_hash(self, commit:str) -> str:
@@ -135,12 +134,12 @@ class UpdateChecker:
local_version = self.get_local_version() local_version = self.get_local_version()
if not local_version: if not local_version:
logger.warning(_("Could not determine local version.")) logger.warning("Could not determine local version.")
return return
local_commitish = self._get_commit_hash(local_version) local_commitish = self._get_commit_hash(local_version)
if not local_commitish: if not local_commitish:
logger.warning(_("Could not determine local commit hash.")) logger.warning("Could not determine local commit hash.")
return return
# --- Fetch release info from GitHub using correct endpoint per channel --- # --- Fetch release info from GitHub using correct endpoint per channel ---
@@ -152,7 +151,7 @@ class UpdateChecker:
release = response.json() release = response.json()
# Defensive: ensure it's not a prerelease # Defensive: ensure it's not a prerelease
if release.get("prerelease", False): if release.get("prerelease", False):
logger.warning(_("Latest release from GitHub is a prerelease, but 'latest' channel expects a stable release.")) logger.warning("Latest release from GitHub is a prerelease, but 'latest' channel expects a stable release.")
return return
elif self.config.update_check.channel == "preview": elif self.config.update_check.channel == "preview":
# Use /releases endpoint and select the most recent prerelease # Use /releases endpoint and select the most recent prerelease
@@ -162,13 +161,13 @@ class UpdateChecker:
# Find the most recent prerelease # Find the most recent prerelease
release = next((r for r in releases if r.get("prerelease", False) and not r.get("draft", False)), None) release = next((r for r in releases if r.get("prerelease", False) and not r.get("draft", False)), None)
if not release: if not release:
logger.warning(_("No prerelease found for 'preview' channel.")) logger.warning("No prerelease found for 'preview' channel.")
return return
else: else:
logger.warning(_("Unknown update channel: %s"), self.config.update_check.channel) logger.warning("Unknown update channel: %s", self.config.update_check.channel)
return return
except Exception as e: except Exception as e:
logger.warning(_("Could not get releases: %s"), e) logger.warning("Could not get releases: %s", e)
return return
# Get release commit-ish (use tag name to avoid branch tip drift) # Get release commit-ish (use tag name to avoid branch tip drift)
@@ -176,20 +175,20 @@ class UpdateChecker:
if not release_commitish: if not release_commitish:
release_commitish = release.get("target_commitish") release_commitish = release.get("target_commitish")
if not release_commitish: if not release_commitish:
logger.warning(_("Could not determine release commit hash.")) logger.warning("Could not determine release commit hash.")
return return
# Resolve commit hashes and dates for comparison # Resolve commit hashes and dates for comparison
local_commit, local_commit_date = self._resolve_commitish(local_commitish) local_commit, local_commit_date = self._resolve_commitish(local_commitish)
release_commit, release_commit_date = self._resolve_commitish(str(release_commitish)) release_commit, release_commit_date = self._resolve_commitish(str(release_commitish))
if not local_commit or not release_commit or not local_commit_date or not release_commit_date: if not local_commit or not release_commit or not local_commit_date or not release_commit_date:
logger.warning(_("Could not determine commit dates for comparison.")) logger.warning("Could not determine commit dates for comparison.")
return return
if self._commits_match(local_commit, release_commit): if self._commits_match(local_commit, release_commit):
# If the commit hashes are identical, we are on the latest version. Do not proceed to other checks. # If the commit hashes are identical, we are on the latest version. Do not proceed to other checks.
logger.info( logger.info(
_("You are on the latest version: %s (compared to %s in channel %s)"), "You are on the latest version: %s (compared to %s in channel %s)",
local_version, local_version,
self._get_short_commit_hash(release_commit), self._get_short_commit_hash(release_commit),
self.config.update_check.channel, self.config.update_check.channel,
@@ -200,7 +199,7 @@ class UpdateChecker:
# All commit dates are in UTC; append ' UTC' to timestamps in logs for clarity. # All commit dates are in UTC; append ' UTC' to timestamps in logs for clarity.
if local_commit_date < release_commit_date: if local_commit_date < release_commit_date:
logger.warning( logger.warning(
_("A new version is available: %s from %s UTC (current: %s from %s UTC, channel: %s)"), "A new version is available: %s from %s UTC (current: %s from %s UTC, channel: %s)",
self._get_short_commit_hash(release_commit), self._get_short_commit_hash(release_commit),
release_commit_date.strftime("%Y-%m-%d %H:%M:%S"), release_commit_date.strftime("%Y-%m-%d %H:%M:%S"),
local_version, local_version,
@@ -208,13 +207,11 @@ class UpdateChecker:
self.config.update_check.channel, self.config.update_check.channel,
) )
if release.get("body"): if release.get("body"):
logger.info(_("Release notes:\n%s"), release["body"]) logger.info("Release notes:\n%s", release["body"])
else: else:
logger.info( logger.info(
_(
"You are on a different commit than the release for channel '%s' (tag: %s). This may mean you are ahead, behind, or on a different branch. " "You are on a different commit than the release for channel '%s' (tag: %s). This may mean you are ahead, behind, or on a different branch. "
"Local commit: %s (%s UTC), Release commit: %s (%s UTC)" "Local commit: %s (%s UTC), Release commit: %s (%s UTC)",
),
self.config.update_check.channel, self.config.update_check.channel,
release.get("tag_name", "unknown"), release.get("tag_name", "unknown"),
self._get_short_commit_hash(local_commit), self._get_short_commit_hash(local_commit),

View File

@@ -84,7 +84,7 @@ def load_dict(filepath:str, content_label:str = "") -> dict[str, Any]:
def load_dict_if_exists(filepath:str, content_label:str = "") -> dict[str, Any] | None: def load_dict_if_exists(filepath:str, content_label:str = "") -> dict[str, Any] | None:
abs_filepath = files.abspath(filepath) abs_filepath = files.abspath(filepath)
LOG.info("Loading %s[%s]...", content_label and content_label + _(" from ") or "", abs_filepath) LOG.info("Loading %s[%s]...", content_label and content_label + " from " or "", abs_filepath)
__, file_ext = os.path.splitext(filepath) __, file_ext = os.path.splitext(filepath)
if file_ext not in {".json", ".yaml", ".yml"}: if file_ext not in {".json", ".yaml", ".yml"}:

View File

@@ -232,7 +232,7 @@ class WebScrapingMixin:
try: try:
await self._validate_chrome_version_configuration() await self._validate_chrome_version_configuration()
except AssertionError as exc: except AssertionError as exc:
LOG.warning(_("Remote debugging detected, but browser configuration looks invalid: %s"), exc) LOG.warning("Remote debugging detected, but browser configuration looks invalid: %s", exc)
else: else:
await self._validate_chrome_version_configuration() await self._validate_chrome_version_configuration()
@@ -323,7 +323,7 @@ class WebScrapingMixin:
if browser_arg.startswith("--user-data-dir="): if browser_arg.startswith("--user-data-dir="):
raw = browser_arg.split("=", maxsplit = 1)[1].strip().strip('"').strip("'") raw = browser_arg.split("=", maxsplit = 1)[1].strip().strip('"').strip("'")
if not raw: if not raw:
LOG.warning(_("Ignoring empty --user-data-dir= argument; falling back to configured user_data_dir.")) LOG.warning("Ignoring empty --user-data-dir= argument; falling back to configured user_data_dir.")
continue continue
user_data_dir_from_args = raw user_data_dir_from_args = raw
continue continue
@@ -339,7 +339,7 @@ class WebScrapingMixin:
) )
if arg_path is None or cfg_path is None or arg_path != cfg_path: if arg_path is None or cfg_path is None or arg_path != cfg_path:
LOG.warning( LOG.warning(
_("Configured browser.user_data_dir (%s) does not match --user-data-dir argument (%s); using the argument value."), "Configured browser.user_data_dir (%s) does not match --user-data-dir argument (%s); using the argument value.",
self.browser_config.user_data_dir, self.browser_config.user_data_dir,
user_data_dir_from_args, user_data_dir_from_args,
) )
@@ -1091,7 +1091,7 @@ class WebScrapingMixin:
# From the Inputfield, get the attribute "aria-controls" which POINTS to the Dropdown ul #id: # From the Inputfield, get the attribute "aria-controls" which POINTS to the Dropdown ul #id:
dropdown_id = input_field.attrs.get("aria-controls") dropdown_id = input_field.attrs.get("aria-controls")
if not dropdown_id: if not dropdown_id:
LOG.error(_("Combobox input field does not have 'aria-controls' attribute.")) LOG.error("Combobox input field does not have 'aria-controls' attribute.")
raise TimeoutError(_("Combobox missing aria-controls attribute")) raise TimeoutError(_("Combobox missing aria-controls attribute"))
dropdown_elem = await self.web_find(By.ID, dropdown_id, timeout = timeout) dropdown_elem = await self.web_find(By.ID, dropdown_id, timeout = timeout)
@@ -1131,7 +1131,7 @@ class WebScrapingMixin:
}} }}
""") """)
if not ok: if not ok:
LOG.error(_("No matching option found in combobox: '%s'"), selected_value) LOG.error("No matching option found in combobox: '%s'", selected_value)
raise TimeoutError(_("No matching option found in combobox: '%s'") % selected_value) raise TimeoutError(_("No matching option found in combobox: '%s'") % selected_value)
await self.web_sleep() await self.web_sleep()