import json
import os
import time
import requests
import logging
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
from sopel import plugin
from apscheduler.schedulers.background import BackgroundScheduler
from functools import lru_cache
from typing import Dict, List, Optional
from dataclasses import dataclass, asdict
from threading import Lock
# Type definitions and data classes for better structure
@dataclass
class TruckStop:
    """One traffic-restriction (driving-ban) entry for a single country."""

    country: str   # country name as shown on the source page
    interval: str  # time interval of the restriction
    tonnage: str   # weight limit the restriction applies to
@dataclass
class Config:
    """Runtime configuration for the truck-stop plugin.

    BUG FIX: the original used curly "smart quotes" around the string
    defaults (a syntax error) and ``__post_init__`` unconditionally
    overwrote ``COUNTRIES``, clobbering any caller-supplied list.
    """

    # Where scraped results are persisted; parent dir is created on startup.
    JSON_FILE: str = os.path.expanduser("/home/ai/.sopel/plugins/notify/kamionstop.json")
    # Countries whose restrictions are reported; defaulted in __post_init__
    # to avoid a shared mutable class-level default.
    COUNTRIES: Optional[List[str]] = None
    # NOTE(review): this is a page *title*, not a URL — requests.get() will
    # fail on it. TODO: replace with the actual source URL.
    SOURCE_URL: str = "Teherautóra vonatkozó forgalmi korlátozások Európában"
    TARGET_CHANNEL: str = "#Magyar"
    MENTION_USER: str = "@Zsolt"

    def __post_init__(self) -> None:
        # Only apply the default when no list was supplied, so an explicit
        # COUNTRIES argument is respected.
        if self.COUNTRIES is None:
            self.COUNTRIES = ["Ausztria", "Németország", "Belgium"]
class TruckStopBot:
    """Scrapes, stores, and formats European truck driving-ban data."""

    def __init__(self) -> None:
        # BUG FIX: the original defined `def init`, so the constructor never
        # ran and no instance attribute was ever initialized.
        self.config = Config()
        self.logger = self._setup_logger()
        self.data_lock = Lock()
        self.scheduler = None
        # Lazily created HTTP session (see _get_session).
        self._session = None
        self._setup_directories()

    def _setup_logger(self) -> logging.Logger:
        """Return a module logger with a single stream handler attached."""
        logger = logging.getLogger(__name__)
        # Guard against duplicate handlers if the plugin is reloaded.
        if not logger.handlers:
            handler = logging.StreamHandler()
            handler.setFormatter(
                logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
            )
            logger.addHandler(handler)
            logger.setLevel(logging.INFO)
        return logger

    def _setup_directories(self) -> None:
        """Ensure the directory holding the JSON data file exists."""
        os.makedirs(os.path.dirname(self.config.JSON_FILE), exist_ok=True)

    def _get_session(self) -> "requests.Session":
        """Return a lazily created, per-instance HTTP session.

        BUG FIX: the original decorated this method with ``lru_cache``,
        which keys on ``self`` and keeps the instance alive for the cache's
        lifetime (ruff B019). A plain instance attribute gives the same
        one-session-per-bot behavior without the leak.
        """
        if self._session is None:
            self._session = requests.Session()
            self._session.headers.update({
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
            })
        return self._session

    def load_data(self) -> Dict:
        """Load the persisted stop data; thread-safe.

        Returns an empty dict when the file is missing or unreadable.
        """
        with self.data_lock:
            try:
                if os.path.exists(self.config.JSON_FILE):
                    with open(self.config.JSON_FILE, "r", encoding="utf-8") as f:
                        return json.load(f)
            except Exception as e:
                self.logger.error(f"Error loading data: {e}")
            return {}

    def save_data(self, data: Dict) -> None:
        """Persist *data* to the JSON file; thread-safe, errors are logged."""
        with self.data_lock:
            try:
                with open(self.config.JSON_FILE, "w", encoding="utf-8") as f:
                    json.dump(data, f, ensure_ascii=False, indent=4)
            except Exception as e:
                self.logger.error(f"Error saving data: {e}")

    def fetch_truck_stops(self) -> Dict:
        """Fetch and parse truck-stop data from the source page.

        Returns a mapping of ISO date -> list of stop dicts; empty dict on
        any network or parsing failure.
        """
        try:
            session = self._get_session()
            response = session.get(self.config.SOURCE_URL, timeout=10)
            response.raise_for_status()
            soup = BeautifulSoup(response.text, "html.parser")
            stops = {}
            # The page is assumed to group tables under <h3> date headings —
            # confirm against the live page markup.
            for date_header in soup.find_all("h3"):
                date = self._parse_date(date_header.get_text(strip=True))
                if not date:
                    continue
                table = date_header.find_next("table", class_="table")
                if not table:
                    continue
                stops[date] = self._parse_table(table)
            return stops
        except Exception as e:
            self.logger.error(f"Error fetching data: {e}")
            return {}

    def _parse_date(self, date_text: str) -> Optional[str]:
        """Parse a date heading into ``YYYY-MM-DD``; None if unparseable.

        BUG FIX: the original stripped all dots *before* trying the dotted
        format ``"%Y. %B %d"``, which therefore could never match. Both the
        raw and the dot-stripped candidates are now tried.

        NOTE(review): ``%B`` matches month names in the current locale; the
        source page is Hungarian, so this presumably relies on a hu_HU
        locale being active — confirm.
        """
        # Drop any parenthesised suffix (e.g. a weekday name).
        base = date_text.split("(")[0].strip()
        for candidate in (base, base.replace('.', '')):
            for fmt in ("%Y. %B %d", "%Y %B %d"):
                try:
                    return datetime.strptime(candidate, fmt).strftime("%Y-%m-%d")
                except ValueError:
                    continue
        return None

    def _parse_table(self, table) -> List[Dict]:
        """Extract stop rows for the configured countries from *table*."""
        stops = []
        for row in table.find_all("tr"):
            cols = row.find_all("td")
            # Rows need at least country / interval / tonnage cells.
            if len(cols) >= 3:
                # The country cell may carry a prefix (e.g. a flag); the
                # last whitespace-separated token is taken as the name.
                country = cols[0].get_text(strip=True).split(" ")[-1]
                if country in self.config.COUNTRIES:
                    stop = TruckStop(
                        country=country,
                        interval=cols[1].get_text(strip=True),
                        tonnage=cols[2].get_text(strip=True)
                    )
                    stops.append(asdict(stop))
        return stops

    def update_data(self) -> None:
        """Fetch fresh data and persist it if the fetch succeeded."""
        new_data = self.fetch_truck_stops()
        if new_data:
            self.save_data(new_data)
            self.logger.info("Truck stop data updated successfully")

    def get_weekly_stops(self, start_date: datetime) -> Dict:
        """Return stops within the 7-day window starting at *start_date*."""
        data = self.load_data()
        if not data:
            return {}
        end_date = start_date + timedelta(days=6)
        # Keys are ISO dates, so lexicographic comparison equals date order.
        return {
            k: v for k, v in data.items()
            if start_date.strftime("%Y-%m-%d") <= k <= end_date.strftime("%Y-%m-%d")
        }

    def format_stop_message(self, date: str, stop: Dict, prefix: str = "") -> str:
        """Render one stop entry as a single IRC message line."""
        return (f"{prefix}*{date}*: - *{stop['country']}*: "
                f"Intervallum: {stop['interval']} - Raksúly: {stop['tonnage']}")
# Plugin commands and setup
# Module-level singleton shared by the plugin callbacks and scheduler hooks.
bot_instance = TruckStopBot()
@plugin.rule(r'^[(\S+)]!stop')
def check_weekly_stops(bot, trigger):
    r"""Report this week's truck stops to the target channel.

    BUG FIX: the original used curly quotes around the raw string and the
    docstring, which is a syntax error.

    NOTE(review): the pattern ``^[(\S+)]!stop`` is a *character class*, so
    it matches one character from ``( \S + )`` followed by ``!stop``;
    presumably ``^\[(\S+)\]!stop`` was intended — confirm against real
    messages before changing it.
    """
    now = datetime.now()
    weekly_stops = bot_instance.get_weekly_stops(now)
    if not weekly_stops:
        bot.say(f"{bot_instance.config.MENTION_USER} 🚛 **Nincs elérhető kamionstop adat!**",
                bot_instance.config.TARGET_CHANNEL)
        return
    bot.say(f"{bot_instance.config.MENTION_USER} 🚛 **Ez a hét kamionstopjai:**",
            bot_instance.config.TARGET_CHANNEL)
    for date, stops in sorted(weekly_stops.items()):
        for stop in stops:
            time.sleep(0.5)  # Rate limiting between channel messages
            bot.say(bot_instance.format_stop_message(date, stop),
                    bot_instance.config.TARGET_CHANNEL)
@plugin.rule(r'^[(\S+)]!upstop')
def update_truck_stop_data(bot, trigger):
    """Trigger an on-demand data refresh and report progress to the channel.

    BUG FIX: the original had curly quotes and an f-string literal broken
    across a physical newline (a syntax error — an emoji was presumably
    lost in transit); the messages are rejoined onto single lines.
    """
    bot.say(f"{bot_instance.config.MENTION_USER} Kamionstop adat frissítése…",
            bot_instance.config.TARGET_CHANNEL)
    bot_instance.update_data()
    bot.say(f"{bot_instance.config.MENTION_USER} Kamionstop adatok frissítve!",
            bot_instance.config.TARGET_CHANNEL)
def setup(bot):
    """Sopel hook: start a daily background refresh via APScheduler.

    BUG FIX: the original used curly quotes around ``'interval'`` and the
    log message (syntax errors).
    """
    try:
        bot_instance.scheduler = BackgroundScheduler()
        bot_instance.scheduler.add_job(
            bot_instance.update_data,
            'interval',
            days=1,
            next_run_time=datetime.now()  # also run once immediately on startup
        )
        bot_instance.scheduler.start()
        bot_instance.logger.info("Scheduler started successfully")
    except Exception as e:
        bot_instance.logger.error(f"Error starting scheduler: {e}")
def shutdown(bot):
    """Sopel hook: stop the background scheduler on plugin unload.

    BUG FIX: the original used curly quotes in the docstring and the log
    message (syntax errors).
    """
    if bot_instance.scheduler:
        bot_instance.scheduler.shutdown()
        bot_instance.logger.info("Scheduler shut down successfully")