Skip to content

Commit

Permalink
Simplified support for custom timezones
Browse files Browse the repository at this point in the history
  • Loading branch information
TheophileDiot committed Aug 20, 2024
1 parent b2976de commit 904166b
Show file tree
Hide file tree
Showing 25 changed files with 90 additions and 152 deletions.
5 changes: 2 additions & 3 deletions src/autoconf/Config.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
from time import sleep
from typing import Any, Dict, List, Optional

from common_utils import get_timezone # type: ignore
from Database import Database # type: ignore
from logger import setup_logger # type: ignore

Expand Down Expand Up @@ -83,9 +82,9 @@ def have_to_wait(self) -> bool:
)

def wait_applying(self, startup: bool = False):
current_time = datetime.now(get_timezone())
current_time = datetime.now()
ready = False
while not ready and (datetime.now(get_timezone()) - current_time).seconds < 240:
while not ready and (datetime.now() - current_time).seconds < 240:
db_metadata = self._db.get_metadata()
if isinstance(db_metadata, str):
if not startup:
Expand Down
2 changes: 1 addition & 1 deletion src/autoconf/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ FROM python:3.12.5-alpine@sha256:c2f41e6a5a67bc39b95be3988dd19fbd05d1b82375c46d9
RUN umask 027

# Install bash and create autoconf user
RUN apk add --no-cache bash && \
RUN apk add --no-cache bash tzdata && \
addgroup -g 101 autoconf && \
adduser -h /var/cache/autoconf -g autoconf -s /bin/sh -G autoconf -D -H -u 101 autoconf

Expand Down
2 changes: 1 addition & 1 deletion src/bw/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ FROM nginx:1.26.2-alpine-slim@sha256:28967af9fa8d5e1c58a45feeb35e2f326bb6d99b120
RUN umask 027

# Install runtime dependencies
RUN apk add --no-cache openssl pcre bash python3 yajl geoip libxml2 libgd curl
RUN apk add --no-cache openssl pcre bash python3 yajl geoip libxml2 libgd curl tzdata

# Fix CVEs
RUN apk add --no-cache "busybox>=1.36.1-r17" "busybox-binsh>=1.36.1-r17" "ssl_client>=1.36.1-r17" # CVE-2023-42363 CVE-2023-42366
Expand Down
4 changes: 1 addition & 3 deletions src/common/core/backup/bwcli/restore.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@

from utils import acquire_db_lock, backup_database, BACKUP_DIR, DB_LOCK_FILE, LOGGER, restore_database

from common_utils import get_timezone # type: ignore

status = 0

try:
Expand Down Expand Up @@ -51,7 +49,7 @@
sys_exit(1)

LOGGER.info("Backing up the current database before restoring the backup ...")
current_time = datetime.now(get_timezone())
current_time = datetime.now()
tmp_backup_dir = Path(sep, "tmp", "bunkerweb", "backups")
tmp_backup_dir.mkdir(parents=True, exist_ok=True)
db = backup_database(current_time, backup_dir=tmp_backup_dir)
Expand Down
4 changes: 1 addition & 3 deletions src/common/core/backup/bwcli/save.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@

from utils import acquire_db_lock, backup_database, BACKUP_DIR, DB_LOCK_FILE, LOGGER

from common_utils import get_timezone # type: ignore

status = 0

try:
Expand All @@ -41,7 +39,7 @@
LOGGER.info(f"Creating directory {directory} as it does not exist")
directory.mkdir(parents=True, exist_ok=True)

backup_database(datetime.now(get_timezone()), backup_dir=directory)
backup_database(datetime.now(), backup_dir=directory)
except SystemExit as se:
status = se.code
except:
Expand Down
4 changes: 1 addition & 3 deletions src/common/core/backup/jobs/backup-data.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,6 @@
from jobs import Job # type: ignore
from utils import backup_database

from common_utils import get_timezone # type: ignore

LOGGER = setup_logger("BACKUP", getenv("LOG_LEVEL", "INFO"))
status = 0

Expand All @@ -37,7 +35,7 @@
if last_backup_date:
last_backup_date = datetime.fromisoformat(last_backup_date)

current_time = datetime.now(get_timezone())
current_time = datetime.now()
backup_period = getenv("BACKUP_SCHEDULE", "daily")
PERIOD_STAMPS = {
"daily": timedelta(days=1).total_seconds(),
Expand Down
10 changes: 5 additions & 5 deletions src/common/core/backup/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
if deps_path not in sys_path:
sys_path.append(deps_path)

from common_utils import bytes_hash, get_timezone # type: ignore
from common_utils import bytes_hash # type: ignore
from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from model import Base # type: ignore
Expand All @@ -30,7 +30,7 @@

def acquire_db_lock():
"""Acquire the database lock to prevent concurrent access to the database."""
current_time = datetime.now(get_timezone())
current_time = datetime.now()
while DB_LOCK_FILE.is_file() and DB_LOCK_FILE.stat().st_ctime + 30 > current_time.timestamp():
LOGGER.warning("Database is locked, waiting for it to be unlocked (timeout: 30s) ...")
sleep(1)
Expand All @@ -46,9 +46,9 @@ def backup_database(current_time: datetime, db: Database = None, backup_dir: Pat
backup_file = backup_dir.joinpath(f"backup-{database}-{current_time.strftime('%Y-%m-%d_%H-%M-%S')}.zip")
LOGGER.debug(f"Backup file path: {backup_file}")
stderr = "Table 'db.test_"
current_time = datetime.now(get_timezone())
current_time = datetime.now()

while "Table 'db.test_" in stderr and (datetime.now(get_timezone()) - current_time).total_seconds() < 10:
while "Table 'db.test_" in stderr and (datetime.now() - current_time).total_seconds() < 10:
if database == "sqlite":
match = DB_STRING_RX.search(db.database_uri)
if not match:
Expand Down Expand Up @@ -94,7 +94,7 @@ def backup_database(current_time: datetime, db: Database = None, backup_dir: Pat
LOGGER.error(f"Failed to dump the database: {stderr}")
sys_exit(1)

if (datetime.now(get_timezone()) - current_time).total_seconds() >= 10:
if (datetime.now() - current_time).total_seconds() >= 10:
LOGGER.error("Failed to dump the database: Timeout reached")
sys_exit(1)

Expand Down
4 changes: 2 additions & 2 deletions src/common/core/jobs/jobs/mmdb-asn.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from requests import RequestException, Response, get

from logger import setup_logger # type: ignore
from common_utils import bytes_hash, get_timezone, file_hash # type: ignore
from common_utils import bytes_hash, file_hash # type: ignore
from jobs import Job # type: ignore

LOGGER = setup_logger("JOBS.mmdb-asn", getenv("LOG_LEVEL", "INFO"))
Expand Down Expand Up @@ -62,7 +62,7 @@ def request_mmdb() -> Optional[Response]:

if response and response.status_code == 200:
skip_dl = response.content.find(bytes_hash(job_cache["data"], algorithm="sha1").encode()) != -1
elif job_cache["last_update"] < (datetime.now(get_timezone()) - timedelta(weeks=1)).timestamp():
elif job_cache["last_update"] < (datetime.now() - timedelta(weeks=1)).timestamp():
LOGGER.warning("Unable to check if the cache file is the latest version from db-ip.com and file is older than 1 week, checking anyway...")
skip_dl = False

Expand Down
4 changes: 2 additions & 2 deletions src/common/core/jobs/jobs/mmdb-country.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
from requests import RequestException, Response, get

from logger import setup_logger # type: ignore
from common_utils import bytes_hash, get_timezone, file_hash # type: ignore
from common_utils import bytes_hash, file_hash # type: ignore
from jobs import Job # type: ignore

LOGGER = setup_logger("JOBS.mmdb-country", getenv("LOG_LEVEL", "INFO"))
Expand Down Expand Up @@ -62,7 +62,7 @@ def request_mmdb() -> Optional[Response]:

if response and response.status_code == 200:
skip_dl = response.content.find(bytes_hash(job_cache["data"], algorithm="sha1").encode()) != -1
elif job_cache["last_update"] < (datetime.now(get_timezone()) - timedelta(weeks=1)).timestamp():
elif job_cache["last_update"] < (datetime.now() - timedelta(weeks=1)).timestamp():
LOGGER.warning("Unable to check if the cache file is the latest version from db-ip.com and file is older than 1 week, checking anyway...")
skip_dl = False

Expand Down
4 changes: 2 additions & 2 deletions src/common/core/pro/jobs/download-pro-plugins.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

from Database import Database # type: ignore
from logger import setup_logger # type: ignore
from common_utils import bytes_hash, get_os_info, get_integration, get_timezone, get_version # type: ignore
from common_utils import bytes_hash, get_os_info, get_integration, get_version # type: ignore

API_ENDPOINT = "https://api.bunkerweb.io"
PREVIEW_ENDPOINT = "https://assets.bunkerity.com/bw-pro/preview"
Expand Down Expand Up @@ -95,7 +95,7 @@ def install_plugin(plugin_path: Path, db, preview: bool = True) -> bool:
try:
db = Database(LOGGER, sqlalchemy_string=getenv("DATABASE_URI"))
db_metadata = db.get_metadata()
current_date = datetime.now(get_timezone())
current_date = datetime.now()
pro_license_key = getenv("PRO_LICENSE_KEY", "").strip()

LOGGER.info("Checking BunkerWeb Pro status...")
Expand Down
44 changes: 22 additions & 22 deletions src/common/db/Database.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
if deps_path not in sys_path:
sys_path.append(deps_path)

from common_utils import bytes_hash, get_timezone # type: ignore
from common_utils import bytes_hash # type: ignore

from pymysql import install_as_MySQLdb
from sqlalchemy import create_engine, event, MetaData as sql_metadata, func, join, select as db_select, text, inspect
Expand Down Expand Up @@ -168,7 +168,7 @@ def __init__(

DATABASE_RETRY_TIMEOUT = int(DATABASE_RETRY_TIMEOUT)

current_time = datetime.now(get_timezone())
current_time = datetime.now()
not_connected = True
fallback = False

Expand All @@ -185,7 +185,7 @@ def __init__(

not_connected = False
except (OperationalError, DatabaseError) as e:
if (datetime.now(get_timezone()) - current_time).total_seconds() > DATABASE_RETRY_TIMEOUT:
if (datetime.now() - current_time).total_seconds() > DATABASE_RETRY_TIMEOUT:
if not fallback and self.database_uri_readonly:
self.logger.error(f"Can't connect to database after {DATABASE_RETRY_TIMEOUT} seconds. Falling back to read-only database connection")
self.sql_engine.dispose(close=True)
Expand Down Expand Up @@ -241,7 +241,7 @@ def test_write(self):

def retry_connection(self, *, readonly: bool = False, fallback: bool = False, log: bool = True, **kwargs) -> None:
"""Retry the connection to the database"""
self.last_connection_retry = datetime.now(get_timezone())
self.last_connection_retry = datetime.now()

if log:
self.logger.debug(f"Retrying the connection to the database{' in read-only mode' if readonly else ''}{' with fallback' if fallback else ''} ...")
Expand Down Expand Up @@ -476,7 +476,7 @@ def checked_changes(
if not metadata:
return "The metadata are not set yet, try again"

current_time = datetime.now(get_timezone())
current_time = datetime.now()

if "config" in changes:
if not metadata.first_config_saved:
Expand Down Expand Up @@ -536,7 +536,7 @@ def init_tables(self, default_plugins: List[dict], bunkerweb_version: str) -> Tu
db_ui_version = db_version

self.logger.warning(f"Database version ({db_version}) is different from Bunkerweb version ({bunkerweb_version}), migrating ...")
current_time = datetime.now(get_timezone())
current_time = datetime.now()
error = True
# ? Wait for the metadata to be available
while error:
Expand All @@ -545,7 +545,7 @@ def init_tables(self, default_plugins: List[dict], bunkerweb_version: str) -> Tu
metadata.reflect(self.sql_engine)
error = False
except BaseException as e:
if (datetime.now(get_timezone()) - current_time).total_seconds() > 10:
if (datetime.now() - current_time).total_seconds() > 10:
raise e
sleep(1)

Expand Down Expand Up @@ -1328,7 +1328,7 @@ def save_config(self, config: Dict[str, Any], method: str, changed: Optional[boo
session.query(Custom_configs).filter(Custom_configs.service_id.in_(missing_ids)).delete()
session.query(Jobs_cache).filter(Jobs_cache.service_id.in_(missing_ids)).delete()
session.query(Metadata).filter_by(id=1).update(
{Metadata.custom_configs_changed: True, Metadata.last_custom_configs_change: datetime.now(get_timezone())}
{Metadata.custom_configs_changed: True, Metadata.last_custom_configs_change: datetime.now()}
)
changed_services = True

Expand Down Expand Up @@ -1672,7 +1672,7 @@ def save_custom_configs(
metadata = session.query(Metadata).get(1)
if metadata is not None:
metadata.custom_configs_changed = True
metadata.last_custom_configs_change = datetime.now(get_timezone())
metadata.last_custom_configs_change = datetime.now()

try:
session.add_all(to_put)
Expand Down Expand Up @@ -1994,7 +1994,7 @@ def add_job_run(self, job_name: str, success: bool, start_date: datetime, end_da
if self.readonly:
return "The database is read-only, the changes will not be saved"

session.add(Jobs_runs(job_name=job_name, success=success, start_date=start_date, end_date=end_date or datetime.now(get_timezone())))
session.add(Jobs_runs(job_name=job_name, success=success, start_date=start_date, end_date=end_date or datetime.now()))

try:
session.commit()
Expand Down Expand Up @@ -2062,13 +2062,13 @@ def upsert_job_cache(
service_id=service_id,
file_name=file_name,
data=data,
last_update=datetime.now(get_timezone()),
last_update=datetime.now(),
checksum=checksum,
)
)
else:
cache.data = data
cache.last_update = datetime.now(get_timezone())
cache.last_update = datetime.now()
cache.checksum = checksum

try:
Expand Down Expand Up @@ -2858,10 +2858,10 @@ def update_external_plugins(
if metadata is not None:
if _type in ("external", "ui"):
metadata.external_plugins_changed = True
metadata.last_external_plugins_change = datetime.now(get_timezone())
metadata.last_external_plugins_change = datetime.now()
elif _type == "pro":
metadata.pro_plugins_changed = True
metadata.last_pro_plugins_change = datetime.now(get_timezone())
metadata.last_pro_plugins_change = datetime.now()

try:
session.add_all(to_put)
Expand Down Expand Up @@ -2902,10 +2902,10 @@ def delete_plugin(self, plugin_id: str, method: str) -> str:
if metadata is not None:
if method in ("external", "ui"):
metadata.external_plugins_changed = True
metadata.last_external_plugins_change = datetime.now(get_timezone())
metadata.last_external_plugins_change = datetime.now()
elif method == "pro":
metadata.pro_plugins_changed = True
metadata.last_pro_plugins_change = datetime.now(get_timezone())
metadata.last_pro_plugins_change = datetime.now()

try:
session.commit()
Expand Down Expand Up @@ -3127,7 +3127,7 @@ def add_instance(self, hostname: str, port: int, server_name: str, method: str,
if db_instance is not None:
return f"Instance {hostname} already exists, will not be added."

current_time = datetime.now(get_timezone())
current_time = datetime.now()
session.add(
Instances(
hostname=hostname,
Expand All @@ -3145,7 +3145,7 @@ def add_instance(self, hostname: str, port: int, server_name: str, method: str,
metadata = session.query(Metadata).get(1)
if metadata is not None:
metadata.instances_changed = True
metadata.last_instances_change = datetime.now(get_timezone())
metadata.last_instances_change = datetime.now()

try:
session.commit()
Expand All @@ -3172,7 +3172,7 @@ def delete_instance(self, hostname: str, changed: Optional[bool] = True) -> str:
metadata = session.query(Metadata).get(1)
if metadata is not None:
metadata.instances_changed = True
metadata.last_instances_change = datetime.now(get_timezone())
metadata.last_instances_change = datetime.now()

try:
session.commit()
Expand All @@ -3194,7 +3194,7 @@ def update_instances(self, instances: List[Dict[str, Any]], method: str, changed
if instance.get("hostname") is None:
continue

current_time = datetime.now(get_timezone())
current_time = datetime.now()
to_put.append(
Instances(
hostname=instance["hostname"],
Expand All @@ -3214,7 +3214,7 @@ def update_instances(self, instances: List[Dict[str, Any]], method: str, changed
metadata = session.query(Metadata).get(1)
if metadata is not None:
metadata.instances_changed = True
metadata.last_instances_change = datetime.now(get_timezone())
metadata.last_instances_change = datetime.now()

try:
session.add_all(to_put)
Expand All @@ -3236,7 +3236,7 @@ def update_instance(self, hostname: str, status: str) -> str:
return f"Instance {hostname} does not exist, will not be updated."

db_instance.status = status
db_instance.last_seen = datetime.now(get_timezone())
db_instance.last_seen = datetime.now()

try:
session.commit()
Expand Down
1 change: 1 addition & 0 deletions src/common/gen/Configurator.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,7 @@ def get_config(self, db=None) -> Dict[str, str]:
"SHLVL",
"SERVER_SOFTWARE",
"NAMESPACE",
"TZ",
)
):
self.__logger.warning(f"Ignoring variable {variable} : {err} - {value = !r}")
Expand Down
Loading

0 comments on commit 904166b

Please sign in to comment.