aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--CHANGELOG.rst19
-rw-r--r--fietsboek/__init__.py22
-rw-r--r--fietsboek/actions.py44
-rw-r--r--fietsboek/data.py160
-rw-r--r--fietsboek/fstrans.py417
-rw-r--r--fietsboek/updater/scripts/upd_20230103_lu8w3rwlz4ddcpms.py68
-rw-r--r--fietsboek/updater/scripts/upd_20251109_nm561argcq1s8w27.py211
-rw-r--r--fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py46
-rw-r--r--fietsboek/views/edit.py77
-rw-r--r--fietsboek/views/upload.py9
-rw-r--r--tests/conftest.py2
11 files changed, 728 insertions, 347 deletions
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 8b26ca1..ead9c42 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -7,12 +7,29 @@ Unreleased
Added
^^^^^
-- Added opengraph tags for better previews
+- Added opengraph tags for better previews.
+- Added PDF renderings of tracks.
+
+Changed
+^^^^^^^
+
+- ``hittekaart`` is now built as a dependency, no longer an external binary.
+- Tracks are now stored in the database.
Fixed
^^^^^
- Backup GPX files are now included in the storage breakdown.
+- No more warnings about ``pkg_resources`` being deprecated.
+- Proper deletion of (partial) track data if something fails during the upload.
+
+Removed
+^^^^^^^
+
+- The ``hittekaart.bin`` configuration setting (not needed anymore, since it's
+ a dependency now).
+- ``Content-Encoding: br`` for GPX files (not worth anymore, as we need to
+ compress on-the-fly).
0.11.0 - 2025-06-18
-------------------
diff --git a/fietsboek/__init__.py b/fietsboek/__init__.py
index 2ac41e3..4c3a340 100644
--- a/fietsboek/__init__.py
+++ b/fietsboek/__init__.py
@@ -31,6 +31,7 @@ from pyramid.response import Response
from pyramid.session import SignedCookieSessionFactory
from . import config as mod_config
+from . import fstrans
from . import jinja2 as mod_jinja2
from . import transformers
from .data import DataManager
@@ -92,7 +93,10 @@ def maintenance_mode(
"""
def tween(request: Request) -> Response:
- maintenance = request.data_manager.maintenance_mode()
+        # We don't want to mess around with transactioned data managers here,
+        # so we create a new one without a transaction
+ data_manager = DataManager(Path(request.config.data_dir))
+ maintenance = data_manager.maintenance_mode()
if maintenance is None:
return handler(request)
@@ -139,6 +143,17 @@ def check_db_engine(sqlalchemy_uri: str):
)
+def create_data_folders(data_dir: Path):
+ """Creates the subfolders of the data directory.
+
+ :param data_dir: Path to the data directory, from the config.
+ """
+ LOGGER.debug("Creating %s/tracks/", data_dir)
+ (data_dir / "tracks").mkdir(exist_ok=True)
+ LOGGER.debug("Creating %s/users/", data_dir)
+ (data_dir / "users").mkdir(exist_ok=True)
+
+
def main(global_config, **settings):
"""This function returns a Pyramid WSGI application."""
# Avoid a circular import by not importing at the top level
@@ -153,9 +168,12 @@ def main(global_config, **settings):
settings["jinja2.newstyle"] = True
check_db_engine(parsed_config.sqlalchemy_url)
+ create_data_folders(parsed_config.data_dir)
def data_manager(request):
- return DataManager(Path(request.config.data_dir))
+ data_dir = Path(request.config.data_dir)
+ lock_file = data_dir / "lock"
+ return DataManager(data_dir, txn=fstrans.begin(lock_file, request.tm))
def redis_(request):
return redis.from_url(request.config.redis_url)
diff --git a/fietsboek/actions.py b/fietsboek/actions.py
index 2d23d97..cdddaa2 100644
--- a/fietsboek/actions.py
+++ b/fietsboek/actions.py
@@ -93,29 +93,27 @@ def add_track(
# Save the GPX data
LOGGER.debug("Creating a new data folder for %d", track.id)
assert track.id is not None
- with data_manager.initialize(track.id) as manager:
- LOGGER.debug("Saving backup to %s", manager.backup_path())
- manager.compress_backup(gpx_data)
-
- for transformer in transformers:
- LOGGER.debug("Running %s with %r", transformer, transformer.parameters)
- transformer.execute(path)
- track.transformers = [
- [tfm.identifier(), tfm.parameters.model_dump()] for tfm in transformers
- ]
-
- track.fast_set_path(path)
-
- # Best time to build the cache is right after the upload, but *after* the
- # transformers have been applied!
- track.ensure_cache(path)
- dbsession.add(track.cache)
-
- LOGGER.debug("Building preview image for %s", track.id)
- preview_image = trackmap.render(path, layer, tile_requester)
- image_io = io.BytesIO()
- preview_image.save(image_io, "PNG")
- manager.set_preview(image_io.getvalue())
+ manager = data_manager.initialize(track.id)
+ LOGGER.debug("Saving backup to %s", manager.backup_path())
+ manager.compress_backup(gpx_data)
+
+ for transformer in transformers:
+ LOGGER.debug("Running %s with %r", transformer, transformer.parameters)
+ transformer.execute(path)
+ track.transformers = [[tfm.identifier(), tfm.parameters.model_dump()] for tfm in transformers]
+
+ track.fast_set_path(path)
+
+ # Best time to build the cache is right after the upload, but *after* the
+ # transformers have been applied!
+ track.ensure_cache(path)
+ dbsession.add(track.cache)
+
+ LOGGER.debug("Building preview image for %s", track.id)
+ preview_image = trackmap.render(path, layer, tile_requester)
+ image_io = io.BytesIO()
+ preview_image.save(image_io, "PNG")
+ manager.set_preview(image_io.getvalue())
return track
diff --git a/fietsboek/data.py b/fietsboek/data.py
index 3fcd922..d4bbb07 100644
--- a/fietsboek/data.py
+++ b/fietsboek/data.py
@@ -13,12 +13,13 @@ import shutil
import string
import uuid
from pathlib import Path
-from typing import BinaryIO, Literal, Optional
+from typing import BinaryIO, Optional
import brotli
from filelock import FileLock
from . import util
+from .fstrans import Transaction
LOGGER = logging.getLogger(__name__)
@@ -50,8 +51,9 @@ class DataManager:
:ivar data_dir: Path to the data folder.
"""
- def __init__(self, data_dir: Path):
+ def __init__(self, data_dir: Path, *, txn: Transaction | None = None):
self.data_dir: Path = data_dir
+ self.txn = txn
def _track_data_dir(self, track_id):
return self.data_dir / "tracks" / str(track_id)
@@ -81,8 +83,11 @@ class DataManager:
:return: The manager that can be used to manage this track's data.
"""
path = self._track_data_dir(track_id)
- path.mkdir(parents=True)
- return TrackDataDir(track_id, path, journal=True, is_fresh=True)
+ if self.txn:
+ self.txn.make_dir(path)
+ else:
+ path.mkdir(parents=True)
+ return TrackDataDir(track_id, path, txn=self.txn)
def initialize_user(self, user_id: int) -> "UserDataDir":
"""Creates the data directory for a user.
@@ -92,8 +97,11 @@ class DataManager:
:return: The manager that can be used to manage this user's data.
"""
path = self._user_data_dir(user_id)
- path.mkdir(parents=True)
- return UserDataDir(user_id, path)
+ if self.txn:
+ self.txn.make_dir(path)
+ else:
+ path.mkdir(parents=True)
+ return UserDataDir(user_id, path, txn=self.txn)
def purge(self, track_id: int):
"""Forcefully purges all data from the given track.
@@ -101,9 +109,9 @@ class DataManager:
This function logs errors but raises no exception, as such it can
always be used to clean up after a track.
"""
- TrackDataDir(track_id, self._track_data_dir(track_id)).purge()
+ TrackDataDir(track_id, self._track_data_dir(track_id), txn=self.txn).purge()
- def open(self, track_id: int) -> "TrackDataDir":
+ def open(self, track_id: int, *, force: bool = False) -> "TrackDataDir":
"""Opens a track's data directory.
:raises FileNotFoundError: If the track directory does not exist.
@@ -111,9 +119,9 @@ class DataManager:
:return: The manager that can be used to manage this track's data.
"""
path = self._track_data_dir(track_id)
- if not path.is_dir():
+ if not force and not path.is_dir():
raise FileNotFoundError(f"The path {path} is not a directory") from None
- return TrackDataDir(track_id, path)
+ return TrackDataDir(track_id, path, txn=self.txn)
def open_user(self, user_id: int) -> "UserDataDir":
"""Opens a user's data directory.
@@ -125,7 +133,7 @@ class DataManager:
path = self._user_data_dir(user_id)
if not path.is_dir():
raise FileNotFoundError(f"The path {path} is not a directory") from None
- return UserDataDir(user_id, path)
+ return UserDataDir(user_id, path, txn=self.txn)
def size(self) -> int:
"""Returns the size of all data.
@@ -164,86 +172,10 @@ class TrackDataDir:
semantics, use ``journal = False``.
"""
- def __init__(self, track_id: int, path: Path, *, journal: bool = False, is_fresh: bool = False):
+ def __init__(self, track_id: int, path: Path, *, txn: Transaction | None = None):
self.track_id: int = track_id
self.path: Path = path
- self.journal: Optional[list] = [] if journal else None
- self.is_fresh = is_fresh
-
- def __enter__(self) -> "TrackDataDir":
- if self.journal is None:
- self.journal = []
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb) -> Literal[False]:
- if exc_type is None and exc_val is None and exc_tb is None:
- self.commit()
- else:
- self.rollback()
- return False
-
- def rollback(self):
- """Rolls back the journal, e.g. in case of error.
-
- :raises ValueError: If the data directory was opened without the
- journal, this raises :exc:`ValueError`.
- """
- LOGGER.debug("Rolling back state of %s", self.path)
-
- if self.journal is None:
- raise ValueError("Rollback on a non-journalling data directory")
-
- if self.is_fresh:
- # Shortcut if the directory is fresh, simply remove everything
- self.journal = None
- self.purge()
- return
-
- for action, *rest in reversed(self.journal):
- if action == "purge":
- (new_name,) = rest
- shutil.move(new_name, self.path)
- elif action == "add_image":
- (image_path,) = rest
- image_path.unlink()
- elif action == "delete_image":
- path, data = rest
- path.write_bytes(data)
- elif action == "set_preview":
- old_data = rest
- if old_data is None:
- self.preview_path().unlink()
- else:
- self.preview_path().write_bytes(old_data)
-
- self.journal = None
-
- def commit(self):
- """Commits all changed and deletes the journal.
-
- Note that this function will do nothing if the journal is disabled,
- meaning it can always be called.
- """
- LOGGER.debug("Committing journal for %s", self.path)
-
- if self.journal is None:
- return
-
- for action, *rest in reversed(self.journal):
- if action == "purge":
- (new_name,) = rest
- shutil.rmtree(new_name, ignore_errors=False, onerror=self._log_deletion_error)
- elif action == "add_image":
- # Nothing to do here, the image is already saved
- pass
- elif action == "delete_image":
- # Again, nothing to do here, we simply discard the in-memory image data
- pass
- elif action == "set_preview":
- # Still nothing to do here
- pass
-
- self.journal = None
+ self.txn = txn
def lock(self) -> FileLock:
"""Returns a FileLock that can be used to lock access to the track's
@@ -263,13 +195,11 @@ class TrackDataDir:
This function logs errors but raises no exception, as such it can
always be used to clean up after a track.
"""
- if self.journal is None:
+ if self.txn:
+ self.txn.purge(self.path)
+ else:
if self.path.is_dir():
shutil.rmtree(self.path, ignore_errors=False, onerror=self._log_deletion_error)
- else:
- new_name = self.path.with_name("trash-" + self.path.name)
- shutil.move(self.path, new_name)
- self.journal.append(("purge", new_name))
def size(self) -> int:
"""Returns the size of the data that this track entails.
@@ -289,7 +219,10 @@ class TrackDataDir:
quality but slowest compression speed.
"""
compressed = brotli.compress(data, quality=quality)
- self.backup_path().write_bytes(compressed)
+ if self.txn:
+ self.txn.write_bytes(self.backup_path(), compressed)
+ else:
+ self.backup_path().write_bytes(compressed)
def decompress_backup(self) -> bytes:
"""Returns the backup bytes decompressed.
@@ -337,15 +270,18 @@ class TrackDataDir:
:return: The ID of the saved image.
"""
image_dir = self.path / "images"
- image_dir.mkdir(parents=True, exist_ok=True)
+ if self.txn:
+ self.txn.make_dir(image_dir, exist_ok=True)
+ else:
+ image_dir.mkdir(parents=True, exist_ok=True)
filename = generate_filename(filename)
path = image_dir / filename
- with open(path, "wb") as fobj:
- shutil.copyfileobj(image, fobj)
-
- if self.journal is not None:
- self.journal.append(("add_image", path))
+ if self.txn:
+ self.txn.write_bytes(path, image.read())
+ else:
+ with open(path, "wb") as fobj:
+ shutil.copyfileobj(image, fobj)
return filename
@@ -361,10 +297,10 @@ class TrackDataDir:
return
path = self.image_path(image_id)
- if self.journal is not None:
- self.journal.append(("delete_image", path, path.read_bytes()))
-
- path.unlink()
+ if self.txn:
+ self.txn.unlink(path)
+ else:
+ path.unlink()
def preview_path(self) -> Path:
"""Gets the path to the "preview image".
@@ -378,21 +314,19 @@ class TrackDataDir:
:param data: The data of the preview image.
"""
- if self.journal is not None:
- try:
- previous_preview = self.preview_path().read_bytes()
- except FileNotFoundError:
- previous_preview = None
- self.journal.append(("set_preview", previous_preview))
- self.preview_path().write_bytes(data)
+ if self.txn:
+ self.txn.write_bytes(self.preview_path(), data)
+ else:
+ self.preview_path().write_bytes(data)
class UserDataDir:
"""Manager for a single user's data."""
- def __init__(self, user_id: int, path: Path):
+ def __init__(self, user_id: int, path: Path, *, txn: Transaction | None = None):
self.user_id = user_id
self.path = path
+ self.txn = txn
def heatmap_path(self) -> Path:
"""Returns the path for the heatmap tile file.
diff --git a/fietsboek/fstrans.py b/fietsboek/fstrans.py
new file mode 100644
index 0000000..d402266
--- /dev/null
+++ b/fietsboek/fstrans.py
@@ -0,0 +1,417 @@
+"""Filesystem transactions.
+
+Motivation
+##########
+
+Fietsboek does a lot of filesystem stuff, such as saving images and track
+backups. Like database actions, we want to ensure that these actions happen
+"atomically" -- if an error occurs during one action, we want to undo the
+previous ones. Similarly, if an error occurs after things have been sent to the
+database/filesystem, we want to ensure that we "clean up" (see `issue 98`_).
+
+.. _issue 98: https://gitlab.com/dunj3/fietsboek/-/issues/98
+
+By having "transactionized" file system actions, we can ensure that we do
+not have such issues:
+
+* Actions are reversible in case changes need to be rolled back.
+* Actions are done all-or-nothing.
+* Transactions are combined with other transactions (such as SQLAlchemy); if
+ one fails, the others will be rolled back.
+
+Implementation
+##############
+
+The main mechanism is :class:`Transaction`. It provides the commit/abort
+interface as required by the transaction_ module, and user-facing methods to
+enqueue filesystem modifications.
+
+.. _transaction: https://github.com/zopefoundation/transaction
+
+A transaction is started by :func:`begin`, which also joins the transaction
+to the other transactions managed by ``transaction``.
+
+A transaction records :class:`Action`, which represent modifications to be done
+to the filesystem. Each action knows how it should be executed. Additionally,
+each action records how to undo it -- the :class:`WriteBytes` action, for
+example, records the previous file state so it can be restored.
+
+This implementation has some drawbacks:
+
+* Modifications are kept in-memory, which might be an issue for larger files.
+* Extra time is needed to record previous states (read a file before it is
+ overwritten).
+* Changes are not visible to other programs until they are committed.
+
+But the advantage is the ease of implementation: Cancelling a transaction just
+involves clearing the in-memory buffer, and there are no additional temporary
+files needed.
+
+To further ensure that the filesystem is in a consistent state, the
+transactions use a lock file to get exclusive access. Currently, this lock
+spans the complete data directory. In the future, more fine-grained locking can
+be implemented.
+
+Usage
+#####
+
+The transaction is automatically used by the
+:class:`fietsboek.data.DataManager`.
+
+Interface
+#########
+"""
+
+import enum
+import logging
+import shutil
+import uuid
+from pathlib import Path
+
+import transaction # type: ignore
+from filelock import FileLock
+
+LOGGER = logging.getLogger(__name__)
+
+
+def _log_deletion_error(_, path, exc_info):
+ LOGGER.warning("Failed to remove %s", path, exc_info=exc_info)
+
+
+class TransactionalError(Exception):
+ """An exception that occurs when committing filesystem transactions."""
+
+
+class State(enum.Enum):
+ """State of the transaction."""
+
+ OPEN = enum.auto()
+ """Transaction is open for further actions."""
+
+ COMMITTING = enum.auto()
+ """Transaction is in the process of being committed."""
+
+ COMMITTED = enum.auto()
+ """Transaction has been committed."""
+
+ TAINTED = enum.auto()
+ """Transaction is tainted."""
+
+
+class Action:
+ """Base class for any actions that can be applied to the filesystem."""
+
+ def commit_1(self):
+ """Commit this action (phase 1).
+
+ This corresponds to ``tpc_vote``, and may raise an exception if
+ committing should be cancelled.
+ """
+
+ def commit_2(self):
+ """Commit this action (phase 2).
+
+ This corresponds to ``tpc_finish``, and should not raise an exception.
+ """
+
+ def undo(self):
+ """Undo this action.
+
+ This is called if commiting fails, to undo any changes that were
+ already committed.
+ """
+
+
+class WriteBytes(Action):
+ """Write bytes to the given file."""
+
+ def __init__(self, path: Path, data: bytes):
+ self.path = path
+ self.data = data
+ self.old: bytes | None = None
+
+ def __repr__(self):
+ return f"<WriteBytes {len(self.data)} to {self.path}>"
+
+ def commit_1(self):
+ try:
+ self.old = self.path.read_bytes()
+ except FileNotFoundError:
+ self.old = None
+ self.path.write_bytes(self.data)
+
+ def undo(self):
+ if self.old is None:
+ self.path.unlink()
+ else:
+ self.path.write_bytes(self.old)
+
+
+class Unlink(Action):
+ """Remove the given file."""
+
+ def __init__(self, path: Path):
+ self.path = path
+ self.old: bytes | None = None
+
+ def __repr__(self):
+ return f"<Unlink {self.path}>"
+
+ def commit_1(self):
+ self.old = self.path.read_bytes()
+ self.path.unlink()
+
+ def undo(self):
+ # This should not happen, unless an exception occurs when we read the
+ # file
+ if self.old is not None:
+ self.path.write_bytes(self.old)
+
+
+class MakeDir(Action):
+ """Create the given directory."""
+
+ def __init__(self, path: Path, exist_ok: bool = False):
+ self.path = path
+ self.exist_ok = exist_ok
+ self.existed: bool | None = None
+
+ def __repr__(self):
+ return f"<MakeDir {self.path}>"
+
+ def commit_1(self):
+ self.existed = self.path.is_dir()
+ self.path.mkdir(exist_ok=self.exist_ok)
+
+ def undo(self):
+ if not self.existed:
+ self.path.rmdir()
+
+
+class RemoveDir(Action):
+ """Remove the given (empty) directory."""
+
+ def __init__(self, path: Path):
+ self.path = path
+
+ def __repr__(self):
+ return f"<RemoveDir {self.path}>"
+
+ def commit_1(self):
+ self.path.rmdir()
+
+ def undo(self):
+ self.path.mkdir()
+
+
+class Purge(Action):
+ """Purge (recursively remove) the given directory."""
+
+ def __init__(self, path: Path):
+ self.path = path
+
+ def __repr__(self):
+ return f"<Purge {self.path}>"
+
+ def commit_2(self):
+ # pylint: disable=deprecated-argument
+ shutil.rmtree(self.path, ignore_errors=False, onerror=_log_deletion_error)
+
+
+class Transaction:
+ """A transaction, recording pending filesystem changes."""
+
+ def __init__(self, lock_path: Path):
+ self.actions: list[Action] = []
+ self.actions_done: list[Action] = []
+ self.state = State.OPEN
+ self.lock_path = lock_path
+ self.id = uuid.uuid4().hex
+
+ def tpc_begin(self, _trans):
+ """Begin the transaction.
+
+ This is required by the two-phase commit protocol of ``transaction``.
+ """
+
+ def tpc_vote(self, _trans):
+ """Commit (phase 1) the pending transaction.
+
+ This is required by the two-phase commit protocol of ``transaction``.
+
+ This method may raise exceptions to signal that the transaction (and
+ all linked transactions) should be aborted.
+ """
+ if not self.actions:
+ return
+
+ with FileLock(self.lock_path):
+ self.commit_1()
+
+ def tpc_finish(self, _trans):
+ """Commit (phase 2) the pending transaction.
+
+ This is required by the two-phase commit protocol of ``transaction``.
+
+ This method should not raise an exception.
+ """
+ if not self.actions:
+ return
+
+ with FileLock(self.lock_path):
+ self.commit_2()
+
+ def tpc_abort(self, _trans):
+ """Abort the transaction, undoing all previously done changes.
+
+ This is required by the two-phase commit protocol of ``transaction``.
+ """
+ if not self.actions_done:
+ return
+
+ with FileLock(self.lock_path):
+ self.undo()
+
+ # Needs to conform to transaction API:
+ # pylint: disable=invalid-name
+ def sortKey(self):
+ """Returns the sort key to sort this transaction in relation to others."""
+ return f"filesystem:{self.id}"
+
+ def undo(self):
+ """Undo all actions that have already been applied."""
+ # pylint: disable=broad-exception-caught
+ for action in reversed(self.actions_done):
+ LOGGER.debug("Undoing %s", action)
+ try:
+ action.undo()
+ except Exception as exc_inner:
+ # Hide "during the handling of ... another exception occurred"
+ exc_inner.__context__ = None
+ LOGGER.exception(
+ "Exception ignored during rollback of %s",
+ action,
+ exc_info=exc_inner,
+ )
+
+ def commit(self, _trans):
+ """Commit this transaction.
+
+ This is required by the interface of ``transaction``.
+ """
+
+ def commit_1(self):
+ """Start the first phase of committing.
+
+ This method is called automatically by the transaction manager.
+ """
+ if self.state != State.OPEN:
+ raise TransactionalError(f"Transaction is not open but {self.state}")
+
+ self.state = State.COMMITTING
+
+ try:
+ for action in self.actions:
+ LOGGER.debug("Executing 1st phase %s", action)
+ action.commit_1()
+ self.actions_done.append(action)
+ except Exception as exc:
+ LOGGER.debug("Exception while committing")
+ self.state = State.TAINTED
+ raise exc
+
+ def commit_2(self):
+ """Start the second phase of committing.
+
+ This method is called automatically by the transaction manager.
+ """
+ if self.state != State.COMMITTING:
+ raise TransactionalError(f"Transaction is not committing but {self.state}")
+
+ self.state = State.TAINTED
+ for action in self.actions:
+ LOGGER.debug("Executing 2nd phase %s", action)
+ action.commit_2()
+ self.state = State.COMMITTED
+
+ def abort(self, _trans=None):
+ """Abort this transaction."""
+ self.actions.clear()
+ self.state = State.TAINTED
+
+ def write_bytes(self, path: Path, data: bytes):
+ """Write the given bytes to the given path.
+
+ This is a transactioned version of :meth:`Path.write_bytes`.
+
+ :param path: Path where to write the bytes to.
+ :param data: The data to write.
+ """
+ if self.state != State.OPEN:
+ raise TransactionalError(f"Transaction is not open but {self.state}")
+ self.actions.append(WriteBytes(path, data))
+
+ def unlink(self, path: Path):
+ """Unlinks (removes) the given file.
+
+        This is a transactioned version of :meth:`Path.unlink`.
+
+ :param path: The path to the file to unlink.
+ """
+ if self.state != State.OPEN:
+ raise TransactionalError(f"Transaction is not open but {self.state}")
+ self.actions.append(Unlink(path))
+
+ def make_dir(self, path: Path, *, exist_ok: bool = False):
+ """Creates the directory.
+
+ This is a transactioned version of :meth:`Path.mkdir`.
+
+ :param path: The directory to create.
+ :param exist_ok: If ``True``, no error will be raised if the directory
+ already exists.
+ """
+ if self.state != State.OPEN:
+ raise TransactionalError(f"Transaction is not open but {self.state}")
+ self.actions.append(MakeDir(path, exist_ok=exist_ok))
+
+ def remove_dir(self, path: Path):
+ """Removes the (empty) directory.
+
+ This is a transactioned version of :meth:`Path.rmdir`.
+
+ :param path: The directory to remove.
+ """
+ if self.state != State.OPEN:
+ raise TransactionalError(f"Transaction is not open but {self.state}")
+ self.actions.append(RemoveDir(path))
+
+ def purge(self, path: Path):
+ """Completely remove (recursively) the given path.
+
+ This uses :func:`shutil.rmtree` to delete the path.
+
+ Unlike other actions, this cannot be undone!
+
+ :param path: The directory to remove.
+ """
+ if self.state != State.OPEN:
+ raise TransactionalError(f"Transaction is not open but {self.state}")
+ self.actions.append(Purge(path))
+
+
+def begin(lock_path: Path, tm=None) -> Transaction:
+ """Begin a new transaction and register it.
+
+ :param lock_path: The path to the lock file to use in order to synchronize
+ the transaction.
+ :param tm: The transaction manager from ``transaction`` in which to join
+ this transaction.
+ :return: The :class:`Transaction`.
+ """
+ trans = Transaction(lock_path)
+ if tm:
+ tm.get().join(trans)
+ else:
+ transaction.manager.get().join(trans)
+ return trans
diff --git a/fietsboek/updater/scripts/upd_20230103_lu8w3rwlz4ddcpms.py b/fietsboek/updater/scripts/upd_20230103_lu8w3rwlz4ddcpms.py
index 4362c2d..10c42d0 100644
--- a/fietsboek/updater/scripts/upd_20230103_lu8w3rwlz4ddcpms.py
+++ b/fietsboek/updater/scripts/upd_20230103_lu8w3rwlz4ddcpms.py
@@ -29,42 +29,42 @@ alembic_revision = 'c939800af428'
class Up(UpdateScript):
def pre_alembic(self, config):
engine = create_engine(config["sqlalchemy.url"])
- connection = engine.connect()
data_dir = Path(config["fietsboek.data_dir"])
- sql = (
- "SELECT tracks.id, tracks.title, tracks.description, tracks.date_raw, "
- "tracks.date_tz, users.name "
- "FROM tracks, users "
- "WHERE tracks.owner_id = users.id;"
- )
- for row in connection.execute(text(sql)):
- track_id, title, description, date_raw, date_tz, author_name = row
- if isinstance(date_raw, str):
- date_raw = datetime.datetime.strptime(date_raw, "%Y-%m-%d %H:%M:%S.%f")
- if date_tz is None:
- timezone = datetime.timezone.utc
- else:
- timezone = datetime.timezone(datetime.timedelta(minutes=date_tz))
- date = date_raw.replace(tzinfo=timezone)
-
- self.tell(f"Embedding metadata for track {track_id}")
- track_dir = data_dir / "tracks" / str(track_id)
- gpx_path = track_dir / "track.gpx.br"
-
- raw_gpx = brotli.decompress(gpx_path.read_bytes())
- gpx = gpxpy.parse(raw_gpx)
-
- for track in gpx.tracks:
- track.name = None
- track.description = None
-
- gpx.author_name = author_name
- gpx.name = title
- gpx.description = description
- gpx.time = date
-
- gpx_path.write_bytes(brotli.compress(gpx.to_xml().encode("utf-8"), quality=4))
+ with engine.connect() as connection:
+ sql = (
+ "SELECT tracks.id, tracks.title, tracks.description, tracks.date_raw, "
+ "tracks.date_tz, users.name "
+ "FROM tracks, users "
+ "WHERE tracks.owner_id = users.id;"
+ )
+ for row in connection.execute(text(sql)):
+ track_id, title, description, date_raw, date_tz, author_name = row
+ if isinstance(date_raw, str):
+ date_raw = datetime.datetime.strptime(date_raw, "%Y-%m-%d %H:%M:%S.%f")
+ if date_tz is None:
+ timezone = datetime.timezone.utc
+ else:
+ timezone = datetime.timezone(datetime.timedelta(minutes=date_tz))
+ date = date_raw.replace(tzinfo=timezone)
+
+ self.tell(f"Embedding metadata for track {track_id}")
+ track_dir = data_dir / "tracks" / str(track_id)
+ gpx_path = track_dir / "track.gpx.br"
+
+ raw_gpx = brotli.decompress(gpx_path.read_bytes())
+ gpx = gpxpy.parse(raw_gpx)
+
+ for track in gpx.tracks:
+ track.name = None
+ track.description = None
+
+ gpx.author_name = author_name
+ gpx.name = title
+ gpx.description = description
+ gpx.time = date
+
+ gpx_path.write_bytes(brotli.compress(gpx.to_xml().encode("utf-8"), quality=4))
def post_alembic(self, config):
pass
diff --git a/fietsboek/updater/scripts/upd_20251109_nm561argcq1s8w27.py b/fietsboek/updater/scripts/upd_20251109_nm561argcq1s8w27.py
index e3e5e47..ae6c29a 100644
--- a/fietsboek/updater/scripts/upd_20251109_nm561argcq1s8w27.py
+++ b/fietsboek/updater/scripts/upd_20251109_nm561argcq1s8w27.py
@@ -36,127 +36,128 @@ class Up(UpdateScript):
connection = engine.connect()
data_dir = Path(config["fietsboek.data_dir"])
- # This can happen in a fresh instance
- if not (data_dir / "tracks").exists():
- return
-
- for track_dir in (data_dir / "tracks").iterdir():
- track_id = int(track_dir.name)
- self.tell(f"Loading track {track_id}")
-
- gpx_path = track_dir / "track.gpx.br"
-
- # We're careful here, in case a previous update was interrupted
- if not gpx_path.exists():
- continue
-
- gpx_bytes = brotli.decompress(gpx_path.read_bytes())
-
- track = convert.smart_convert(gpx_bytes)
- with connection.begin():
- connection.execute(
- text("DELETE FROM track_points WHERE track_id = :id;"),
- {"id": track_id},
- )
- connection.execute(
- text("DELETE FROM waypoints WHERE track_id = :id;"),
- {"id": track_id},
- )
- for index, point in enumerate(track.path().points):
+ with engine.connect() as connection:
+ # This can happen in a fresh instance
+ if not (data_dir / "tracks").exists():
+ return
+
+ for track_dir in (data_dir / "tracks").iterdir():
+ track_id = int(track_dir.name)
+ self.tell(f"Loading track {track_id}")
+
+ gpx_path = track_dir / "track.gpx.br"
+
+ # We're careful here, in case a previous update was interrupted
+ if not gpx_path.exists():
+ continue
+
+ gpx_bytes = brotli.decompress(gpx_path.read_bytes())
+
+ track = convert.smart_convert(gpx_bytes)
+ with connection.begin():
connection.execute(
- text("""INSERT INTO track_points (
- track_id, "index", longitude, latitude, elevation, time_offset
- ) VALUES (
- :track_id, :index, :longitude, :latitude, :elevation, :time_offset
- );"""),
- {
- "track_id": track_id,
- "index": index,
- "longitude": point.longitude,
- "latitude": point.latitude,
- "elevation": point.elevation,
- "time_offset": point.time_offset,
- },
+ text("DELETE FROM track_points WHERE track_id = :id;"),
+ {"id": track_id},
)
- for waypoint in track.waypoints:
connection.execute(
- text("""INSERT INTO waypoints (
- track_id, longitude, latitude, elevation, name, description
- ) VALUES (
- :track_id, :longitude, :latitude, :elevation, :name, :description
- );"""),
- {
- "track_id": track_id,
- "longitude": waypoint.longitude,
- "latitude": waypoint.latitude,
- "elevation": waypoint.elevation,
- "name": waypoint.name,
- "description": waypoint.description,
- },
+ text("DELETE FROM waypoints WHERE track_id = :id;"),
+ {"id": track_id},
+ )
+ for index, point in enumerate(track.path().points):
+ connection.execute(
+ text("""INSERT INTO track_points (
+ track_id, "index", longitude, latitude, elevation, time_offset
+ ) VALUES (
+ :track_id, :index, :longitude, :latitude, :elevation, :time_offset
+ );"""),
+ {
+ "track_id": track_id,
+ "index": index,
+ "longitude": point.longitude,
+ "latitude": point.latitude,
+ "elevation": point.elevation,
+ "time_offset": point.time_offset,
+ },
+ )
+ for waypoint in track.waypoints:
+ connection.execute(
+ text("""INSERT INTO waypoints (
+ track_id, longitude, latitude, elevation, name, description
+ ) VALUES (
+ :track_id, :longitude, :latitude, :elevation, :name, :description
+ );"""),
+ {
+ "track_id": track_id,
+ "longitude": waypoint.longitude,
+ "latitude": waypoint.latitude,
+ "elevation": waypoint.elevation,
+ "name": waypoint.name,
+ "description": waypoint.description,
+ },
+ )
+
+ gpx_path.unlink()
+ shutil.move(
+ track_dir / "track.bck.gpx.br",
+ track_dir / "track.bck.br",
)
-
- gpx_path.unlink()
- shutil.move(
- track_dir / "track.bck.gpx.br",
- track_dir / "track.bck.br",
- )
class Down(UpdateScript):
def pre_alembic(self, config):
engine = create_engine(config["sqlalchemy.url"])
- connection = engine.connect()
data_dir = Path(config["fietsboek.data_dir"])
query = text("SELECT id, title, description, date_raw FROM tracks;")
- for row in connection.execute(query):
- gpx = gpxpy.gpx.GPX()
- gpx.description = row.description
- gpx.name = row.title
-
- start_date = row.date_raw
- if isinstance(start_date, str):
- start_date = datetime.datetime.fromisoformat(start_date)
-
- segment = gpxpy.gpx.GPXTrackSegment()
- points_query = text("""
- SELECT longitude, latitude, elevation, time_offset
- FROM track_points WHERE track_id = :track_id ORDER BY "index";
- """)
- for point in connection.execute(points_query, {"track_id": row.id}):
- segment.points.append(
- gpxpy.gpx.GPXTrackPoint(
- latitude=point.latitude,
- longitude=point.longitude,
- elevation=point.elevation,
- time=start_date + datetime.timedelta(seconds=point.time_offset),
+ with engine.connect() as connection:
+ for row in connection.execute(query):
+ gpx = gpxpy.gpx.GPX()
+ gpx.description = row.description
+ gpx.name = row.title
+
+ start_date = row.date_raw
+ if isinstance(start_date, str):
+ start_date = datetime.datetime.fromisoformat(start_date)
+
+ segment = gpxpy.gpx.GPXTrackSegment()
+ points_query = text("""
+ SELECT longitude, latitude, elevation, time_offset
+ FROM track_points WHERE track_id = :track_id ORDER BY "index";
+ """)
+ for point in connection.execute(points_query, {"track_id": row.id}):
+ segment.points.append(
+ gpxpy.gpx.GPXTrackPoint(
+ latitude=point.latitude,
+ longitude=point.longitude,
+ elevation=point.elevation,
+ time=start_date + datetime.timedelta(seconds=point.time_offset),
+ )
)
- )
- track = gpxpy.gpx.GPXTrack()
- track.segments.append(segment)
- gpx.tracks.append(track)
-
- waypoints_query = text("""
- SELECT longitude, latitude, elevation, name, description
- FROM waypoints WHERE track_id = :track_id;
- """)
- for wpt in connection.execute(waypoints_query, {"track_id": row.id}):
- gpx.waypoints.append(
- gpxpy.gpx.GPXWaypoint(
- longitude=wpt.longitude,
- latitude=wpt.latitude,
- elevation=wpt.elevation,
- name=wpt.name,
- comment=wpt.description,
- description=wpt.description,
+ track = gpxpy.gpx.GPXTrack()
+ track.segments.append(segment)
+ gpx.tracks.append(track)
+
+ waypoints_query = text("""
+ SELECT longitude, latitude, elevation, name, description
+ FROM waypoints WHERE track_id = :track_id;
+ """)
+ for wpt in connection.execute(waypoints_query, {"track_id": row.id}):
+ gpx.waypoints.append(
+ gpxpy.gpx.GPXWaypoint(
+ longitude=wpt.longitude,
+ latitude=wpt.latitude,
+ elevation=wpt.elevation,
+ name=wpt.name,
+ comment=wpt.description,
+ description=wpt.description,
+ )
)
- )
- xml_data = gpx.to_xml(prettyprint=False).encode("utf-8")
- track_dir = data_dir / "tracks" / str(row.id)
- (track_dir / "track.gpx.br").write_bytes(brotli.compress(xml_data))
- shutil.move(track_dir / "track.bck.br", track_dir / "track.bck.gpx.br")
+ xml_data = gpx.to_xml(prettyprint=False).encode("utf-8")
+ track_dir = data_dir / "tracks" / str(row.id)
+ (track_dir / "track.gpx.br").write_bytes(brotli.compress(xml_data))
+ shutil.move(track_dir / "track.bck.br", track_dir / "track.bck.gpx.br")
def post_alembic(self, config):
pass
diff --git a/fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py b/fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py
index e900c7a..cdc09f6 100644
--- a/fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py
+++ b/fietsboek/updater/scripts/upd_30ppwg8zi4ujb46f.py
@@ -25,18 +25,18 @@ alembic_revision = 'c939800af428'
class Up(UpdateScript):
def pre_alembic(self, config):
engine = create_engine(config["sqlalchemy.url"])
- connection = engine.connect()
- data_dir = Path(config["fietsboek.data_dir"])
+ with engine.connect() as connection:
+ data_dir = Path(config["fietsboek.data_dir"])
- for row in connection.execute(text("SELECT id, gpx FROM tracks;")):
- self.tell(f"Moving GPX data for track {row.id} from database to disk")
- track_dir = data_dir / "tracks" / str(row.id)
- track_dir.mkdir(parents=True, exist_ok=True)
+ for row in connection.execute(text("SELECT id, gpx FROM tracks;")):
+ self.tell(f"Moving GPX data for track {row.id} from database to disk")
+ track_dir = data_dir / "tracks" / str(row.id)
+ track_dir.mkdir(parents=True, exist_ok=True)
- raw_gpx = gzip.decompress(row.gpx)
- gpx_path = track_dir / "track.gpx.br"
- gpx_path.write_bytes(brotli.compress(raw_gpx, quality=5))
- shutil.copy(gpx_path, track_dir / "track.bck.gpx.br")
+ raw_gpx = gzip.decompress(row.gpx)
+ gpx_path = track_dir / "track.gpx.br"
+ gpx_path.write_bytes(brotli.compress(raw_gpx, quality=5))
+ shutil.copy(gpx_path, track_dir / "track.bck.gpx.br")
def post_alembic(self, config):
pass
@@ -48,18 +48,18 @@ class Down(UpdateScript):
def post_alembic(self, config):
engine = create_engine(config["sqlalchemy.url"])
- connection = engine.connect()
- data_dir = Path(config["fietsboek.data_dir"])
+ with engine.connect() as connection:
+ data_dir = Path(config["fietsboek.data_dir"])
- for track_path in (data_dir / "tracks").iterdir():
- track_id = int(track_path.name)
- self.tell(f"Moving GPX data for track {track_id} from disk to database")
- brotli_data = (track_path / "track.gpx.br").read_bytes()
- gzip_data = gzip.compress(brotli.decompress(brotli_data))
- connection.execute(
- text("UPDATE tracks SET gpx = :gpx WHERE id = :id;"),
- gpx=gzip_data, id=track_id
- )
+ for track_path in (data_dir / "tracks").iterdir():
+ track_id = int(track_path.name)
+ self.tell(f"Moving GPX data for track {track_id} from disk to database")
+ brotli_data = (track_path / "track.gpx.br").read_bytes()
+ gzip_data = gzip.compress(brotli.decompress(brotli_data))
+ connection.execute(
+ text("UPDATE tracks SET gpx = :gpx WHERE id = :id;"),
+ gpx=gzip_data, id=track_id
+ )
- (track_path / "track.gpx.br").unlink()
- (track_path / "track.bck.gpx.br").unlink(missing_ok=True)
+ (track_path / "track.gpx.br").unlink()
+ (track_path / "track.bck.gpx.br").unlink(missing_ok=True)
diff --git a/fietsboek/views/edit.py b/fietsboek/views/edit.py
index d5e3a92..2b559d4 100644
--- a/fietsboek/views/edit.py
+++ b/fietsboek/views/edit.py
@@ -84,46 +84,45 @@ def do_edit(request):
data: TrackDataDir = request.data_manager.open(track.id)
tz_offset = datetime.timedelta(minutes=int(request.params["date-tz"]))
date = datetime.datetime.fromisoformat(request.params["date"])
- with data, data.lock():
- redo_cache = False
+ redo_cache = False
+ try:
+ gpx_bytes = request.POST["gpx"].file.read()
+ except AttributeError:
+ pass
+ else:
+ LOGGER.info("Setting new track for %s", track.id)
try:
- gpx_bytes = request.POST["gpx"].file.read()
- except AttributeError:
- pass
- else:
- LOGGER.info("Setting new track for %s", track.id)
- try:
- new_track = convert.smart_convert(gpx_bytes)
- except convert.ConversionError as exc:
- request.session.flash(request.localizer.translate(_("flash.invalid_file")))
- LOGGER.info("Could not parse gpx: %s", exc)
- return HTTPFound(request.route_url("edit", track_id=track.id))
- data.compress_backup(gpx_bytes)
- track.fast_set_path(new_track.path())
- track.transformers = []
- redo_cache = True
-
- track.date = date.replace(tzinfo=datetime.timezone(tz_offset))
-
- track.tagged_people = tagged_people
- track.title = request.params["title"]
- track.visibility = Visibility[request.params["visibility"]]
- track.type = TrackType[request.params["type"]]
- track.description = request.params["description"]
- track.badges = badges
- tags = request.params.getall("tag[]")
- track.sync_tags(tags)
-
- actions.edit_images(request, request.context, manager=data)
- actions.execute_transformers(request, request.context)
-
- # actions.execute_transformers automatically rebuilds the cache, so we only need to do
- # this if execute_transformers didn't do it
- if redo_cache:
- LOGGER.info("New file detected, rebuilding cache for %s", track.id)
- track.cache = None
- track.ensure_cache()
- request.dbsession.add(track.cache)
+ new_track = convert.smart_convert(gpx_bytes)
+ except convert.ConversionError as exc:
+ request.session.flash(request.localizer.translate(_("flash.invalid_file")))
+ LOGGER.info("Could not parse gpx: %s", exc)
+ return HTTPFound(request.route_url("edit", track_id=track.id))
+ data.compress_backup(gpx_bytes)
+ track.fast_set_path(new_track.path())
+ track.transformers = []
+ redo_cache = True
+
+ track.date = date.replace(tzinfo=datetime.timezone(tz_offset))
+
+ track.tagged_people = tagged_people
+ track.title = request.params["title"]
+ track.visibility = Visibility[request.params["visibility"]]
+ track.type = TrackType[request.params["type"]]
+ track.description = request.params["description"]
+ track.badges = badges
+ tags = request.params.getall("tag[]")
+ track.sync_tags(tags)
+
+ actions.edit_images(request, request.context, manager=data)
+ actions.execute_transformers(request, request.context)
+
+ # actions.execute_transformers automatically rebuilds the cache, so we only need to do
+ # this if execute_transformers didn't do it
+ if redo_cache:
+ LOGGER.info("New file detected, rebuilding cache for %s", track.id)
+ track.cache = None
+ track.ensure_cache()
+ request.dbsession.add(track.cache)
return HTTPFound(request.route_url("details", track_id=track.id))
diff --git a/fietsboek/views/upload.py b/fietsboek/views/upload.py
index 5c86b29..7be5a42 100644
--- a/fietsboek/views/upload.py
+++ b/fietsboek/views/upload.py
@@ -170,14 +170,9 @@ def do_finish_upload(request):
request.dbsession.delete(upload)
# Don't forget to add the images
+ manager = request.data_manager.open(track.id, force=True)
LOGGER.debug("Starting to edit images for the upload")
- try:
- actions.edit_images(request, track)
- except Exception:
- # We just created the folder, so we'll be fine deleting it
- LOGGER.info("Deleting partially created folder for track %d", track.id)
- request.data_manager.open(track.id).purge()
- raise
+ actions.edit_images(request, track, manager=manager)
request.session.flash(request.localizer.translate(_("flash.upload_success")))
diff --git a/tests/conftest.py b/tests/conftest.py
index b49dad2..732c8d2 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -76,8 +76,10 @@ def _cleanup_data(app_settings):
data_dir = Path(app_settings["fietsboek.data_dir"])
if (data_dir / "tracks").is_dir():
shutil.rmtree(data_dir / "tracks")
+ (data_dir / "tracks").mkdir()
if (data_dir / "users").is_dir():
shutil.rmtree(data_dir / "users")
+ (data_dir / "users").mkdir()
@pytest.fixture(scope='module')
def app(app_settings, dbengine, tmp_path_factory):